diff options
author | Jerome Glisse <jglisse@redhat.com> | 2009-07-13 15:04:08 -0400 |
---|---|---|
committer | Dave Airlie <airlied@redhat.com> | 2009-07-29 01:45:09 -0400 |
commit | c93bb85b5cba3e3a06f2cad8e9bc5c23d3d10aac (patch) | |
tree | 3168bee69e08dcb1f0f509b03ea1693a688d34ef /drivers/gpu/drm/radeon/rv515.c | |
parent | e024e11070a0a0dc7163ce1ec2da354a638bdbed (diff) |
drm/radeon/kms: fix bandwidth computation on avivo hardware
Fix bandwidth computation and crtc priority in memory controller
so that CRTC memory requests are fulfilled in time to avoid display
artifacts.
Signed-off-by: Jerome Glisse <jglisse@redhat.com>
Signed-off-by: Dave Airlie <airlied@redhat.com>
Diffstat (limited to 'drivers/gpu/drm/radeon/rv515.c')
-rw-r--r-- | drivers/gpu/drm/radeon/rv515.c | 774 |
1 files changed, 652 insertions, 122 deletions
diff --git a/drivers/gpu/drm/radeon/rv515.c b/drivers/gpu/drm/radeon/rv515.c index 677929ed8ed3..4fd411893b91 100644 --- a/drivers/gpu/drm/radeon/rv515.c +++ b/drivers/gpu/drm/radeon/rv515.c | |||
@@ -27,8 +27,9 @@ | |||
27 | */ | 27 | */ |
28 | #include <linux/seq_file.h> | 28 | #include <linux/seq_file.h> |
29 | #include "drmP.h" | 29 | #include "drmP.h" |
30 | #include "radeon_reg.h" | 30 | #include "rv515r.h" |
31 | #include "radeon.h" | 31 | #include "radeon.h" |
32 | #include "radeon_share.h" | ||
32 | 33 | ||
33 | /* rv515 depends on : */ | 34 | /* rv515 depends on : */ |
34 | void r100_hdp_reset(struct radeon_device *rdev); | 35 | void r100_hdp_reset(struct radeon_device *rdev); |
@@ -100,25 +101,25 @@ int rv515_mc_init(struct radeon_device *rdev) | |||
100 | } | 101 | } |
101 | /* Write VRAM size in case we are limiting it */ | 102 | /* Write VRAM size in case we are limiting it */ |
102 | WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.vram_size); | 103 | WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.vram_size); |
103 | tmp = REG_SET(RV515_MC_FB_START, rdev->mc.vram_location >> 16); | 104 | tmp = REG_SET(MC_FB_START, rdev->mc.vram_location >> 16); |
104 | WREG32(0x134, tmp); | 105 | WREG32(0x134, tmp); |
105 | tmp = rdev->mc.vram_location + rdev->mc.vram_size - 1; | 106 | tmp = rdev->mc.vram_location + rdev->mc.vram_size - 1; |
106 | tmp = REG_SET(RV515_MC_FB_TOP, tmp >> 16); | 107 | tmp = REG_SET(MC_FB_TOP, tmp >> 16); |
107 | tmp |= REG_SET(RV515_MC_FB_START, rdev->mc.vram_location >> 16); | 108 | tmp |= REG_SET(MC_FB_START, rdev->mc.vram_location >> 16); |
108 | WREG32_MC(RV515_MC_FB_LOCATION, tmp); | 109 | WREG32_MC(MC_FB_LOCATION, tmp); |
109 | WREG32(RS690_HDP_FB_LOCATION, rdev->mc.vram_location >> 16); | 110 | WREG32(HDP_FB_LOCATION, rdev->mc.vram_location >> 16); |
110 | WREG32(0x310, rdev->mc.vram_location); | 111 | WREG32(0x310, rdev->mc.vram_location); |
111 | if (rdev->flags & RADEON_IS_AGP) { | 112 | if (rdev->flags & RADEON_IS_AGP) { |
112 | tmp = rdev->mc.gtt_location + rdev->mc.gtt_size - 1; | 113 | tmp = rdev->mc.gtt_location + rdev->mc.gtt_size - 1; |
113 | tmp = REG_SET(RV515_MC_AGP_TOP, tmp >> 16); | 114 | tmp = REG_SET(MC_AGP_TOP, tmp >> 16); |
114 | tmp |= REG_SET(RV515_MC_AGP_START, rdev->mc.gtt_location >> 16); | 115 | tmp |= REG_SET(MC_AGP_START, rdev->mc.gtt_location >> 16); |
115 | WREG32_MC(RV515_MC_AGP_LOCATION, tmp); | 116 | WREG32_MC(MC_AGP_LOCATION, tmp); |
116 | WREG32_MC(RV515_MC_AGP_BASE, rdev->mc.agp_base); | 117 | WREG32_MC(MC_AGP_BASE, rdev->mc.agp_base); |
117 | WREG32_MC(RV515_MC_AGP_BASE_2, 0); | 118 | WREG32_MC(MC_AGP_BASE_2, 0); |
118 | } else { | 119 | } else { |
119 | WREG32_MC(RV515_MC_AGP_LOCATION, 0x0FFFFFFF); | 120 | WREG32_MC(MC_AGP_LOCATION, 0x0FFFFFFF); |
120 | WREG32_MC(RV515_MC_AGP_BASE, 0); | 121 | WREG32_MC(MC_AGP_BASE, 0); |
121 | WREG32_MC(RV515_MC_AGP_BASE_2, 0); | 122 | WREG32_MC(MC_AGP_BASE_2, 0); |
122 | } | 123 | } |
123 | return 0; | 124 | return 0; |
124 | } | 125 | } |
@@ -136,95 +137,67 @@ void rv515_mc_fini(struct radeon_device *rdev) | |||
136 | */ | 137 | */ |
137 | void rv515_ring_start(struct radeon_device *rdev) | 138 | void rv515_ring_start(struct radeon_device *rdev) |
138 | { | 139 | { |
139 | unsigned gb_tile_config; | ||
140 | int r; | 140 | int r; |
141 | 141 | ||
142 | /* Sub pixel 1/12 so we can have 4K rendering according to doc */ | ||
143 | gb_tile_config = R300_ENABLE_TILING | R300_TILE_SIZE_16; | ||
144 | switch (rdev->num_gb_pipes) { | ||
145 | case 2: | ||
146 | gb_tile_config |= R300_PIPE_COUNT_R300; | ||
147 | break; | ||
148 | case 3: | ||
149 | gb_tile_config |= R300_PIPE_COUNT_R420_3P; | ||
150 | break; | ||
151 | case 4: | ||
152 | gb_tile_config |= R300_PIPE_COUNT_R420; | ||
153 | break; | ||
154 | case 1: | ||
155 | default: | ||
156 | gb_tile_config |= R300_PIPE_COUNT_RV350; | ||
157 | break; | ||
158 | } | ||
159 | |||
160 | r = radeon_ring_lock(rdev, 64); | 142 | r = radeon_ring_lock(rdev, 64); |
161 | if (r) { | 143 | if (r) { |
162 | return; | 144 | return; |
163 | } | 145 | } |
164 | radeon_ring_write(rdev, PACKET0(RADEON_ISYNC_CNTL, 0)); | 146 | radeon_ring_write(rdev, PACKET0(ISYNC_CNTL, 0)); |
165 | radeon_ring_write(rdev, | ||
166 | RADEON_ISYNC_ANY2D_IDLE3D | | ||
167 | RADEON_ISYNC_ANY3D_IDLE2D | | ||
168 | RADEON_ISYNC_WAIT_IDLEGUI | | ||
169 | RADEON_ISYNC_CPSCRATCH_IDLEGUI); | ||
170 | radeon_ring_write(rdev, PACKET0(R300_GB_TILE_CONFIG, 0)); | ||
171 | radeon_ring_write(rdev, gb_tile_config); | ||
172 | radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0)); | ||
173 | radeon_ring_write(rdev, | 147 | radeon_ring_write(rdev, |
174 | RADEON_WAIT_2D_IDLECLEAN | | 148 | ISYNC_ANY2D_IDLE3D | |
175 | RADEON_WAIT_3D_IDLECLEAN); | 149 | ISYNC_ANY3D_IDLE2D | |
150 | ISYNC_WAIT_IDLEGUI | | ||
151 | ISYNC_CPSCRATCH_IDLEGUI); | ||
152 | radeon_ring_write(rdev, PACKET0(WAIT_UNTIL, 0)); | ||
153 | radeon_ring_write(rdev, WAIT_2D_IDLECLEAN | WAIT_3D_IDLECLEAN); | ||
176 | radeon_ring_write(rdev, PACKET0(0x170C, 0)); | 154 | radeon_ring_write(rdev, PACKET0(0x170C, 0)); |
177 | radeon_ring_write(rdev, 1 << 31); | 155 | radeon_ring_write(rdev, 1 << 31); |
178 | radeon_ring_write(rdev, PACKET0(R300_GB_SELECT, 0)); | 156 | radeon_ring_write(rdev, PACKET0(GB_SELECT, 0)); |
179 | radeon_ring_write(rdev, 0); | 157 | radeon_ring_write(rdev, 0); |
180 | radeon_ring_write(rdev, PACKET0(R300_GB_ENABLE, 0)); | 158 | radeon_ring_write(rdev, PACKET0(GB_ENABLE, 0)); |
181 | radeon_ring_write(rdev, 0); | 159 | radeon_ring_write(rdev, 0); |
182 | radeon_ring_write(rdev, PACKET0(0x42C8, 0)); | 160 | radeon_ring_write(rdev, PACKET0(0x42C8, 0)); |
183 | radeon_ring_write(rdev, (1 << rdev->num_gb_pipes) - 1); | 161 | radeon_ring_write(rdev, (1 << rdev->num_gb_pipes) - 1); |
184 | radeon_ring_write(rdev, PACKET0(R500_VAP_INDEX_OFFSET, 0)); | 162 | radeon_ring_write(rdev, PACKET0(VAP_INDEX_OFFSET, 0)); |
185 | radeon_ring_write(rdev, 0); | 163 | radeon_ring_write(rdev, 0); |
186 | radeon_ring_write(rdev, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0)); | 164 | radeon_ring_write(rdev, PACKET0(RB3D_DSTCACHE_CTLSTAT, 0)); |
187 | radeon_ring_write(rdev, R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE); | 165 | radeon_ring_write(rdev, RB3D_DC_FLUSH | RB3D_DC_FREE); |
188 | radeon_ring_write(rdev, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0)); | 166 | radeon_ring_write(rdev, PACKET0(ZB_ZCACHE_CTLSTAT, 0)); |
189 | radeon_ring_write(rdev, R300_ZC_FLUSH | R300_ZC_FREE); | 167 | radeon_ring_write(rdev, ZC_FLUSH | ZC_FREE); |
190 | radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0)); | 168 | radeon_ring_write(rdev, PACKET0(WAIT_UNTIL, 0)); |
191 | radeon_ring_write(rdev, | 169 | radeon_ring_write(rdev, WAIT_2D_IDLECLEAN | WAIT_3D_IDLECLEAN); |
192 | RADEON_WAIT_2D_IDLECLEAN | | 170 | radeon_ring_write(rdev, PACKET0(GB_AA_CONFIG, 0)); |
193 | RADEON_WAIT_3D_IDLECLEAN); | ||
194 | radeon_ring_write(rdev, PACKET0(R300_GB_AA_CONFIG, 0)); | ||
195 | radeon_ring_write(rdev, 0); | 171 | radeon_ring_write(rdev, 0); |
196 | radeon_ring_write(rdev, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0)); | 172 | radeon_ring_write(rdev, PACKET0(RB3D_DSTCACHE_CTLSTAT, 0)); |
197 | radeon_ring_write(rdev, R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE); | 173 | radeon_ring_write(rdev, RB3D_DC_FLUSH | RB3D_DC_FREE); |
198 | radeon_ring_write(rdev, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0)); | 174 | radeon_ring_write(rdev, PACKET0(ZB_ZCACHE_CTLSTAT, 0)); |
199 | radeon_ring_write(rdev, R300_ZC_FLUSH | R300_ZC_FREE); | 175 | radeon_ring_write(rdev, ZC_FLUSH | ZC_FREE); |
200 | radeon_ring_write(rdev, PACKET0(R300_GB_MSPOS0, 0)); | 176 | radeon_ring_write(rdev, PACKET0(GB_MSPOS0, 0)); |
201 | radeon_ring_write(rdev, | ||
202 | ((6 << R300_MS_X0_SHIFT) | | ||
203 | (6 << R300_MS_Y0_SHIFT) | | ||
204 | (6 << R300_MS_X1_SHIFT) | | ||
205 | (6 << R300_MS_Y1_SHIFT) | | ||
206 | (6 << R300_MS_X2_SHIFT) | | ||
207 | (6 << R300_MS_Y2_SHIFT) | | ||
208 | (6 << R300_MSBD0_Y_SHIFT) | | ||
209 | (6 << R300_MSBD0_X_SHIFT))); | ||
210 | radeon_ring_write(rdev, PACKET0(R300_GB_MSPOS1, 0)); | ||
211 | radeon_ring_write(rdev, | ||
212 | ((6 << R300_MS_X3_SHIFT) | | ||
213 | (6 << R300_MS_Y3_SHIFT) | | ||
214 | (6 << R300_MS_X4_SHIFT) | | ||
215 | (6 << R300_MS_Y4_SHIFT) | | ||
216 | (6 << R300_MS_X5_SHIFT) | | ||
217 | (6 << R300_MS_Y5_SHIFT) | | ||
218 | (6 << R300_MSBD1_SHIFT))); | ||
219 | radeon_ring_write(rdev, PACKET0(R300_GA_ENHANCE, 0)); | ||
220 | radeon_ring_write(rdev, R300_GA_DEADLOCK_CNTL | R300_GA_FASTSYNC_CNTL); | ||
221 | radeon_ring_write(rdev, PACKET0(R300_GA_POLY_MODE, 0)); | ||
222 | radeon_ring_write(rdev, | 177 | radeon_ring_write(rdev, |
223 | R300_FRONT_PTYPE_TRIANGE | R300_BACK_PTYPE_TRIANGE); | 178 | ((6 << MS_X0_SHIFT) | |
224 | radeon_ring_write(rdev, PACKET0(R300_GA_ROUND_MODE, 0)); | 179 | (6 << MS_Y0_SHIFT) | |
180 | (6 << MS_X1_SHIFT) | | ||
181 | (6 << MS_Y1_SHIFT) | | ||
182 | (6 << MS_X2_SHIFT) | | ||
183 | (6 << MS_Y2_SHIFT) | | ||
184 | (6 << MSBD0_Y_SHIFT) | | ||
185 | (6 << MSBD0_X_SHIFT))); | ||
186 | radeon_ring_write(rdev, PACKET0(GB_MSPOS1, 0)); | ||
225 | radeon_ring_write(rdev, | 187 | radeon_ring_write(rdev, |
226 | R300_GEOMETRY_ROUND_NEAREST | | 188 | ((6 << MS_X3_SHIFT) | |
227 | R300_COLOR_ROUND_NEAREST); | 189 | (6 << MS_Y3_SHIFT) | |
190 | (6 << MS_X4_SHIFT) | | ||
191 | (6 << MS_Y4_SHIFT) | | ||
192 | (6 << MS_X5_SHIFT) | | ||
193 | (6 << MS_Y5_SHIFT) | | ||
194 | (6 << MSBD1_SHIFT))); | ||
195 | radeon_ring_write(rdev, PACKET0(GA_ENHANCE, 0)); | ||
196 | radeon_ring_write(rdev, GA_DEADLOCK_CNTL | GA_FASTSYNC_CNTL); | ||
197 | radeon_ring_write(rdev, PACKET0(GA_POLY_MODE, 0)); | ||
198 | radeon_ring_write(rdev, FRONT_PTYPE_TRIANGE | BACK_PTYPE_TRIANGE); | ||
199 | radeon_ring_write(rdev, PACKET0(GA_ROUND_MODE, 0)); | ||
200 | radeon_ring_write(rdev, GEOMETRY_ROUND_NEAREST | COLOR_ROUND_NEAREST); | ||
228 | radeon_ring_write(rdev, PACKET0(0x20C8, 0)); | 201 | radeon_ring_write(rdev, PACKET0(0x20C8, 0)); |
229 | radeon_ring_write(rdev, 0); | 202 | radeon_ring_write(rdev, 0); |
230 | radeon_ring_unlock_commit(rdev); | 203 | radeon_ring_unlock_commit(rdev); |
@@ -242,8 +215,8 @@ int rv515_mc_wait_for_idle(struct radeon_device *rdev) | |||
242 | 215 | ||
243 | for (i = 0; i < rdev->usec_timeout; i++) { | 216 | for (i = 0; i < rdev->usec_timeout; i++) { |
244 | /* read MC_STATUS */ | 217 | /* read MC_STATUS */ |
245 | tmp = RREG32_MC(RV515_MC_STATUS); | 218 | tmp = RREG32_MC(MC_STATUS); |
246 | if (tmp & RV515_MC_STATUS_IDLE) { | 219 | if (tmp & MC_STATUS_IDLE) { |
247 | return 0; | 220 | return 0; |
248 | } | 221 | } |
249 | DRM_UDELAY(1); | 222 | DRM_UDELAY(1); |
@@ -291,33 +264,33 @@ int rv515_ga_reset(struct radeon_device *rdev) | |||
291 | reinit_cp = rdev->cp.ready; | 264 | reinit_cp = rdev->cp.ready; |
292 | rdev->cp.ready = false; | 265 | rdev->cp.ready = false; |
293 | for (i = 0; i < rdev->usec_timeout; i++) { | 266 | for (i = 0; i < rdev->usec_timeout; i++) { |
294 | WREG32(RADEON_CP_CSQ_MODE, 0); | 267 | WREG32(CP_CSQ_MODE, 0); |
295 | WREG32(RADEON_CP_CSQ_CNTL, 0); | 268 | WREG32(CP_CSQ_CNTL, 0); |
296 | WREG32(RADEON_RBBM_SOFT_RESET, 0x32005); | 269 | WREG32(RBBM_SOFT_RESET, 0x32005); |
297 | (void)RREG32(RADEON_RBBM_SOFT_RESET); | 270 | (void)RREG32(RBBM_SOFT_RESET); |
298 | udelay(200); | 271 | udelay(200); |
299 | WREG32(RADEON_RBBM_SOFT_RESET, 0); | 272 | WREG32(RBBM_SOFT_RESET, 0); |
300 | /* Wait to prevent race in RBBM_STATUS */ | 273 | /* Wait to prevent race in RBBM_STATUS */ |
301 | mdelay(1); | 274 | mdelay(1); |
302 | tmp = RREG32(RADEON_RBBM_STATUS); | 275 | tmp = RREG32(RBBM_STATUS); |
303 | if (tmp & ((1 << 20) | (1 << 26))) { | 276 | if (tmp & ((1 << 20) | (1 << 26))) { |
304 | DRM_ERROR("VAP & CP still busy (RBBM_STATUS=0x%08X)\n", tmp); | 277 | DRM_ERROR("VAP & CP still busy (RBBM_STATUS=0x%08X)\n", tmp); |
305 | /* GA still busy soft reset it */ | 278 | /* GA still busy soft reset it */ |
306 | WREG32(0x429C, 0x200); | 279 | WREG32(0x429C, 0x200); |
307 | WREG32(R300_VAP_PVS_STATE_FLUSH_REG, 0); | 280 | WREG32(VAP_PVS_STATE_FLUSH_REG, 0); |
308 | WREG32(0x43E0, 0); | 281 | WREG32(0x43E0, 0); |
309 | WREG32(0x43E4, 0); | 282 | WREG32(0x43E4, 0); |
310 | WREG32(0x24AC, 0); | 283 | WREG32(0x24AC, 0); |
311 | } | 284 | } |
312 | /* Wait to prevent race in RBBM_STATUS */ | 285 | /* Wait to prevent race in RBBM_STATUS */ |
313 | mdelay(1); | 286 | mdelay(1); |
314 | tmp = RREG32(RADEON_RBBM_STATUS); | 287 | tmp = RREG32(RBBM_STATUS); |
315 | if (!(tmp & ((1 << 20) | (1 << 26)))) { | 288 | if (!(tmp & ((1 << 20) | (1 << 26)))) { |
316 | break; | 289 | break; |
317 | } | 290 | } |
318 | } | 291 | } |
319 | for (i = 0; i < rdev->usec_timeout; i++) { | 292 | for (i = 0; i < rdev->usec_timeout; i++) { |
320 | tmp = RREG32(RADEON_RBBM_STATUS); | 293 | tmp = RREG32(RBBM_STATUS); |
321 | if (!(tmp & ((1 << 20) | (1 << 26)))) { | 294 | if (!(tmp & ((1 << 20) | (1 << 26)))) { |
322 | DRM_INFO("GA reset succeed (RBBM_STATUS=0x%08X)\n", | 295 | DRM_INFO("GA reset succeed (RBBM_STATUS=0x%08X)\n", |
323 | tmp); | 296 | tmp); |
@@ -331,7 +304,7 @@ int rv515_ga_reset(struct radeon_device *rdev) | |||
331 | } | 304 | } |
332 | DRM_UDELAY(1); | 305 | DRM_UDELAY(1); |
333 | } | 306 | } |
334 | tmp = RREG32(RADEON_RBBM_STATUS); | 307 | tmp = RREG32(RBBM_STATUS); |
335 | DRM_ERROR("Failed to reset GA ! (RBBM_STATUS=0x%08X)\n", tmp); | 308 | DRM_ERROR("Failed to reset GA ! (RBBM_STATUS=0x%08X)\n", tmp); |
336 | return -1; | 309 | return -1; |
337 | } | 310 | } |
@@ -341,7 +314,7 @@ int rv515_gpu_reset(struct radeon_device *rdev) | |||
341 | uint32_t status; | 314 | uint32_t status; |
342 | 315 | ||
343 | /* reset order likely matter */ | 316 | /* reset order likely matter */ |
344 | status = RREG32(RADEON_RBBM_STATUS); | 317 | status = RREG32(RBBM_STATUS); |
345 | /* reset HDP */ | 318 | /* reset HDP */ |
346 | r100_hdp_reset(rdev); | 319 | r100_hdp_reset(rdev); |
347 | /* reset rb2d */ | 320 | /* reset rb2d */ |
@@ -353,12 +326,12 @@ int rv515_gpu_reset(struct radeon_device *rdev) | |||
353 | rv515_ga_reset(rdev); | 326 | rv515_ga_reset(rdev); |
354 | } | 327 | } |
355 | /* reset CP */ | 328 | /* reset CP */ |
356 | status = RREG32(RADEON_RBBM_STATUS); | 329 | status = RREG32(RBBM_STATUS); |
357 | if (status & (1 << 16)) { | 330 | if (status & (1 << 16)) { |
358 | r100_cp_reset(rdev); | 331 | r100_cp_reset(rdev); |
359 | } | 332 | } |
360 | /* Check if GPU is idle */ | 333 | /* Check if GPU is idle */ |
361 | status = RREG32(RADEON_RBBM_STATUS); | 334 | status = RREG32(RBBM_STATUS); |
362 | if (status & (1 << 31)) { | 335 | if (status & (1 << 31)) { |
363 | DRM_ERROR("Failed to reset GPU (RBBM_STATUS=0x%08X)\n", status); | 336 | DRM_ERROR("Failed to reset GPU (RBBM_STATUS=0x%08X)\n", status); |
364 | return -1; | 337 | return -1; |
@@ -377,8 +350,7 @@ static void rv515_vram_get_type(struct radeon_device *rdev) | |||
377 | 350 | ||
378 | rdev->mc.vram_width = 128; | 351 | rdev->mc.vram_width = 128; |
379 | rdev->mc.vram_is_ddr = true; | 352 | rdev->mc.vram_is_ddr = true; |
380 | tmp = RREG32_MC(RV515_MC_CNTL); | 353 | tmp = RREG32_MC(RV515_MC_CNTL) & MEM_NUM_CHANNELS_MASK; |
381 | tmp &= RV515_MEM_NUM_CHANNELS_MASK; | ||
382 | switch (tmp) { | 354 | switch (tmp) { |
383 | case 0: | 355 | case 0: |
384 | rdev->mc.vram_width = 64; | 356 | rdev->mc.vram_width = 64; |
@@ -394,9 +366,19 @@ static void rv515_vram_get_type(struct radeon_device *rdev) | |||
394 | 366 | ||
395 | void rv515_vram_info(struct radeon_device *rdev) | 367 | void rv515_vram_info(struct radeon_device *rdev) |
396 | { | 368 | { |
369 | fixed20_12 a; | ||
370 | |||
397 | rv515_vram_get_type(rdev); | 371 | rv515_vram_get_type(rdev); |
398 | 372 | rdev->mc.vram_size = RREG32(CONFIG_MEMSIZE); | |
399 | r100_vram_init_sizes(rdev); | 373 | |
374 | rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0); | ||
375 | rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0); | ||
376 | /* FIXME: we should enforce default clock in case GPU is not in | ||
377 | * default setup | ||
378 | */ | ||
379 | a.full = rfixed_const(100); | ||
380 | rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk); | ||
381 | rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a); | ||
400 | } | 382 | } |
401 | 383 | ||
402 | 384 | ||
@@ -407,35 +389,35 @@ uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg) | |||
407 | { | 389 | { |
408 | uint32_t r; | 390 | uint32_t r; |
409 | 391 | ||
410 | WREG32(R520_MC_IND_INDEX, 0x7f0000 | (reg & 0xffff)); | 392 | WREG32(MC_IND_INDEX, 0x7f0000 | (reg & 0xffff)); |
411 | r = RREG32(R520_MC_IND_DATA); | 393 | r = RREG32(MC_IND_DATA); |
412 | WREG32(R520_MC_IND_INDEX, 0); | 394 | WREG32(MC_IND_INDEX, 0); |
413 | return r; | 395 | return r; |
414 | } | 396 | } |
415 | 397 | ||
416 | void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) | 398 | void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) |
417 | { | 399 | { |
418 | WREG32(R520_MC_IND_INDEX, 0xff0000 | ((reg) & 0xffff)); | 400 | WREG32(MC_IND_INDEX, 0xff0000 | ((reg) & 0xffff)); |
419 | WREG32(R520_MC_IND_DATA, (v)); | 401 | WREG32(MC_IND_DATA, (v)); |
420 | WREG32(R520_MC_IND_INDEX, 0); | 402 | WREG32(MC_IND_INDEX, 0); |
421 | } | 403 | } |
422 | 404 | ||
423 | uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg) | 405 | uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg) |
424 | { | 406 | { |
425 | uint32_t r; | 407 | uint32_t r; |
426 | 408 | ||
427 | WREG32(RADEON_PCIE_INDEX, ((reg) & 0x7ff)); | 409 | WREG32(PCIE_INDEX, ((reg) & 0x7ff)); |
428 | (void)RREG32(RADEON_PCIE_INDEX); | 410 | (void)RREG32(PCIE_INDEX); |
429 | r = RREG32(RADEON_PCIE_DATA); | 411 | r = RREG32(PCIE_DATA); |
430 | return r; | 412 | return r; |
431 | } | 413 | } |
432 | 414 | ||
433 | void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) | 415 | void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) |
434 | { | 416 | { |
435 | WREG32(RADEON_PCIE_INDEX, ((reg) & 0x7ff)); | 417 | WREG32(PCIE_INDEX, ((reg) & 0x7ff)); |
436 | (void)RREG32(RADEON_PCIE_INDEX); | 418 | (void)RREG32(PCIE_INDEX); |
437 | WREG32(RADEON_PCIE_DATA, (v)); | 419 | WREG32(PCIE_DATA, (v)); |
438 | (void)RREG32(RADEON_PCIE_DATA); | 420 | (void)RREG32(PCIE_DATA); |
439 | } | 421 | } |
440 | 422 | ||
441 | 423 | ||
@@ -450,13 +432,13 @@ static int rv515_debugfs_pipes_info(struct seq_file *m, void *data) | |||
450 | struct radeon_device *rdev = dev->dev_private; | 432 | struct radeon_device *rdev = dev->dev_private; |
451 | uint32_t tmp; | 433 | uint32_t tmp; |
452 | 434 | ||
453 | tmp = RREG32(R400_GB_PIPE_SELECT); | 435 | tmp = RREG32(GB_PIPE_SELECT); |
454 | seq_printf(m, "GB_PIPE_SELECT 0x%08x\n", tmp); | 436 | seq_printf(m, "GB_PIPE_SELECT 0x%08x\n", tmp); |
455 | tmp = RREG32(R500_SU_REG_DEST); | 437 | tmp = RREG32(SU_REG_DEST); |
456 | seq_printf(m, "SU_REG_DEST 0x%08x\n", tmp); | 438 | seq_printf(m, "SU_REG_DEST 0x%08x\n", tmp); |
457 | tmp = RREG32(R300_GB_TILE_CONFIG); | 439 | tmp = RREG32(GB_TILE_CONFIG); |
458 | seq_printf(m, "GB_TILE_CONFIG 0x%08x\n", tmp); | 440 | seq_printf(m, "GB_TILE_CONFIG 0x%08x\n", tmp); |
459 | tmp = RREG32(R300_DST_PIPE_CONFIG); | 441 | tmp = RREG32(DST_PIPE_CONFIG); |
460 | seq_printf(m, "DST_PIPE_CONFIG 0x%08x\n", tmp); | 442 | seq_printf(m, "DST_PIPE_CONFIG 0x%08x\n", tmp); |
461 | return 0; | 443 | return 0; |
462 | } | 444 | } |
@@ -571,3 +553,551 @@ int rv515_init(struct radeon_device *rdev) | |||
571 | rdev->config.r300.reg_safe_bm_size = ARRAY_SIZE(r500_reg_safe_bm); | 553 | rdev->config.r300.reg_safe_bm_size = ARRAY_SIZE(r500_reg_safe_bm); |
572 | return 0; | 554 | return 0; |
573 | } | 555 | } |
556 | |||
557 | void atom_rv515_force_tv_scaler(struct radeon_device *rdev) | ||
558 | { | ||
559 | |||
560 | WREG32(0x659C, 0x0); | ||
561 | WREG32(0x6594, 0x705); | ||
562 | WREG32(0x65A4, 0x10001); | ||
563 | WREG32(0x65D8, 0x0); | ||
564 | WREG32(0x65B0, 0x0); | ||
565 | WREG32(0x65C0, 0x0); | ||
566 | WREG32(0x65D4, 0x0); | ||
567 | WREG32(0x6578, 0x0); | ||
568 | WREG32(0x657C, 0x841880A8); | ||
569 | WREG32(0x6578, 0x1); | ||
570 | WREG32(0x657C, 0x84208680); | ||
571 | WREG32(0x6578, 0x2); | ||
572 | WREG32(0x657C, 0xBFF880B0); | ||
573 | WREG32(0x6578, 0x100); | ||
574 | WREG32(0x657C, 0x83D88088); | ||
575 | WREG32(0x6578, 0x101); | ||
576 | WREG32(0x657C, 0x84608680); | ||
577 | WREG32(0x6578, 0x102); | ||
578 | WREG32(0x657C, 0xBFF080D0); | ||
579 | WREG32(0x6578, 0x200); | ||
580 | WREG32(0x657C, 0x83988068); | ||
581 | WREG32(0x6578, 0x201); | ||
582 | WREG32(0x657C, 0x84A08680); | ||
583 | WREG32(0x6578, 0x202); | ||
584 | WREG32(0x657C, 0xBFF080F8); | ||
585 | WREG32(0x6578, 0x300); | ||
586 | WREG32(0x657C, 0x83588058); | ||
587 | WREG32(0x6578, 0x301); | ||
588 | WREG32(0x657C, 0x84E08660); | ||
589 | WREG32(0x6578, 0x302); | ||
590 | WREG32(0x657C, 0xBFF88120); | ||
591 | WREG32(0x6578, 0x400); | ||
592 | WREG32(0x657C, 0x83188040); | ||
593 | WREG32(0x6578, 0x401); | ||
594 | WREG32(0x657C, 0x85008660); | ||
595 | WREG32(0x6578, 0x402); | ||
596 | WREG32(0x657C, 0xBFF88150); | ||
597 | WREG32(0x6578, 0x500); | ||
598 | WREG32(0x657C, 0x82D88030); | ||
599 | WREG32(0x6578, 0x501); | ||
600 | WREG32(0x657C, 0x85408640); | ||
601 | WREG32(0x6578, 0x502); | ||
602 | WREG32(0x657C, 0xBFF88180); | ||
603 | WREG32(0x6578, 0x600); | ||
604 | WREG32(0x657C, 0x82A08018); | ||
605 | WREG32(0x6578, 0x601); | ||
606 | WREG32(0x657C, 0x85808620); | ||
607 | WREG32(0x6578, 0x602); | ||
608 | WREG32(0x657C, 0xBFF081B8); | ||
609 | WREG32(0x6578, 0x700); | ||
610 | WREG32(0x657C, 0x82608010); | ||
611 | WREG32(0x6578, 0x701); | ||
612 | WREG32(0x657C, 0x85A08600); | ||
613 | WREG32(0x6578, 0x702); | ||
614 | WREG32(0x657C, 0x800081F0); | ||
615 | WREG32(0x6578, 0x800); | ||
616 | WREG32(0x657C, 0x8228BFF8); | ||
617 | WREG32(0x6578, 0x801); | ||
618 | WREG32(0x657C, 0x85E085E0); | ||
619 | WREG32(0x6578, 0x802); | ||
620 | WREG32(0x657C, 0xBFF88228); | ||
621 | WREG32(0x6578, 0x10000); | ||
622 | WREG32(0x657C, 0x82A8BF00); | ||
623 | WREG32(0x6578, 0x10001); | ||
624 | WREG32(0x657C, 0x82A08CC0); | ||
625 | WREG32(0x6578, 0x10002); | ||
626 | WREG32(0x657C, 0x8008BEF8); | ||
627 | WREG32(0x6578, 0x10100); | ||
628 | WREG32(0x657C, 0x81F0BF28); | ||
629 | WREG32(0x6578, 0x10101); | ||
630 | WREG32(0x657C, 0x83608CA0); | ||
631 | WREG32(0x6578, 0x10102); | ||
632 | WREG32(0x657C, 0x8018BED0); | ||
633 | WREG32(0x6578, 0x10200); | ||
634 | WREG32(0x657C, 0x8148BF38); | ||
635 | WREG32(0x6578, 0x10201); | ||
636 | WREG32(0x657C, 0x84408C80); | ||
637 | WREG32(0x6578, 0x10202); | ||
638 | WREG32(0x657C, 0x8008BEB8); | ||
639 | WREG32(0x6578, 0x10300); | ||
640 | WREG32(0x657C, 0x80B0BF78); | ||
641 | WREG32(0x6578, 0x10301); | ||
642 | WREG32(0x657C, 0x85008C20); | ||
643 | WREG32(0x6578, 0x10302); | ||
644 | WREG32(0x657C, 0x8020BEA0); | ||
645 | WREG32(0x6578, 0x10400); | ||
646 | WREG32(0x657C, 0x8028BF90); | ||
647 | WREG32(0x6578, 0x10401); | ||
648 | WREG32(0x657C, 0x85E08BC0); | ||
649 | WREG32(0x6578, 0x10402); | ||
650 | WREG32(0x657C, 0x8018BE90); | ||
651 | WREG32(0x6578, 0x10500); | ||
652 | WREG32(0x657C, 0xBFB8BFB0); | ||
653 | WREG32(0x6578, 0x10501); | ||
654 | WREG32(0x657C, 0x86C08B40); | ||
655 | WREG32(0x6578, 0x10502); | ||
656 | WREG32(0x657C, 0x8010BE90); | ||
657 | WREG32(0x6578, 0x10600); | ||
658 | WREG32(0x657C, 0xBF58BFC8); | ||
659 | WREG32(0x6578, 0x10601); | ||
660 | WREG32(0x657C, 0x87A08AA0); | ||
661 | WREG32(0x6578, 0x10602); | ||
662 | WREG32(0x657C, 0x8010BE98); | ||
663 | WREG32(0x6578, 0x10700); | ||
664 | WREG32(0x657C, 0xBF10BFF0); | ||
665 | WREG32(0x6578, 0x10701); | ||
666 | WREG32(0x657C, 0x886089E0); | ||
667 | WREG32(0x6578, 0x10702); | ||
668 | WREG32(0x657C, 0x8018BEB0); | ||
669 | WREG32(0x6578, 0x10800); | ||
670 | WREG32(0x657C, 0xBED8BFE8); | ||
671 | WREG32(0x6578, 0x10801); | ||
672 | WREG32(0x657C, 0x89408940); | ||
673 | WREG32(0x6578, 0x10802); | ||
674 | WREG32(0x657C, 0xBFE8BED8); | ||
675 | WREG32(0x6578, 0x20000); | ||
676 | WREG32(0x657C, 0x80008000); | ||
677 | WREG32(0x6578, 0x20001); | ||
678 | WREG32(0x657C, 0x90008000); | ||
679 | WREG32(0x6578, 0x20002); | ||
680 | WREG32(0x657C, 0x80008000); | ||
681 | WREG32(0x6578, 0x20003); | ||
682 | WREG32(0x657C, 0x80008000); | ||
683 | WREG32(0x6578, 0x20100); | ||
684 | WREG32(0x657C, 0x80108000); | ||
685 | WREG32(0x6578, 0x20101); | ||
686 | WREG32(0x657C, 0x8FE0BF70); | ||
687 | WREG32(0x6578, 0x20102); | ||
688 | WREG32(0x657C, 0xBFE880C0); | ||
689 | WREG32(0x6578, 0x20103); | ||
690 | WREG32(0x657C, 0x80008000); | ||
691 | WREG32(0x6578, 0x20200); | ||
692 | WREG32(0x657C, 0x8018BFF8); | ||
693 | WREG32(0x6578, 0x20201); | ||
694 | WREG32(0x657C, 0x8F80BF08); | ||
695 | WREG32(0x6578, 0x20202); | ||
696 | WREG32(0x657C, 0xBFD081A0); | ||
697 | WREG32(0x6578, 0x20203); | ||
698 | WREG32(0x657C, 0xBFF88000); | ||
699 | WREG32(0x6578, 0x20300); | ||
700 | WREG32(0x657C, 0x80188000); | ||
701 | WREG32(0x6578, 0x20301); | ||
702 | WREG32(0x657C, 0x8EE0BEC0); | ||
703 | WREG32(0x6578, 0x20302); | ||
704 | WREG32(0x657C, 0xBFB082A0); | ||
705 | WREG32(0x6578, 0x20303); | ||
706 | WREG32(0x657C, 0x80008000); | ||
707 | WREG32(0x6578, 0x20400); | ||
708 | WREG32(0x657C, 0x80188000); | ||
709 | WREG32(0x6578, 0x20401); | ||
710 | WREG32(0x657C, 0x8E00BEA0); | ||
711 | WREG32(0x6578, 0x20402); | ||
712 | WREG32(0x657C, 0xBF8883C0); | ||
713 | WREG32(0x6578, 0x20403); | ||
714 | WREG32(0x657C, 0x80008000); | ||
715 | WREG32(0x6578, 0x20500); | ||
716 | WREG32(0x657C, 0x80188000); | ||
717 | WREG32(0x6578, 0x20501); | ||
718 | WREG32(0x657C, 0x8D00BE90); | ||
719 | WREG32(0x6578, 0x20502); | ||
720 | WREG32(0x657C, 0xBF588500); | ||
721 | WREG32(0x6578, 0x20503); | ||
722 | WREG32(0x657C, 0x80008008); | ||
723 | WREG32(0x6578, 0x20600); | ||
724 | WREG32(0x657C, 0x80188000); | ||
725 | WREG32(0x6578, 0x20601); | ||
726 | WREG32(0x657C, 0x8BC0BE98); | ||
727 | WREG32(0x6578, 0x20602); | ||
728 | WREG32(0x657C, 0xBF308660); | ||
729 | WREG32(0x6578, 0x20603); | ||
730 | WREG32(0x657C, 0x80008008); | ||
731 | WREG32(0x6578, 0x20700); | ||
732 | WREG32(0x657C, 0x80108000); | ||
733 | WREG32(0x6578, 0x20701); | ||
734 | WREG32(0x657C, 0x8A80BEB0); | ||
735 | WREG32(0x6578, 0x20702); | ||
736 | WREG32(0x657C, 0xBF0087C0); | ||
737 | WREG32(0x6578, 0x20703); | ||
738 | WREG32(0x657C, 0x80008008); | ||
739 | WREG32(0x6578, 0x20800); | ||
740 | WREG32(0x657C, 0x80108000); | ||
741 | WREG32(0x6578, 0x20801); | ||
742 | WREG32(0x657C, 0x8920BED0); | ||
743 | WREG32(0x6578, 0x20802); | ||
744 | WREG32(0x657C, 0xBED08920); | ||
745 | WREG32(0x6578, 0x20803); | ||
746 | WREG32(0x657C, 0x80008010); | ||
747 | WREG32(0x6578, 0x30000); | ||
748 | WREG32(0x657C, 0x90008000); | ||
749 | WREG32(0x6578, 0x30001); | ||
750 | WREG32(0x657C, 0x80008000); | ||
751 | WREG32(0x6578, 0x30100); | ||
752 | WREG32(0x657C, 0x8FE0BF90); | ||
753 | WREG32(0x6578, 0x30101); | ||
754 | WREG32(0x657C, 0xBFF880A0); | ||
755 | WREG32(0x6578, 0x30200); | ||
756 | WREG32(0x657C, 0x8F60BF40); | ||
757 | WREG32(0x6578, 0x30201); | ||
758 | WREG32(0x657C, 0xBFE88180); | ||
759 | WREG32(0x6578, 0x30300); | ||
760 | WREG32(0x657C, 0x8EC0BF00); | ||
761 | WREG32(0x6578, 0x30301); | ||
762 | WREG32(0x657C, 0xBFC88280); | ||
763 | WREG32(0x6578, 0x30400); | ||
764 | WREG32(0x657C, 0x8DE0BEE0); | ||
765 | WREG32(0x6578, 0x30401); | ||
766 | WREG32(0x657C, 0xBFA083A0); | ||
767 | WREG32(0x6578, 0x30500); | ||
768 | WREG32(0x657C, 0x8CE0BED0); | ||
769 | WREG32(0x6578, 0x30501); | ||
770 | WREG32(0x657C, 0xBF7884E0); | ||
771 | WREG32(0x6578, 0x30600); | ||
772 | WREG32(0x657C, 0x8BA0BED8); | ||
773 | WREG32(0x6578, 0x30601); | ||
774 | WREG32(0x657C, 0xBF508640); | ||
775 | WREG32(0x6578, 0x30700); | ||
776 | WREG32(0x657C, 0x8A60BEE8); | ||
777 | WREG32(0x6578, 0x30701); | ||
778 | WREG32(0x657C, 0xBF2087A0); | ||
779 | WREG32(0x6578, 0x30800); | ||
780 | WREG32(0x657C, 0x8900BF00); | ||
781 | WREG32(0x6578, 0x30801); | ||
782 | WREG32(0x657C, 0xBF008900); | ||
783 | } | ||
784 | |||
785 | struct rv515_watermark { | ||
786 | u32 lb_request_fifo_depth; | ||
787 | fixed20_12 num_line_pair; | ||
788 | fixed20_12 estimated_width; | ||
789 | fixed20_12 worst_case_latency; | ||
790 | fixed20_12 consumption_rate; | ||
791 | fixed20_12 active_time; | ||
792 | fixed20_12 dbpp; | ||
793 | fixed20_12 priority_mark_max; | ||
794 | fixed20_12 priority_mark; | ||
795 | fixed20_12 sclk; | ||
796 | }; | ||
797 | |||
798 | void rv515_crtc_bandwidth_compute(struct radeon_device *rdev, | ||
799 | struct radeon_crtc *crtc, | ||
800 | struct rv515_watermark *wm) | ||
801 | { | ||
802 | struct drm_display_mode *mode = &crtc->base.mode; | ||
803 | fixed20_12 a, b, c; | ||
804 | fixed20_12 pclk, request_fifo_depth, tolerable_latency, estimated_width; | ||
805 | fixed20_12 consumption_time, line_time, chunk_time, read_delay_latency; | ||
806 | |||
807 | if (!crtc->base.enabled) { | ||
808 | /* FIXME: wouldn't it better to set priority mark to maximum */ | ||
809 | wm->lb_request_fifo_depth = 4; | ||
810 | return; | ||
811 | } | ||
812 | |||
813 | if (crtc->vsc.full > rfixed_const(2)) | ||
814 | wm->num_line_pair.full = rfixed_const(2); | ||
815 | else | ||
816 | wm->num_line_pair.full = rfixed_const(1); | ||
817 | |||
818 | b.full = rfixed_const(mode->crtc_hdisplay); | ||
819 | c.full = rfixed_const(256); | ||
820 | a.full = rfixed_mul(wm->num_line_pair, b); | ||
821 | request_fifo_depth.full = rfixed_div(a, c); | ||
822 | if (a.full < rfixed_const(4)) { | ||
823 | wm->lb_request_fifo_depth = 4; | ||
824 | } else { | ||
825 | wm->lb_request_fifo_depth = rfixed_trunc(request_fifo_depth); | ||
826 | } | ||
827 | |||
828 | /* Determine consumption rate | ||
829 | * pclk = pixel clock period(ns) = 1000 / (mode.clock / 1000) | ||
830 | * vtaps = number of vertical taps, | ||
831 | * vsc = vertical scaling ratio, defined as source/destination | ||
832 | * hsc = horizontal scaling ration, defined as source/destination | ||
833 | */ | ||
834 | a.full = rfixed_const(mode->clock); | ||
835 | b.full = rfixed_const(1000); | ||
836 | a.full = rfixed_div(a, b); | ||
837 | pclk.full = rfixed_div(b, a); | ||
838 | if (crtc->rmx_type != RMX_OFF) { | ||
839 | b.full = rfixed_const(2); | ||
840 | if (crtc->vsc.full > b.full) | ||
841 | b.full = crtc->vsc.full; | ||
842 | b.full = rfixed_mul(b, crtc->hsc); | ||
843 | c.full = rfixed_const(2); | ||
844 | b.full = rfixed_div(b, c); | ||
845 | consumption_time.full = rfixed_div(pclk, b); | ||
846 | } else { | ||
847 | consumption_time.full = pclk.full; | ||
848 | } | ||
849 | a.full = rfixed_const(1); | ||
850 | wm->consumption_rate.full = rfixed_div(a, consumption_time); | ||
851 | |||
852 | |||
853 | /* Determine line time | ||
854 | * LineTime = total time for one line of displayhtotal | ||
855 | * LineTime = total number of horizontal pixels | ||
856 | * pclk = pixel clock period(ns) | ||
857 | */ | ||
858 | a.full = rfixed_const(crtc->base.mode.crtc_htotal); | ||
859 | line_time.full = rfixed_mul(a, pclk); | ||
860 | |||
861 | /* Determine active time | ||
862 | * ActiveTime = time of active region of display within one line, | ||
863 | * hactive = total number of horizontal active pixels | ||
864 | * htotal = total number of horizontal pixels | ||
865 | */ | ||
866 | a.full = rfixed_const(crtc->base.mode.crtc_htotal); | ||
867 | b.full = rfixed_const(crtc->base.mode.crtc_hdisplay); | ||
868 | wm->active_time.full = rfixed_mul(line_time, b); | ||
869 | wm->active_time.full = rfixed_div(wm->active_time, a); | ||
870 | |||
871 | /* Determine chunk time | ||
872 | * ChunkTime = the time it takes the DCP to send one chunk of data | ||
873 | * to the LB which consists of pipeline delay and inter chunk gap | ||
874 | * sclk = system clock(Mhz) | ||
875 | */ | ||
876 | a.full = rfixed_const(600 * 1000); | ||
877 | chunk_time.full = rfixed_div(a, rdev->pm.sclk); | ||
878 | read_delay_latency.full = rfixed_const(1000); | ||
879 | |||
880 | /* Determine the worst case latency | ||
881 | * NumLinePair = Number of line pairs to request(1=2 lines, 2=4 lines) | ||
882 | * WorstCaseLatency = worst case time from urgent to when the MC starts | ||
883 | * to return data | ||
884 | * READ_DELAY_IDLE_MAX = constant of 1us | ||
885 | * ChunkTime = time it takes the DCP to send one chunk of data to the LB | ||
886 | * which consists of pipeline delay and inter chunk gap | ||
887 | */ | ||
888 | if (rfixed_trunc(wm->num_line_pair) > 1) { | ||
889 | a.full = rfixed_const(3); | ||
890 | wm->worst_case_latency.full = rfixed_mul(a, chunk_time); | ||
891 | wm->worst_case_latency.full += read_delay_latency.full; | ||
892 | } else { | ||
893 | wm->worst_case_latency.full = chunk_time.full + read_delay_latency.full; | ||
894 | } | ||
895 | |||
896 | /* Determine the tolerable latency | ||
897 | * TolerableLatency = Any given request has only 1 line time | ||
898 | * for the data to be returned | ||
899 | * LBRequestFifoDepth = Number of chunk requests the LB can | ||
900 | * put into the request FIFO for a display | ||
901 | * LineTime = total time for one line of display | ||
902 | * ChunkTime = the time it takes the DCP to send one chunk | ||
903 | * of data to the LB which consists of | ||
904 | * pipeline delay and inter chunk gap | ||
905 | */ | ||
906 | if ((2+wm->lb_request_fifo_depth) >= rfixed_trunc(request_fifo_depth)) { | ||
907 | tolerable_latency.full = line_time.full; | ||
908 | } else { | ||
909 | tolerable_latency.full = rfixed_const(wm->lb_request_fifo_depth - 2); | ||
910 | tolerable_latency.full = request_fifo_depth.full - tolerable_latency.full; | ||
911 | tolerable_latency.full = rfixed_mul(tolerable_latency, chunk_time); | ||
912 | tolerable_latency.full = line_time.full - tolerable_latency.full; | ||
913 | } | ||
914 | /* We assume worst case 32bits (4 bytes) */ | ||
915 | wm->dbpp.full = rfixed_const(2 * 16); | ||
916 | |||
917 | /* Determine the maximum priority mark | ||
918 | * width = viewport width in pixels | ||
919 | */ | ||
920 | a.full = rfixed_const(16); | ||
921 | wm->priority_mark_max.full = rfixed_const(crtc->base.mode.crtc_hdisplay); | ||
922 | wm->priority_mark_max.full = rfixed_div(wm->priority_mark_max, a); | ||
923 | |||
924 | /* Determine estimated width */ | ||
925 | estimated_width.full = tolerable_latency.full - wm->worst_case_latency.full; | ||
926 | estimated_width.full = rfixed_div(estimated_width, consumption_time); | ||
927 | if (rfixed_trunc(estimated_width) > crtc->base.mode.crtc_hdisplay) { | ||
928 | wm->priority_mark.full = rfixed_const(10); | ||
929 | } else { | ||
930 | a.full = rfixed_const(16); | ||
931 | wm->priority_mark.full = rfixed_div(estimated_width, a); | ||
932 | wm->priority_mark.full = wm->priority_mark_max.full - wm->priority_mark.full; | ||
933 | } | ||
934 | } | ||
935 | |||
936 | void rv515_bandwidth_avivo_update(struct radeon_device *rdev) | ||
937 | { | ||
938 | struct drm_display_mode *mode0 = NULL; | ||
939 | struct drm_display_mode *mode1 = NULL; | ||
940 | struct rv515_watermark wm0; | ||
941 | struct rv515_watermark wm1; | ||
942 | u32 tmp; | ||
943 | fixed20_12 priority_mark02, priority_mark12, fill_rate; | ||
944 | fixed20_12 a, b; | ||
945 | |||
946 | if (rdev->mode_info.crtcs[0]->base.enabled) | ||
947 | mode0 = &rdev->mode_info.crtcs[0]->base.mode; | ||
948 | if (rdev->mode_info.crtcs[1]->base.enabled) | ||
949 | mode1 = &rdev->mode_info.crtcs[1]->base.mode; | ||
950 | rs690_line_buffer_adjust(rdev, mode0, mode1); | ||
951 | |||
952 | rv515_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[0], &wm0); | ||
953 | rv515_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[1], &wm1); | ||
954 | |||
955 | tmp = wm0.lb_request_fifo_depth; | ||
956 | tmp |= wm1.lb_request_fifo_depth << 16; | ||
957 | WREG32(LB_MAX_REQ_OUTSTANDING, tmp); | ||
958 | |||
959 | if (mode0 && mode1) { | ||
960 | if (rfixed_trunc(wm0.dbpp) > 64) | ||
961 | a.full = rfixed_div(wm0.dbpp, wm0.num_line_pair); | ||
962 | else | ||
963 | a.full = wm0.num_line_pair.full; | ||
964 | if (rfixed_trunc(wm1.dbpp) > 64) | ||
965 | b.full = rfixed_div(wm1.dbpp, wm1.num_line_pair); | ||
966 | else | ||
967 | b.full = wm1.num_line_pair.full; | ||
968 | a.full += b.full; | ||
969 | fill_rate.full = rfixed_div(wm0.sclk, a); | ||
970 | if (wm0.consumption_rate.full > fill_rate.full) { | ||
971 | b.full = wm0.consumption_rate.full - fill_rate.full; | ||
972 | b.full = rfixed_mul(b, wm0.active_time); | ||
973 | a.full = rfixed_const(16); | ||
974 | b.full = rfixed_div(b, a); | ||
975 | a.full = rfixed_mul(wm0.worst_case_latency, | ||
976 | wm0.consumption_rate); | ||
977 | priority_mark02.full = a.full + b.full; | ||
978 | } else { | ||
979 | a.full = rfixed_mul(wm0.worst_case_latency, | ||
980 | wm0.consumption_rate); | ||
981 | b.full = rfixed_const(16 * 1000); | ||
982 | priority_mark02.full = rfixed_div(a, b); | ||
983 | } | ||
984 | if (wm1.consumption_rate.full > fill_rate.full) { | ||
985 | b.full = wm1.consumption_rate.full - fill_rate.full; | ||
986 | b.full = rfixed_mul(b, wm1.active_time); | ||
987 | a.full = rfixed_const(16); | ||
988 | b.full = rfixed_div(b, a); | ||
989 | a.full = rfixed_mul(wm1.worst_case_latency, | ||
990 | wm1.consumption_rate); | ||
991 | priority_mark12.full = a.full + b.full; | ||
992 | } else { | ||
993 | a.full = rfixed_mul(wm1.worst_case_latency, | ||
994 | wm1.consumption_rate); | ||
995 | b.full = rfixed_const(16 * 1000); | ||
996 | priority_mark12.full = rfixed_div(a, b); | ||
997 | } | ||
998 | if (wm0.priority_mark.full > priority_mark02.full) | ||
999 | priority_mark02.full = wm0.priority_mark.full; | ||
1000 | if (rfixed_trunc(priority_mark02) < 0) | ||
1001 | priority_mark02.full = 0; | ||
1002 | if (wm0.priority_mark_max.full > priority_mark02.full) | ||
1003 | priority_mark02.full = wm0.priority_mark_max.full; | ||
1004 | if (wm1.priority_mark.full > priority_mark12.full) | ||
1005 | priority_mark12.full = wm1.priority_mark.full; | ||
1006 | if (rfixed_trunc(priority_mark12) < 0) | ||
1007 | priority_mark12.full = 0; | ||
1008 | if (wm1.priority_mark_max.full > priority_mark12.full) | ||
1009 | priority_mark12.full = wm1.priority_mark_max.full; | ||
1010 | WREG32(D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02)); | ||
1011 | WREG32(D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02)); | ||
1012 | WREG32(D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12)); | ||
1013 | WREG32(D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12)); | ||
1014 | } else if (mode0) { | ||
1015 | if (rfixed_trunc(wm0.dbpp) > 64) | ||
1016 | a.full = rfixed_div(wm0.dbpp, wm0.num_line_pair); | ||
1017 | else | ||
1018 | a.full = wm0.num_line_pair.full; | ||
1019 | fill_rate.full = rfixed_div(wm0.sclk, a); | ||
1020 | if (wm0.consumption_rate.full > fill_rate.full) { | ||
1021 | b.full = wm0.consumption_rate.full - fill_rate.full; | ||
1022 | b.full = rfixed_mul(b, wm0.active_time); | ||
1023 | a.full = rfixed_const(16); | ||
1024 | b.full = rfixed_div(b, a); | ||
1025 | a.full = rfixed_mul(wm0.worst_case_latency, | ||
1026 | wm0.consumption_rate); | ||
1027 | priority_mark02.full = a.full + b.full; | ||
1028 | } else { | ||
1029 | a.full = rfixed_mul(wm0.worst_case_latency, | ||
1030 | wm0.consumption_rate); | ||
1031 | b.full = rfixed_const(16); | ||
1032 | priority_mark02.full = rfixed_div(a, b); | ||
1033 | } | ||
1034 | if (wm0.priority_mark.full > priority_mark02.full) | ||
1035 | priority_mark02.full = wm0.priority_mark.full; | ||
1036 | if (rfixed_trunc(priority_mark02) < 0) | ||
1037 | priority_mark02.full = 0; | ||
1038 | if (wm0.priority_mark_max.full > priority_mark02.full) | ||
1039 | priority_mark02.full = wm0.priority_mark_max.full; | ||
1040 | WREG32(D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02)); | ||
1041 | WREG32(D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02)); | ||
1042 | WREG32(D2MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF); | ||
1043 | WREG32(D2MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF); | ||
1044 | } else { | ||
1045 | if (rfixed_trunc(wm1.dbpp) > 64) | ||
1046 | a.full = rfixed_div(wm1.dbpp, wm1.num_line_pair); | ||
1047 | else | ||
1048 | a.full = wm1.num_line_pair.full; | ||
1049 | fill_rate.full = rfixed_div(wm1.sclk, a); | ||
1050 | if (wm1.consumption_rate.full > fill_rate.full) { | ||
1051 | b.full = wm1.consumption_rate.full - fill_rate.full; | ||
1052 | b.full = rfixed_mul(b, wm1.active_time); | ||
1053 | a.full = rfixed_const(16); | ||
1054 | b.full = rfixed_div(b, a); | ||
1055 | a.full = rfixed_mul(wm1.worst_case_latency, | ||
1056 | wm1.consumption_rate); | ||
1057 | priority_mark12.full = a.full + b.full; | ||
1058 | } else { | ||
1059 | a.full = rfixed_mul(wm1.worst_case_latency, | ||
1060 | wm1.consumption_rate); | ||
1061 | b.full = rfixed_const(16 * 1000); | ||
1062 | priority_mark12.full = rfixed_div(a, b); | ||
1063 | } | ||
1064 | if (wm1.priority_mark.full > priority_mark12.full) | ||
1065 | priority_mark12.full = wm1.priority_mark.full; | ||
1066 | if (rfixed_trunc(priority_mark12) < 0) | ||
1067 | priority_mark12.full = 0; | ||
1068 | if (wm1.priority_mark_max.full > priority_mark12.full) | ||
1069 | priority_mark12.full = wm1.priority_mark_max.full; | ||
1070 | WREG32(D1MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF); | ||
1071 | WREG32(D1MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF); | ||
1072 | WREG32(D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12)); | ||
1073 | WREG32(D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12)); | ||
1074 | } | ||
1075 | } | ||
1076 | |||
1077 | void rv515_bandwidth_update(struct radeon_device *rdev) | ||
1078 | { | ||
1079 | uint32_t tmp; | ||
1080 | struct drm_display_mode *mode0 = NULL; | ||
1081 | struct drm_display_mode *mode1 = NULL; | ||
1082 | |||
1083 | if (rdev->mode_info.crtcs[0]->base.enabled) | ||
1084 | mode0 = &rdev->mode_info.crtcs[0]->base.mode; | ||
1085 | if (rdev->mode_info.crtcs[1]->base.enabled) | ||
1086 | mode1 = &rdev->mode_info.crtcs[1]->base.mode; | ||
1087 | /* | ||
1088 | * Set display0/1 priority up in the memory controller for | ||
1089 | * modes if the user specifies HIGH for displaypriority | ||
1090 | * option. | ||
1091 | */ | ||
1092 | if (rdev->disp_priority == 2) { | ||
1093 | tmp = RREG32_MC(MC_MISC_LAT_TIMER); | ||
1094 | tmp &= ~MC_DISP1R_INIT_LAT_MASK; | ||
1095 | tmp &= ~MC_DISP0R_INIT_LAT_MASK; | ||
1096 | if (mode1) | ||
1097 | tmp |= (1 << MC_DISP1R_INIT_LAT_SHIFT); | ||
1098 | if (mode0) | ||
1099 | tmp |= (1 << MC_DISP0R_INIT_LAT_SHIFT); | ||
1100 | WREG32_MC(MC_MISC_LAT_TIMER, tmp); | ||
1101 | } | ||
1102 | rv515_bandwidth_avivo_update(rdev); | ||
1103 | } | ||