path: root/drivers/gpu/drm/radeon/rv515.c
author Paul Mundt <lethal@linux-sh.org> 2009-08-12 22:48:01 -0400
committer Paul Mundt <lethal@linux-sh.org> 2009-08-12 22:48:01 -0400
commit e290861f99131fc42d98012a9ea2dc185f08f8f9 (patch)
tree 5c8b92d095d74d03b281711c81bac54bd59989cd /drivers/gpu/drm/radeon/rv515.c
parent b4a757367d36cebddcd332a4024d92f1e87af370 (diff)
parent dbefd606a3b3634799b625f4900336e61c89e868 (diff)
Merge branch 'sh/stable-updates'
Diffstat (limited to 'drivers/gpu/drm/radeon/rv515.c')
-rw-r--r-- drivers/gpu/drm/radeon/rv515.c | 799
1 file changed, 669 insertions(+), 130 deletions(-)
diff --git a/drivers/gpu/drm/radeon/rv515.c b/drivers/gpu/drm/radeon/rv515.c
index ffea37b1b3e2..fd8f3ca716ea 100644
--- a/drivers/gpu/drm/radeon/rv515.c
+++ b/drivers/gpu/drm/radeon/rv515.c
@@ -27,8 +27,9 @@
27 */ 27 */
28#include <linux/seq_file.h> 28#include <linux/seq_file.h>
29#include "drmP.h" 29#include "drmP.h"
30#include "radeon_reg.h" 30#include "rv515r.h"
31#include "radeon.h" 31#include "radeon.h"
32#include "radeon_share.h"
32 33
33/* rv515 depends on : */ 34/* rv515 depends on : */
34void r100_hdp_reset(struct radeon_device *rdev); 35void r100_hdp_reset(struct radeon_device *rdev);
@@ -99,26 +100,26 @@ int rv515_mc_init(struct radeon_device *rdev)
99 "programming pipes. Bad things might happen.\n"); 100 "programming pipes. Bad things might happen.\n");
100 } 101 }
101 /* Write VRAM size in case we are limiting it */ 102 /* Write VRAM size in case we are limiting it */
102 WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.vram_size); 103 WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.real_vram_size);
103 tmp = REG_SET(RV515_MC_FB_START, rdev->mc.vram_location >> 16); 104 tmp = REG_SET(MC_FB_START, rdev->mc.vram_location >> 16);
104 WREG32(0x134, tmp); 105 WREG32(0x134, tmp);
105 tmp = rdev->mc.vram_location + rdev->mc.vram_size - 1; 106 tmp = rdev->mc.vram_location + rdev->mc.mc_vram_size - 1;
106 tmp = REG_SET(RV515_MC_FB_TOP, tmp >> 16); 107 tmp = REG_SET(MC_FB_TOP, tmp >> 16);
107 tmp |= REG_SET(RV515_MC_FB_START, rdev->mc.vram_location >> 16); 108 tmp |= REG_SET(MC_FB_START, rdev->mc.vram_location >> 16);
108 WREG32_MC(RV515_MC_FB_LOCATION, tmp); 109 WREG32_MC(MC_FB_LOCATION, tmp);
109 WREG32(RS690_HDP_FB_LOCATION, rdev->mc.vram_location >> 16); 110 WREG32(HDP_FB_LOCATION, rdev->mc.vram_location >> 16);
110 WREG32(0x310, rdev->mc.vram_location); 111 WREG32(0x310, rdev->mc.vram_location);
111 if (rdev->flags & RADEON_IS_AGP) { 112 if (rdev->flags & RADEON_IS_AGP) {
112 tmp = rdev->mc.gtt_location + rdev->mc.gtt_size - 1; 113 tmp = rdev->mc.gtt_location + rdev->mc.gtt_size - 1;
113 tmp = REG_SET(RV515_MC_AGP_TOP, tmp >> 16); 114 tmp = REG_SET(MC_AGP_TOP, tmp >> 16);
114 tmp |= REG_SET(RV515_MC_AGP_START, rdev->mc.gtt_location >> 16); 115 tmp |= REG_SET(MC_AGP_START, rdev->mc.gtt_location >> 16);
115 WREG32_MC(RV515_MC_AGP_LOCATION, tmp); 116 WREG32_MC(MC_AGP_LOCATION, tmp);
116 WREG32_MC(RV515_MC_AGP_BASE, rdev->mc.agp_base); 117 WREG32_MC(MC_AGP_BASE, rdev->mc.agp_base);
117 WREG32_MC(RV515_MC_AGP_BASE_2, 0); 118 WREG32_MC(MC_AGP_BASE_2, 0);
118 } else { 119 } else {
119 WREG32_MC(RV515_MC_AGP_LOCATION, 0x0FFFFFFF); 120 WREG32_MC(MC_AGP_LOCATION, 0x0FFFFFFF);
120 WREG32_MC(RV515_MC_AGP_BASE, 0); 121 WREG32_MC(MC_AGP_BASE, 0);
121 WREG32_MC(RV515_MC_AGP_BASE_2, 0); 122 WREG32_MC(MC_AGP_BASE_2, 0);
122 } 123 }
123 return 0; 124 return 0;
124} 125}
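
The hunk above packs the VRAM aperture bounds into the single MC_FB_LOCATION register via REG_SET(MC_FB_START, ...) and REG_SET(MC_FB_TOP, ...), with both addresses expressed in 64KB units (address >> 16). A minimal sketch of that packing, assuming the usual start-in-low-halfword / top-in-high-halfword field layout (the actual masks live in the rv515r.h header this patch switches to, not shown here):

	/* Sketch only: assumes MC_FB_START occupies bits [15:0] and MC_FB_TOP
	 * bits [31:16] of MC_FB_LOCATION, both in 64KB units, matching the
	 * REG_SET(MC_FB_START/MC_FB_TOP, addr >> 16) calls above. */
	#include <stdint.h>

	static uint32_t pack_mc_fb_location(uint64_t vram_location, uint64_t mc_vram_size)
	{
		uint32_t start = (uint32_t)(vram_location >> 16);                  /* 64KB units */
		uint32_t top   = (uint32_t)((vram_location + mc_vram_size - 1) >> 16);

		return (top << 16) | (start & 0xffff);
	}
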
@@ -136,95 +137,67 @@ void rv515_mc_fini(struct radeon_device *rdev)
136 */ 137 */
137void rv515_ring_start(struct radeon_device *rdev) 138void rv515_ring_start(struct radeon_device *rdev)
138{ 139{
139 unsigned gb_tile_config;
140 int r; 140 int r;
141 141
142 /* Sub pixel 1/12 so we can have 4K rendering according to doc */
143 gb_tile_config = R300_ENABLE_TILING | R300_TILE_SIZE_16;
144 switch (rdev->num_gb_pipes) {
145 case 2:
146 gb_tile_config |= R300_PIPE_COUNT_R300;
147 break;
148 case 3:
149 gb_tile_config |= R300_PIPE_COUNT_R420_3P;
150 break;
151 case 4:
152 gb_tile_config |= R300_PIPE_COUNT_R420;
153 break;
154 case 1:
155 default:
156 gb_tile_config |= R300_PIPE_COUNT_RV350;
157 break;
158 }
159
160 r = radeon_ring_lock(rdev, 64); 142 r = radeon_ring_lock(rdev, 64);
161 if (r) { 143 if (r) {
162 return; 144 return;
163 } 145 }
164 radeon_ring_write(rdev, PACKET0(RADEON_ISYNC_CNTL, 0)); 146 radeon_ring_write(rdev, PACKET0(ISYNC_CNTL, 0));
165 radeon_ring_write(rdev,
166 RADEON_ISYNC_ANY2D_IDLE3D |
167 RADEON_ISYNC_ANY3D_IDLE2D |
168 RADEON_ISYNC_WAIT_IDLEGUI |
169 RADEON_ISYNC_CPSCRATCH_IDLEGUI);
170 radeon_ring_write(rdev, PACKET0(R300_GB_TILE_CONFIG, 0));
171 radeon_ring_write(rdev, gb_tile_config);
172 radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0));
173 radeon_ring_write(rdev, 147 radeon_ring_write(rdev,
174 RADEON_WAIT_2D_IDLECLEAN | 148 ISYNC_ANY2D_IDLE3D |
175 RADEON_WAIT_3D_IDLECLEAN); 149 ISYNC_ANY3D_IDLE2D |
150 ISYNC_WAIT_IDLEGUI |
151 ISYNC_CPSCRATCH_IDLEGUI);
152 radeon_ring_write(rdev, PACKET0(WAIT_UNTIL, 0));
153 radeon_ring_write(rdev, WAIT_2D_IDLECLEAN | WAIT_3D_IDLECLEAN);
176 radeon_ring_write(rdev, PACKET0(0x170C, 0)); 154 radeon_ring_write(rdev, PACKET0(0x170C, 0));
177 radeon_ring_write(rdev, 1 << 31); 155 radeon_ring_write(rdev, 1 << 31);
178 radeon_ring_write(rdev, PACKET0(R300_GB_SELECT, 0)); 156 radeon_ring_write(rdev, PACKET0(GB_SELECT, 0));
179 radeon_ring_write(rdev, 0); 157 radeon_ring_write(rdev, 0);
180 radeon_ring_write(rdev, PACKET0(R300_GB_ENABLE, 0)); 158 radeon_ring_write(rdev, PACKET0(GB_ENABLE, 0));
181 radeon_ring_write(rdev, 0); 159 radeon_ring_write(rdev, 0);
182 radeon_ring_write(rdev, PACKET0(0x42C8, 0)); 160 radeon_ring_write(rdev, PACKET0(0x42C8, 0));
183 radeon_ring_write(rdev, (1 << rdev->num_gb_pipes) - 1); 161 radeon_ring_write(rdev, (1 << rdev->num_gb_pipes) - 1);
184 radeon_ring_write(rdev, PACKET0(R500_VAP_INDEX_OFFSET, 0)); 162 radeon_ring_write(rdev, PACKET0(VAP_INDEX_OFFSET, 0));
185 radeon_ring_write(rdev, 0); 163 radeon_ring_write(rdev, 0);
186 radeon_ring_write(rdev, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0)); 164 radeon_ring_write(rdev, PACKET0(RB3D_DSTCACHE_CTLSTAT, 0));
187 radeon_ring_write(rdev, R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE); 165 radeon_ring_write(rdev, RB3D_DC_FLUSH | RB3D_DC_FREE);
188 radeon_ring_write(rdev, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0)); 166 radeon_ring_write(rdev, PACKET0(ZB_ZCACHE_CTLSTAT, 0));
189 radeon_ring_write(rdev, R300_ZC_FLUSH | R300_ZC_FREE); 167 radeon_ring_write(rdev, ZC_FLUSH | ZC_FREE);
190 radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0)); 168 radeon_ring_write(rdev, PACKET0(WAIT_UNTIL, 0));
191 radeon_ring_write(rdev, 169 radeon_ring_write(rdev, WAIT_2D_IDLECLEAN | WAIT_3D_IDLECLEAN);
192 RADEON_WAIT_2D_IDLECLEAN | 170 radeon_ring_write(rdev, PACKET0(GB_AA_CONFIG, 0));
193 RADEON_WAIT_3D_IDLECLEAN);
194 radeon_ring_write(rdev, PACKET0(R300_GB_AA_CONFIG, 0));
195 radeon_ring_write(rdev, 0); 171 radeon_ring_write(rdev, 0);
196 radeon_ring_write(rdev, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0)); 172 radeon_ring_write(rdev, PACKET0(RB3D_DSTCACHE_CTLSTAT, 0));
197 radeon_ring_write(rdev, R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE); 173 radeon_ring_write(rdev, RB3D_DC_FLUSH | RB3D_DC_FREE);
198 radeon_ring_write(rdev, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0)); 174 radeon_ring_write(rdev, PACKET0(ZB_ZCACHE_CTLSTAT, 0));
199 radeon_ring_write(rdev, R300_ZC_FLUSH | R300_ZC_FREE); 175 radeon_ring_write(rdev, ZC_FLUSH | ZC_FREE);
200 radeon_ring_write(rdev, PACKET0(R300_GB_MSPOS0, 0)); 176 radeon_ring_write(rdev, PACKET0(GB_MSPOS0, 0));
201 radeon_ring_write(rdev,
202 ((6 << R300_MS_X0_SHIFT) |
203 (6 << R300_MS_Y0_SHIFT) |
204 (6 << R300_MS_X1_SHIFT) |
205 (6 << R300_MS_Y1_SHIFT) |
206 (6 << R300_MS_X2_SHIFT) |
207 (6 << R300_MS_Y2_SHIFT) |
208 (6 << R300_MSBD0_Y_SHIFT) |
209 (6 << R300_MSBD0_X_SHIFT)));
210 radeon_ring_write(rdev, PACKET0(R300_GB_MSPOS1, 0));
211 radeon_ring_write(rdev, 177 radeon_ring_write(rdev,
212 ((6 << R300_MS_X3_SHIFT) | 178 ((6 << MS_X0_SHIFT) |
213 (6 << R300_MS_Y3_SHIFT) | 179 (6 << MS_Y0_SHIFT) |
214 (6 << R300_MS_X4_SHIFT) | 180 (6 << MS_X1_SHIFT) |
215 (6 << R300_MS_Y4_SHIFT) | 181 (6 << MS_Y1_SHIFT) |
216 (6 << R300_MS_X5_SHIFT) | 182 (6 << MS_X2_SHIFT) |
217 (6 << R300_MS_Y5_SHIFT) | 183 (6 << MS_Y2_SHIFT) |
218 (6 << R300_MSBD1_SHIFT))); 184 (6 << MSBD0_Y_SHIFT) |
219 radeon_ring_write(rdev, PACKET0(R300_GA_ENHANCE, 0)); 185 (6 << MSBD0_X_SHIFT)));
220 radeon_ring_write(rdev, R300_GA_DEADLOCK_CNTL | R300_GA_FASTSYNC_CNTL); 186 radeon_ring_write(rdev, PACKET0(GB_MSPOS1, 0));
221 radeon_ring_write(rdev, PACKET0(R300_GA_POLY_MODE, 0));
222 radeon_ring_write(rdev, 187 radeon_ring_write(rdev,
223 R300_FRONT_PTYPE_TRIANGE | R300_BACK_PTYPE_TRIANGE); 188 ((6 << MS_X3_SHIFT) |
224 radeon_ring_write(rdev, PACKET0(R300_GA_ROUND_MODE, 0)); 189 (6 << MS_Y3_SHIFT) |
225 radeon_ring_write(rdev, 190 (6 << MS_X4_SHIFT) |
226 R300_GEOMETRY_ROUND_NEAREST | 191 (6 << MS_Y4_SHIFT) |
227 R300_COLOR_ROUND_NEAREST); 192 (6 << MS_X5_SHIFT) |
193 (6 << MS_Y5_SHIFT) |
194 (6 << MSBD1_SHIFT)));
195 radeon_ring_write(rdev, PACKET0(GA_ENHANCE, 0));
196 radeon_ring_write(rdev, GA_DEADLOCK_CNTL | GA_FASTSYNC_CNTL);
197 radeon_ring_write(rdev, PACKET0(GA_POLY_MODE, 0));
198 radeon_ring_write(rdev, FRONT_PTYPE_TRIANGE | BACK_PTYPE_TRIANGE);
199 radeon_ring_write(rdev, PACKET0(GA_ROUND_MODE, 0));
200 radeon_ring_write(rdev, GEOMETRY_ROUND_NEAREST | COLOR_ROUND_NEAREST);
228 radeon_ring_write(rdev, PACKET0(0x20C8, 0)); 201 radeon_ring_write(rdev, PACKET0(0x20C8, 0));
229 radeon_ring_write(rdev, 0); 202 radeon_ring_write(rdev, 0);
230 radeon_ring_unlock_commit(rdev); 203 radeon_ring_unlock_commit(rdev);
@@ -242,8 +215,8 @@ int rv515_mc_wait_for_idle(struct radeon_device *rdev)
242 215
243 for (i = 0; i < rdev->usec_timeout; i++) { 216 for (i = 0; i < rdev->usec_timeout; i++) {
244 /* read MC_STATUS */ 217 /* read MC_STATUS */
245 tmp = RREG32_MC(RV515_MC_STATUS); 218 tmp = RREG32_MC(MC_STATUS);
246 if (tmp & RV515_MC_STATUS_IDLE) { 219 if (tmp & MC_STATUS_IDLE) {
247 return 0; 220 return 0;
248 } 221 }
249 DRM_UDELAY(1); 222 DRM_UDELAY(1);
@@ -291,33 +264,33 @@ int rv515_ga_reset(struct radeon_device *rdev)
291 reinit_cp = rdev->cp.ready; 264 reinit_cp = rdev->cp.ready;
292 rdev->cp.ready = false; 265 rdev->cp.ready = false;
293 for (i = 0; i < rdev->usec_timeout; i++) { 266 for (i = 0; i < rdev->usec_timeout; i++) {
294 WREG32(RADEON_CP_CSQ_MODE, 0); 267 WREG32(CP_CSQ_MODE, 0);
295 WREG32(RADEON_CP_CSQ_CNTL, 0); 268 WREG32(CP_CSQ_CNTL, 0);
296 WREG32(RADEON_RBBM_SOFT_RESET, 0x32005); 269 WREG32(RBBM_SOFT_RESET, 0x32005);
297 (void)RREG32(RADEON_RBBM_SOFT_RESET); 270 (void)RREG32(RBBM_SOFT_RESET);
298 udelay(200); 271 udelay(200);
299 WREG32(RADEON_RBBM_SOFT_RESET, 0); 272 WREG32(RBBM_SOFT_RESET, 0);
300 /* Wait to prevent race in RBBM_STATUS */ 273 /* Wait to prevent race in RBBM_STATUS */
301 mdelay(1); 274 mdelay(1);
302 tmp = RREG32(RADEON_RBBM_STATUS); 275 tmp = RREG32(RBBM_STATUS);
303 if (tmp & ((1 << 20) | (1 << 26))) { 276 if (tmp & ((1 << 20) | (1 << 26))) {
304 DRM_ERROR("VAP & CP still busy (RBBM_STATUS=0x%08X)\n", tmp); 277 DRM_ERROR("VAP & CP still busy (RBBM_STATUS=0x%08X)\n", tmp);
305 /* GA still busy soft reset it */ 278 /* GA still busy soft reset it */
306 WREG32(0x429C, 0x200); 279 WREG32(0x429C, 0x200);
307 WREG32(R300_VAP_PVS_STATE_FLUSH_REG, 0); 280 WREG32(VAP_PVS_STATE_FLUSH_REG, 0);
308 WREG32(0x43E0, 0); 281 WREG32(0x43E0, 0);
309 WREG32(0x43E4, 0); 282 WREG32(0x43E4, 0);
310 WREG32(0x24AC, 0); 283 WREG32(0x24AC, 0);
311 } 284 }
312 /* Wait to prevent race in RBBM_STATUS */ 285 /* Wait to prevent race in RBBM_STATUS */
313 mdelay(1); 286 mdelay(1);
314 tmp = RREG32(RADEON_RBBM_STATUS); 287 tmp = RREG32(RBBM_STATUS);
315 if (!(tmp & ((1 << 20) | (1 << 26)))) { 288 if (!(tmp & ((1 << 20) | (1 << 26)))) {
316 break; 289 break;
317 } 290 }
318 } 291 }
319 for (i = 0; i < rdev->usec_timeout; i++) { 292 for (i = 0; i < rdev->usec_timeout; i++) {
320 tmp = RREG32(RADEON_RBBM_STATUS); 293 tmp = RREG32(RBBM_STATUS);
321 if (!(tmp & ((1 << 20) | (1 << 26)))) { 294 if (!(tmp & ((1 << 20) | (1 << 26)))) {
322 DRM_INFO("GA reset succeed (RBBM_STATUS=0x%08X)\n", 295 DRM_INFO("GA reset succeed (RBBM_STATUS=0x%08X)\n",
323 tmp); 296 tmp);
@@ -331,7 +304,7 @@ int rv515_ga_reset(struct radeon_device *rdev)
331 } 304 }
332 DRM_UDELAY(1); 305 DRM_UDELAY(1);
333 } 306 }
334 tmp = RREG32(RADEON_RBBM_STATUS); 307 tmp = RREG32(RBBM_STATUS);
335 DRM_ERROR("Failed to reset GA ! (RBBM_STATUS=0x%08X)\n", tmp); 308 DRM_ERROR("Failed to reset GA ! (RBBM_STATUS=0x%08X)\n", tmp);
336 return -1; 309 return -1;
337} 310}
@@ -341,7 +314,7 @@ int rv515_gpu_reset(struct radeon_device *rdev)
341 uint32_t status; 314 uint32_t status;
342 315
343 /* reset order likely matter */ 316 /* reset order likely matter */
344 status = RREG32(RADEON_RBBM_STATUS); 317 status = RREG32(RBBM_STATUS);
345 /* reset HDP */ 318 /* reset HDP */
346 r100_hdp_reset(rdev); 319 r100_hdp_reset(rdev);
347 /* reset rb2d */ 320 /* reset rb2d */
@@ -353,12 +326,12 @@ int rv515_gpu_reset(struct radeon_device *rdev)
353 rv515_ga_reset(rdev); 326 rv515_ga_reset(rdev);
354 } 327 }
355 /* reset CP */ 328 /* reset CP */
356 status = RREG32(RADEON_RBBM_STATUS); 329 status = RREG32(RBBM_STATUS);
357 if (status & (1 << 16)) { 330 if (status & (1 << 16)) {
358 r100_cp_reset(rdev); 331 r100_cp_reset(rdev);
359 } 332 }
360 /* Check if GPU is idle */ 333 /* Check if GPU is idle */
361 status = RREG32(RADEON_RBBM_STATUS); 334 status = RREG32(RBBM_STATUS);
362 if (status & (1 << 31)) { 335 if (status & (1 << 31)) {
363 DRM_ERROR("Failed to reset GPU (RBBM_STATUS=0x%08X)\n", status); 336 DRM_ERROR("Failed to reset GPU (RBBM_STATUS=0x%08X)\n", status);
364 return -1; 337 return -1;
@@ -377,8 +350,7 @@ static void rv515_vram_get_type(struct radeon_device *rdev)
377 350
378 rdev->mc.vram_width = 128; 351 rdev->mc.vram_width = 128;
379 rdev->mc.vram_is_ddr = true; 352 rdev->mc.vram_is_ddr = true;
380 tmp = RREG32_MC(RV515_MC_CNTL); 353 tmp = RREG32_MC(RV515_MC_CNTL) & MEM_NUM_CHANNELS_MASK;
381 tmp &= RV515_MEM_NUM_CHANNELS_MASK;
382 switch (tmp) { 354 switch (tmp) {
383 case 0: 355 case 0:
384 rdev->mc.vram_width = 64; 356 rdev->mc.vram_width = 64;
@@ -394,11 +366,17 @@ static void rv515_vram_get_type(struct radeon_device *rdev)
394 366
395void rv515_vram_info(struct radeon_device *rdev) 367void rv515_vram_info(struct radeon_device *rdev)
396{ 368{
369 fixed20_12 a;
370
397 rv515_vram_get_type(rdev); 371 rv515_vram_get_type(rdev);
398 rdev->mc.vram_size = RREG32(RADEON_CONFIG_MEMSIZE);
399 372
400 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0); 373 r100_vram_init_sizes(rdev);
401 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0); 374 /* FIXME: we should enforce default clock in case GPU is not in
375 * default setup
376 */
377 a.full = rfixed_const(100);
378 rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
379 rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
402} 380}
403 381
404 382
@@ -409,35 +387,35 @@ uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg)
409{ 387{
410 uint32_t r; 388 uint32_t r;
411 389
412 WREG32(R520_MC_IND_INDEX, 0x7f0000 | (reg & 0xffff)); 390 WREG32(MC_IND_INDEX, 0x7f0000 | (reg & 0xffff));
413 r = RREG32(R520_MC_IND_DATA); 391 r = RREG32(MC_IND_DATA);
414 WREG32(R520_MC_IND_INDEX, 0); 392 WREG32(MC_IND_INDEX, 0);
415 return r; 393 return r;
416} 394}
417 395
418void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) 396void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
419{ 397{
420 WREG32(R520_MC_IND_INDEX, 0xff0000 | ((reg) & 0xffff)); 398 WREG32(MC_IND_INDEX, 0xff0000 | ((reg) & 0xffff));
421 WREG32(R520_MC_IND_DATA, (v)); 399 WREG32(MC_IND_DATA, (v));
422 WREG32(R520_MC_IND_INDEX, 0); 400 WREG32(MC_IND_INDEX, 0);
423} 401}
424 402
425uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg) 403uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg)
426{ 404{
427 uint32_t r; 405 uint32_t r;
428 406
429 WREG32(RADEON_PCIE_INDEX, ((reg) & 0x7ff)); 407 WREG32(PCIE_INDEX, ((reg) & 0x7ff));
430 (void)RREG32(RADEON_PCIE_INDEX); 408 (void)RREG32(PCIE_INDEX);
431 r = RREG32(RADEON_PCIE_DATA); 409 r = RREG32(PCIE_DATA);
432 return r; 410 return r;
433} 411}
434 412
435void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) 413void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
436{ 414{
437 WREG32(RADEON_PCIE_INDEX, ((reg) & 0x7ff)); 415 WREG32(PCIE_INDEX, ((reg) & 0x7ff));
438 (void)RREG32(RADEON_PCIE_INDEX); 416 (void)RREG32(PCIE_INDEX);
439 WREG32(RADEON_PCIE_DATA, (v)); 417 WREG32(PCIE_DATA, (v));
440 (void)RREG32(RADEON_PCIE_DATA); 418 (void)RREG32(PCIE_DATA);
441} 419}
442 420
443 421
@@ -452,13 +430,13 @@ static int rv515_debugfs_pipes_info(struct seq_file *m, void *data)
452 struct radeon_device *rdev = dev->dev_private; 430 struct radeon_device *rdev = dev->dev_private;
453 uint32_t tmp; 431 uint32_t tmp;
454 432
455 tmp = RREG32(R400_GB_PIPE_SELECT); 433 tmp = RREG32(GB_PIPE_SELECT);
456 seq_printf(m, "GB_PIPE_SELECT 0x%08x\n", tmp); 434 seq_printf(m, "GB_PIPE_SELECT 0x%08x\n", tmp);
457 tmp = RREG32(R500_SU_REG_DEST); 435 tmp = RREG32(SU_REG_DEST);
458 seq_printf(m, "SU_REG_DEST 0x%08x\n", tmp); 436 seq_printf(m, "SU_REG_DEST 0x%08x\n", tmp);
459 tmp = RREG32(R300_GB_TILE_CONFIG); 437 tmp = RREG32(GB_TILE_CONFIG);
460 seq_printf(m, "GB_TILE_CONFIG 0x%08x\n", tmp); 438 seq_printf(m, "GB_TILE_CONFIG 0x%08x\n", tmp);
461 tmp = RREG32(R300_DST_PIPE_CONFIG); 439 tmp = RREG32(DST_PIPE_CONFIG);
462 seq_printf(m, "DST_PIPE_CONFIG 0x%08x\n", tmp); 440 seq_printf(m, "DST_PIPE_CONFIG 0x%08x\n", tmp);
463 return 0; 441 return 0;
464} 442}
@@ -509,9 +487,9 @@ int rv515_debugfs_ga_info_init(struct radeon_device *rdev)
509/* 487/*
510 * Asic initialization 488 * Asic initialization
511 */ 489 */
512static const unsigned r500_reg_safe_bm[159] = { 490static const unsigned r500_reg_safe_bm[219] = {
491 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
513 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 492 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
514 0xFFFFFFBF, 0xFFFFFFFF, 0xFFFFFFBF, 0xFFFFFFFF,
515 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 493 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
516 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 494 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
517 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 495 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
@@ -549,14 +527,575 @@ static const unsigned r500_reg_safe_bm[159] = {
549 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 527 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
550 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFF80FFFF, 528 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFF80FFFF,
551 0x00000000, 0x00000000, 0x00000000, 0x00000000, 529 0x00000000, 0x00000000, 0x00000000, 0x00000000,
552 0x0003FC01, 0x3FFFFCF8, 0xFE800B19, 530 0x0003FC01, 0x3FFFFCF8, 0xFE800B19, 0xFFFFFFFF,
531 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
532 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
533 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
534 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
535 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
536 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
537 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
538 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
539 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
540 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
541 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
542 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
543 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
544 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
545 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
553}; 546};
554 547
555
556
557int rv515_init(struct radeon_device *rdev) 548int rv515_init(struct radeon_device *rdev)
558{ 549{
559 rdev->config.r300.reg_safe_bm = r500_reg_safe_bm; 550 rdev->config.r300.reg_safe_bm = r500_reg_safe_bm;
560 rdev->config.r300.reg_safe_bm_size = ARRAY_SIZE(r500_reg_safe_bm); 551 rdev->config.r300.reg_safe_bm_size = ARRAY_SIZE(r500_reg_safe_bm);
561 return 0; 552 return 0;
562} 553}
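
r500_reg_safe_bm grows from 159 to 219 words in this patch; each bit marks one dword register as safe for userspace command streams, so 219 words cover byte offsets up to roughly 0x6D7C, which takes in the AVIVO display registers used later in the patch. A hedged sketch of how such a bitmap is typically indexed (hypothetical helper; the real test is performed by the command-stream parser, not by this file):

	#include <stdbool.h>

	/* Sketch only: one bit per 32-bit register. reg is a byte offset, so
	 * reg >> 2 is the register index; the high bits select the bitmap word
	 * and the low five bits select the bit within it. */
	static bool r500_reg_is_safe(const unsigned *safe_bm, unsigned reg)
	{
		unsigned idx = reg >> 2;

		return (safe_bm[idx >> 5] >> (idx & 31)) & 1;
	}
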
554
555void atom_rv515_force_tv_scaler(struct radeon_device *rdev)
556{
557
558 WREG32(0x659C, 0x0);
559 WREG32(0x6594, 0x705);
560 WREG32(0x65A4, 0x10001);
561 WREG32(0x65D8, 0x0);
562 WREG32(0x65B0, 0x0);
563 WREG32(0x65C0, 0x0);
564 WREG32(0x65D4, 0x0);
565 WREG32(0x6578, 0x0);
566 WREG32(0x657C, 0x841880A8);
567 WREG32(0x6578, 0x1);
568 WREG32(0x657C, 0x84208680);
569 WREG32(0x6578, 0x2);
570 WREG32(0x657C, 0xBFF880B0);
571 WREG32(0x6578, 0x100);
572 WREG32(0x657C, 0x83D88088);
573 WREG32(0x6578, 0x101);
574 WREG32(0x657C, 0x84608680);
575 WREG32(0x6578, 0x102);
576 WREG32(0x657C, 0xBFF080D0);
577 WREG32(0x6578, 0x200);
578 WREG32(0x657C, 0x83988068);
579 WREG32(0x6578, 0x201);
580 WREG32(0x657C, 0x84A08680);
581 WREG32(0x6578, 0x202);
582 WREG32(0x657C, 0xBFF080F8);
583 WREG32(0x6578, 0x300);
584 WREG32(0x657C, 0x83588058);
585 WREG32(0x6578, 0x301);
586 WREG32(0x657C, 0x84E08660);
587 WREG32(0x6578, 0x302);
588 WREG32(0x657C, 0xBFF88120);
589 WREG32(0x6578, 0x400);
590 WREG32(0x657C, 0x83188040);
591 WREG32(0x6578, 0x401);
592 WREG32(0x657C, 0x85008660);
593 WREG32(0x6578, 0x402);
594 WREG32(0x657C, 0xBFF88150);
595 WREG32(0x6578, 0x500);
596 WREG32(0x657C, 0x82D88030);
597 WREG32(0x6578, 0x501);
598 WREG32(0x657C, 0x85408640);
599 WREG32(0x6578, 0x502);
600 WREG32(0x657C, 0xBFF88180);
601 WREG32(0x6578, 0x600);
602 WREG32(0x657C, 0x82A08018);
603 WREG32(0x6578, 0x601);
604 WREG32(0x657C, 0x85808620);
605 WREG32(0x6578, 0x602);
606 WREG32(0x657C, 0xBFF081B8);
607 WREG32(0x6578, 0x700);
608 WREG32(0x657C, 0x82608010);
609 WREG32(0x6578, 0x701);
610 WREG32(0x657C, 0x85A08600);
611 WREG32(0x6578, 0x702);
612 WREG32(0x657C, 0x800081F0);
613 WREG32(0x6578, 0x800);
614 WREG32(0x657C, 0x8228BFF8);
615 WREG32(0x6578, 0x801);
616 WREG32(0x657C, 0x85E085E0);
617 WREG32(0x6578, 0x802);
618 WREG32(0x657C, 0xBFF88228);
619 WREG32(0x6578, 0x10000);
620 WREG32(0x657C, 0x82A8BF00);
621 WREG32(0x6578, 0x10001);
622 WREG32(0x657C, 0x82A08CC0);
623 WREG32(0x6578, 0x10002);
624 WREG32(0x657C, 0x8008BEF8);
625 WREG32(0x6578, 0x10100);
626 WREG32(0x657C, 0x81F0BF28);
627 WREG32(0x6578, 0x10101);
628 WREG32(0x657C, 0x83608CA0);
629 WREG32(0x6578, 0x10102);
630 WREG32(0x657C, 0x8018BED0);
631 WREG32(0x6578, 0x10200);
632 WREG32(0x657C, 0x8148BF38);
633 WREG32(0x6578, 0x10201);
634 WREG32(0x657C, 0x84408C80);
635 WREG32(0x6578, 0x10202);
636 WREG32(0x657C, 0x8008BEB8);
637 WREG32(0x6578, 0x10300);
638 WREG32(0x657C, 0x80B0BF78);
639 WREG32(0x6578, 0x10301);
640 WREG32(0x657C, 0x85008C20);
641 WREG32(0x6578, 0x10302);
642 WREG32(0x657C, 0x8020BEA0);
643 WREG32(0x6578, 0x10400);
644 WREG32(0x657C, 0x8028BF90);
645 WREG32(0x6578, 0x10401);
646 WREG32(0x657C, 0x85E08BC0);
647 WREG32(0x6578, 0x10402);
648 WREG32(0x657C, 0x8018BE90);
649 WREG32(0x6578, 0x10500);
650 WREG32(0x657C, 0xBFB8BFB0);
651 WREG32(0x6578, 0x10501);
652 WREG32(0x657C, 0x86C08B40);
653 WREG32(0x6578, 0x10502);
654 WREG32(0x657C, 0x8010BE90);
655 WREG32(0x6578, 0x10600);
656 WREG32(0x657C, 0xBF58BFC8);
657 WREG32(0x6578, 0x10601);
658 WREG32(0x657C, 0x87A08AA0);
659 WREG32(0x6578, 0x10602);
660 WREG32(0x657C, 0x8010BE98);
661 WREG32(0x6578, 0x10700);
662 WREG32(0x657C, 0xBF10BFF0);
663 WREG32(0x6578, 0x10701);
664 WREG32(0x657C, 0x886089E0);
665 WREG32(0x6578, 0x10702);
666 WREG32(0x657C, 0x8018BEB0);
667 WREG32(0x6578, 0x10800);
668 WREG32(0x657C, 0xBED8BFE8);
669 WREG32(0x6578, 0x10801);
670 WREG32(0x657C, 0x89408940);
671 WREG32(0x6578, 0x10802);
672 WREG32(0x657C, 0xBFE8BED8);
673 WREG32(0x6578, 0x20000);
674 WREG32(0x657C, 0x80008000);
675 WREG32(0x6578, 0x20001);
676 WREG32(0x657C, 0x90008000);
677 WREG32(0x6578, 0x20002);
678 WREG32(0x657C, 0x80008000);
679 WREG32(0x6578, 0x20003);
680 WREG32(0x657C, 0x80008000);
681 WREG32(0x6578, 0x20100);
682 WREG32(0x657C, 0x80108000);
683 WREG32(0x6578, 0x20101);
684 WREG32(0x657C, 0x8FE0BF70);
685 WREG32(0x6578, 0x20102);
686 WREG32(0x657C, 0xBFE880C0);
687 WREG32(0x6578, 0x20103);
688 WREG32(0x657C, 0x80008000);
689 WREG32(0x6578, 0x20200);
690 WREG32(0x657C, 0x8018BFF8);
691 WREG32(0x6578, 0x20201);
692 WREG32(0x657C, 0x8F80BF08);
693 WREG32(0x6578, 0x20202);
694 WREG32(0x657C, 0xBFD081A0);
695 WREG32(0x6578, 0x20203);
696 WREG32(0x657C, 0xBFF88000);
697 WREG32(0x6578, 0x20300);
698 WREG32(0x657C, 0x80188000);
699 WREG32(0x6578, 0x20301);
700 WREG32(0x657C, 0x8EE0BEC0);
701 WREG32(0x6578, 0x20302);
702 WREG32(0x657C, 0xBFB082A0);
703 WREG32(0x6578, 0x20303);
704 WREG32(0x657C, 0x80008000);
705 WREG32(0x6578, 0x20400);
706 WREG32(0x657C, 0x80188000);
707 WREG32(0x6578, 0x20401);
708 WREG32(0x657C, 0x8E00BEA0);
709 WREG32(0x6578, 0x20402);
710 WREG32(0x657C, 0xBF8883C0);
711 WREG32(0x6578, 0x20403);
712 WREG32(0x657C, 0x80008000);
713 WREG32(0x6578, 0x20500);
714 WREG32(0x657C, 0x80188000);
715 WREG32(0x6578, 0x20501);
716 WREG32(0x657C, 0x8D00BE90);
717 WREG32(0x6578, 0x20502);
718 WREG32(0x657C, 0xBF588500);
719 WREG32(0x6578, 0x20503);
720 WREG32(0x657C, 0x80008008);
721 WREG32(0x6578, 0x20600);
722 WREG32(0x657C, 0x80188000);
723 WREG32(0x6578, 0x20601);
724 WREG32(0x657C, 0x8BC0BE98);
725 WREG32(0x6578, 0x20602);
726 WREG32(0x657C, 0xBF308660);
727 WREG32(0x6578, 0x20603);
728 WREG32(0x657C, 0x80008008);
729 WREG32(0x6578, 0x20700);
730 WREG32(0x657C, 0x80108000);
731 WREG32(0x6578, 0x20701);
732 WREG32(0x657C, 0x8A80BEB0);
733 WREG32(0x6578, 0x20702);
734 WREG32(0x657C, 0xBF0087C0);
735 WREG32(0x6578, 0x20703);
736 WREG32(0x657C, 0x80008008);
737 WREG32(0x6578, 0x20800);
738 WREG32(0x657C, 0x80108000);
739 WREG32(0x6578, 0x20801);
740 WREG32(0x657C, 0x8920BED0);
741 WREG32(0x6578, 0x20802);
742 WREG32(0x657C, 0xBED08920);
743 WREG32(0x6578, 0x20803);
744 WREG32(0x657C, 0x80008010);
745 WREG32(0x6578, 0x30000);
746 WREG32(0x657C, 0x90008000);
747 WREG32(0x6578, 0x30001);
748 WREG32(0x657C, 0x80008000);
749 WREG32(0x6578, 0x30100);
750 WREG32(0x657C, 0x8FE0BF90);
751 WREG32(0x6578, 0x30101);
752 WREG32(0x657C, 0xBFF880A0);
753 WREG32(0x6578, 0x30200);
754 WREG32(0x657C, 0x8F60BF40);
755 WREG32(0x6578, 0x30201);
756 WREG32(0x657C, 0xBFE88180);
757 WREG32(0x6578, 0x30300);
758 WREG32(0x657C, 0x8EC0BF00);
759 WREG32(0x6578, 0x30301);
760 WREG32(0x657C, 0xBFC88280);
761 WREG32(0x6578, 0x30400);
762 WREG32(0x657C, 0x8DE0BEE0);
763 WREG32(0x6578, 0x30401);
764 WREG32(0x657C, 0xBFA083A0);
765 WREG32(0x6578, 0x30500);
766 WREG32(0x657C, 0x8CE0BED0);
767 WREG32(0x6578, 0x30501);
768 WREG32(0x657C, 0xBF7884E0);
769 WREG32(0x6578, 0x30600);
770 WREG32(0x657C, 0x8BA0BED8);
771 WREG32(0x6578, 0x30601);
772 WREG32(0x657C, 0xBF508640);
773 WREG32(0x6578, 0x30700);
774 WREG32(0x657C, 0x8A60BEE8);
775 WREG32(0x6578, 0x30701);
776 WREG32(0x657C, 0xBF2087A0);
777 WREG32(0x6578, 0x30800);
778 WREG32(0x657C, 0x8900BF00);
779 WREG32(0x6578, 0x30801);
780 WREG32(0x657C, 0xBF008900);
781}
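
Most of atom_rv515_force_tv_scaler above is a long run of paired writes: an index into 0x6578 followed by a coefficient into 0x657C. A hedged sketch of the same access pattern written as a table-driven loop (hypothetical helper and table names; only the first two index/coefficient pairs from above are shown, the rest are elided):

	static const struct { u32 index; u32 coeff; } tv_scaler_coeffs[] = {
		{ 0x00000, 0x841880A8 },
		{ 0x00001, 0x84208680 },
		/* ... remaining index/coefficient pairs elided ... */
	};

	/* Sketch only: replays the sequence above through the same
	 * 0x6578 (index) / 0x657C (data) register pair. */
	static void rv515_write_tv_scaler_coeffs(struct radeon_device *rdev)
	{
		unsigned i;

		for (i = 0; i < ARRAY_SIZE(tv_scaler_coeffs); i++) {
			WREG32(0x6578, tv_scaler_coeffs[i].index);
			WREG32(0x657C, tv_scaler_coeffs[i].coeff);
		}
	}
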
782
783struct rv515_watermark {
784 u32 lb_request_fifo_depth;
785 fixed20_12 num_line_pair;
786 fixed20_12 estimated_width;
787 fixed20_12 worst_case_latency;
788 fixed20_12 consumption_rate;
789 fixed20_12 active_time;
790 fixed20_12 dbpp;
791 fixed20_12 priority_mark_max;
792 fixed20_12 priority_mark;
793 fixed20_12 sclk;
794};
795
796void rv515_crtc_bandwidth_compute(struct radeon_device *rdev,
797 struct radeon_crtc *crtc,
798 struct rv515_watermark *wm)
799{
800 struct drm_display_mode *mode = &crtc->base.mode;
801 fixed20_12 a, b, c;
802 fixed20_12 pclk, request_fifo_depth, tolerable_latency, estimated_width;
803 fixed20_12 consumption_time, line_time, chunk_time, read_delay_latency;
804
805 if (!crtc->base.enabled) {
806 /* FIXME: wouldn't it better to set priority mark to maximum */
807 wm->lb_request_fifo_depth = 4;
808 return;
809 }
810
811 if (crtc->vsc.full > rfixed_const(2))
812 wm->num_line_pair.full = rfixed_const(2);
813 else
814 wm->num_line_pair.full = rfixed_const(1);
815
816 b.full = rfixed_const(mode->crtc_hdisplay);
817 c.full = rfixed_const(256);
818 a.full = rfixed_mul(wm->num_line_pair, b);
819 request_fifo_depth.full = rfixed_div(a, c);
820 if (a.full < rfixed_const(4)) {
821 wm->lb_request_fifo_depth = 4;
822 } else {
823 wm->lb_request_fifo_depth = rfixed_trunc(request_fifo_depth);
824 }
825
826 /* Determine consumption rate
827 * pclk = pixel clock period(ns) = 1000 / (mode.clock / 1000)
828 * vtaps = number of vertical taps,
829 * vsc = vertical scaling ratio, defined as source/destination
830 * hsc = horizontal scaling ration, defined as source/destination
831 */
832 a.full = rfixed_const(mode->clock);
833 b.full = rfixed_const(1000);
834 a.full = rfixed_div(a, b);
835 pclk.full = rfixed_div(b, a);
836 if (crtc->rmx_type != RMX_OFF) {
837 b.full = rfixed_const(2);
838 if (crtc->vsc.full > b.full)
839 b.full = crtc->vsc.full;
840 b.full = rfixed_mul(b, crtc->hsc);
841 c.full = rfixed_const(2);
842 b.full = rfixed_div(b, c);
843 consumption_time.full = rfixed_div(pclk, b);
844 } else {
845 consumption_time.full = pclk.full;
846 }
847 a.full = rfixed_const(1);
848 wm->consumption_rate.full = rfixed_div(a, consumption_time);
849
850
851 /* Determine line time
852 * LineTime = total time for one line of displayhtotal
853 * LineTime = total number of horizontal pixels
854 * pclk = pixel clock period(ns)
855 */
856 a.full = rfixed_const(crtc->base.mode.crtc_htotal);
857 line_time.full = rfixed_mul(a, pclk);
858
859 /* Determine active time
860 * ActiveTime = time of active region of display within one line,
861 * hactive = total number of horizontal active pixels
862 * htotal = total number of horizontal pixels
863 */
864 a.full = rfixed_const(crtc->base.mode.crtc_htotal);
865 b.full = rfixed_const(crtc->base.mode.crtc_hdisplay);
866 wm->active_time.full = rfixed_mul(line_time, b);
867 wm->active_time.full = rfixed_div(wm->active_time, a);
868
869 /* Determine chunk time
870 * ChunkTime = the time it takes the DCP to send one chunk of data
871 * to the LB which consists of pipeline delay and inter chunk gap
872 * sclk = system clock(Mhz)
873 */
874 a.full = rfixed_const(600 * 1000);
875 chunk_time.full = rfixed_div(a, rdev->pm.sclk);
876 read_delay_latency.full = rfixed_const(1000);
877
878 /* Determine the worst case latency
879 * NumLinePair = Number of line pairs to request(1=2 lines, 2=4 lines)
880 * WorstCaseLatency = worst case time from urgent to when the MC starts
881 * to return data
882 * READ_DELAY_IDLE_MAX = constant of 1us
883 * ChunkTime = time it takes the DCP to send one chunk of data to the LB
884 * which consists of pipeline delay and inter chunk gap
885 */
886 if (rfixed_trunc(wm->num_line_pair) > 1) {
887 a.full = rfixed_const(3);
888 wm->worst_case_latency.full = rfixed_mul(a, chunk_time);
889 wm->worst_case_latency.full += read_delay_latency.full;
890 } else {
891 wm->worst_case_latency.full = chunk_time.full + read_delay_latency.full;
892 }
893
894 /* Determine the tolerable latency
895 * TolerableLatency = Any given request has only 1 line time
896 * for the data to be returned
897 * LBRequestFifoDepth = Number of chunk requests the LB can
898 * put into the request FIFO for a display
899 * LineTime = total time for one line of display
900 * ChunkTime = the time it takes the DCP to send one chunk
901 * of data to the LB which consists of
902 * pipeline delay and inter chunk gap
903 */
904 if ((2+wm->lb_request_fifo_depth) >= rfixed_trunc(request_fifo_depth)) {
905 tolerable_latency.full = line_time.full;
906 } else {
907 tolerable_latency.full = rfixed_const(wm->lb_request_fifo_depth - 2);
908 tolerable_latency.full = request_fifo_depth.full - tolerable_latency.full;
909 tolerable_latency.full = rfixed_mul(tolerable_latency, chunk_time);
910 tolerable_latency.full = line_time.full - tolerable_latency.full;
911 }
912 /* We assume worst case 32bits (4 bytes) */
913 wm->dbpp.full = rfixed_const(2 * 16);
914
915 /* Determine the maximum priority mark
916 * width = viewport width in pixels
917 */
918 a.full = rfixed_const(16);
919 wm->priority_mark_max.full = rfixed_const(crtc->base.mode.crtc_hdisplay);
920 wm->priority_mark_max.full = rfixed_div(wm->priority_mark_max, a);
921
922 /* Determine estimated width */
923 estimated_width.full = tolerable_latency.full - wm->worst_case_latency.full;
924 estimated_width.full = rfixed_div(estimated_width, consumption_time);
925 if (rfixed_trunc(estimated_width) > crtc->base.mode.crtc_hdisplay) {
926 wm->priority_mark.full = rfixed_const(10);
927 } else {
928 a.full = rfixed_const(16);
929 wm->priority_mark.full = rfixed_div(estimated_width, a);
930 wm->priority_mark.full = wm->priority_mark_max.full - wm->priority_mark.full;
931 }
932}
933
934void rv515_bandwidth_avivo_update(struct radeon_device *rdev)
935{
936 struct drm_display_mode *mode0 = NULL;
937 struct drm_display_mode *mode1 = NULL;
938 struct rv515_watermark wm0;
939 struct rv515_watermark wm1;
940 u32 tmp;
941 fixed20_12 priority_mark02, priority_mark12, fill_rate;
942 fixed20_12 a, b;
943
944 if (rdev->mode_info.crtcs[0]->base.enabled)
945 mode0 = &rdev->mode_info.crtcs[0]->base.mode;
946 if (rdev->mode_info.crtcs[1]->base.enabled)
947 mode1 = &rdev->mode_info.crtcs[1]->base.mode;
948 rs690_line_buffer_adjust(rdev, mode0, mode1);
949
950 rv515_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[0], &wm0);
951 rv515_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[1], &wm1);
952
953 tmp = wm0.lb_request_fifo_depth;
954 tmp |= wm1.lb_request_fifo_depth << 16;
955 WREG32(LB_MAX_REQ_OUTSTANDING, tmp);
956
957 if (mode0 && mode1) {
958 if (rfixed_trunc(wm0.dbpp) > 64)
959 a.full = rfixed_div(wm0.dbpp, wm0.num_line_pair);
960 else
961 a.full = wm0.num_line_pair.full;
962 if (rfixed_trunc(wm1.dbpp) > 64)
963 b.full = rfixed_div(wm1.dbpp, wm1.num_line_pair);
964 else
965 b.full = wm1.num_line_pair.full;
966 a.full += b.full;
967 fill_rate.full = rfixed_div(wm0.sclk, a);
968 if (wm0.consumption_rate.full > fill_rate.full) {
969 b.full = wm0.consumption_rate.full - fill_rate.full;
970 b.full = rfixed_mul(b, wm0.active_time);
971 a.full = rfixed_const(16);
972 b.full = rfixed_div(b, a);
973 a.full = rfixed_mul(wm0.worst_case_latency,
974 wm0.consumption_rate);
975 priority_mark02.full = a.full + b.full;
976 } else {
977 a.full = rfixed_mul(wm0.worst_case_latency,
978 wm0.consumption_rate);
979 b.full = rfixed_const(16 * 1000);
980 priority_mark02.full = rfixed_div(a, b);
981 }
982 if (wm1.consumption_rate.full > fill_rate.full) {
983 b.full = wm1.consumption_rate.full - fill_rate.full;
984 b.full = rfixed_mul(b, wm1.active_time);
985 a.full = rfixed_const(16);
986 b.full = rfixed_div(b, a);
987 a.full = rfixed_mul(wm1.worst_case_latency,
988 wm1.consumption_rate);
989 priority_mark12.full = a.full + b.full;
990 } else {
991 a.full = rfixed_mul(wm1.worst_case_latency,
992 wm1.consumption_rate);
993 b.full = rfixed_const(16 * 1000);
994 priority_mark12.full = rfixed_div(a, b);
995 }
996 if (wm0.priority_mark.full > priority_mark02.full)
997 priority_mark02.full = wm0.priority_mark.full;
998 if (rfixed_trunc(priority_mark02) < 0)
999 priority_mark02.full = 0;
1000 if (wm0.priority_mark_max.full > priority_mark02.full)
1001 priority_mark02.full = wm0.priority_mark_max.full;
1002 if (wm1.priority_mark.full > priority_mark12.full)
1003 priority_mark12.full = wm1.priority_mark.full;
1004 if (rfixed_trunc(priority_mark12) < 0)
1005 priority_mark12.full = 0;
1006 if (wm1.priority_mark_max.full > priority_mark12.full)
1007 priority_mark12.full = wm1.priority_mark_max.full;
1008 WREG32(D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02));
1009 WREG32(D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02));
1010 WREG32(D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12));
1011 WREG32(D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12));
1012 } else if (mode0) {
1013 if (rfixed_trunc(wm0.dbpp) > 64)
1014 a.full = rfixed_div(wm0.dbpp, wm0.num_line_pair);
1015 else
1016 a.full = wm0.num_line_pair.full;
1017 fill_rate.full = rfixed_div(wm0.sclk, a);
1018 if (wm0.consumption_rate.full > fill_rate.full) {
1019 b.full = wm0.consumption_rate.full - fill_rate.full;
1020 b.full = rfixed_mul(b, wm0.active_time);
1021 a.full = rfixed_const(16);
1022 b.full = rfixed_div(b, a);
1023 a.full = rfixed_mul(wm0.worst_case_latency,
1024 wm0.consumption_rate);
1025 priority_mark02.full = a.full + b.full;
1026 } else {
1027 a.full = rfixed_mul(wm0.worst_case_latency,
1028 wm0.consumption_rate);
1029 b.full = rfixed_const(16);
1030 priority_mark02.full = rfixed_div(a, b);
1031 }
1032 if (wm0.priority_mark.full > priority_mark02.full)
1033 priority_mark02.full = wm0.priority_mark.full;
1034 if (rfixed_trunc(priority_mark02) < 0)
1035 priority_mark02.full = 0;
1036 if (wm0.priority_mark_max.full > priority_mark02.full)
1037 priority_mark02.full = wm0.priority_mark_max.full;
1038 WREG32(D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02));
1039 WREG32(D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02));
1040 WREG32(D2MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF);
1041 WREG32(D2MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF);
1042 } else {
1043 if (rfixed_trunc(wm1.dbpp) > 64)
1044 a.full = rfixed_div(wm1.dbpp, wm1.num_line_pair);
1045 else
1046 a.full = wm1.num_line_pair.full;
1047 fill_rate.full = rfixed_div(wm1.sclk, a);
1048 if (wm1.consumption_rate.full > fill_rate.full) {
1049 b.full = wm1.consumption_rate.full - fill_rate.full;
1050 b.full = rfixed_mul(b, wm1.active_time);
1051 a.full = rfixed_const(16);
1052 b.full = rfixed_div(b, a);
1053 a.full = rfixed_mul(wm1.worst_case_latency,
1054 wm1.consumption_rate);
1055 priority_mark12.full = a.full + b.full;
1056 } else {
1057 a.full = rfixed_mul(wm1.worst_case_latency,
1058 wm1.consumption_rate);
1059 b.full = rfixed_const(16 * 1000);
1060 priority_mark12.full = rfixed_div(a, b);
1061 }
1062 if (wm1.priority_mark.full > priority_mark12.full)
1063 priority_mark12.full = wm1.priority_mark.full;
1064 if (rfixed_trunc(priority_mark12) < 0)
1065 priority_mark12.full = 0;
1066 if (wm1.priority_mark_max.full > priority_mark12.full)
1067 priority_mark12.full = wm1.priority_mark_max.full;
1068 WREG32(D1MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF);
1069 WREG32(D1MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF);
1070 WREG32(D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12));
1071 WREG32(D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12));
1072 }
1073}
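
The clamping applied to priority_mark02/priority_mark12 above repeats the same three comparisons for each display. A hedged sketch of that pattern factored into a helper (hypothetical function; the patch itself keeps the sequence open-coded per CRTC):

	/* Sketch only: mirrors the open-coded clamp above and returns the value
	 * written to the DxMODE_PRIORITY_x_CNT registers. */
	static u32 rv515_clamp_priority_mark(const struct rv515_watermark *wm,
					     fixed20_12 mark)
	{
		if (wm->priority_mark.full > mark.full)
			mark.full = wm->priority_mark.full;
		if (rfixed_trunc(mark) < 0)
			mark.full = 0;
		if (wm->priority_mark_max.full > mark.full)
			mark.full = wm->priority_mark_max.full;
		return rfixed_trunc(mark);
	}
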
1074
1075void rv515_bandwidth_update(struct radeon_device *rdev)
1076{
1077 uint32_t tmp;
1078 struct drm_display_mode *mode0 = NULL;
1079 struct drm_display_mode *mode1 = NULL;
1080
1081 if (rdev->mode_info.crtcs[0]->base.enabled)
1082 mode0 = &rdev->mode_info.crtcs[0]->base.mode;
1083 if (rdev->mode_info.crtcs[1]->base.enabled)
1084 mode1 = &rdev->mode_info.crtcs[1]->base.mode;
1085 /*
1086 * Set display0/1 priority up in the memory controller for
1087 * modes if the user specifies HIGH for displaypriority
1088 * option.
1089 */
1090 if (rdev->disp_priority == 2) {
1091 tmp = RREG32_MC(MC_MISC_LAT_TIMER);
1092 tmp &= ~MC_DISP1R_INIT_LAT_MASK;
1093 tmp &= ~MC_DISP0R_INIT_LAT_MASK;
1094 if (mode1)
1095 tmp |= (1 << MC_DISP1R_INIT_LAT_SHIFT);
1096 if (mode0)
1097 tmp |= (1 << MC_DISP0R_INIT_LAT_SHIFT);
1098 WREG32_MC(MC_MISC_LAT_TIMER, tmp);
1099 }
1100 rv515_bandwidth_avivo_update(rdev);
1101}