path: root/drivers/gpu/drm/radeon/rv515.c
author    Andrea Bastoni <bastoni@cs.unc.edu>  2010-05-30 19:16:45 -0400
committer Andrea Bastoni <bastoni@cs.unc.edu>  2010-05-30 19:16:45 -0400
commit    ada47b5fe13d89735805b566185f4885f5a3f750 (patch)
tree      644b88f8a71896307d71438e9b3af49126ffb22b /drivers/gpu/drm/radeon/rv515.c
parent    43e98717ad40a4ae64545b5ba047c7b86aa44f4f (diff)
parent    3280f21d43ee541f97f8cda5792150d2dbec20d5 (diff)
Merge branch 'wip-2.6.34' into old-private-master (archived-private-master)
Diffstat (limited to 'drivers/gpu/drm/radeon/rv515.c')
-rw-r--r--  drivers/gpu/drm/radeon/rv515.c | 96
1 file changed, 57 insertions(+), 39 deletions(-)
diff --git a/drivers/gpu/drm/radeon/rv515.c b/drivers/gpu/drm/radeon/rv515.c
index ba68c9fe90a1..9035121f4b58 100644
--- a/drivers/gpu/drm/radeon/rv515.c
+++ b/drivers/gpu/drm/radeon/rv515.c
@@ -26,9 +26,11 @@
  * Jerome Glisse
  */
 #include <linux/seq_file.h>
+#include <linux/slab.h>
 #include "drmP.h"
 #include "rv515d.h"
 #include "radeon.h"
+#include "radeon_asic.h"
 #include "atom.h"
 #include "rv515_reg_safe.h"
 
@@ -277,19 +279,15 @@ static void rv515_vram_get_type(struct radeon_device *rdev)
         }
 }
 
-void rv515_vram_info(struct radeon_device *rdev)
+void rv515_mc_init(struct radeon_device *rdev)
 {
-        fixed20_12 a;
 
         rv515_vram_get_type(rdev);
-
         r100_vram_init_sizes(rdev);
-        /* FIXME: we should enforce default clock in case GPU is not in
-         * default setup
-         */
-        a.full = rfixed_const(100);
-        rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
-        rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
+        radeon_vram_location(rdev, &rdev->mc, 0);
+        if (!(rdev->flags & RADEON_IS_AGP))
+                radeon_gtt_location(rdev, &rdev->mc);
+        radeon_update_bandwidth_info(rdev);
 }
 
 uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg)
@@ -478,8 +476,8 @@ static int rv515_startup(struct radeon_device *rdev)
                 return r;
         }
         /* Enable IRQ */
-        rdev->irq.sw_int = true;
         rs600_irq_set(rdev);
+        rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
         /* 1M ring buffer */
         r = r100_cp_init(rdev, 1024 * 1024);
         if (r) {
@@ -514,6 +512,8 @@ int rv515_resume(struct radeon_device *rdev)
         atom_asic_init(rdev->mode_info.atom_context);
         /* Resume clock after posting */
         rv515_clock_startup(rdev);
+        /* Initialize surface registers */
+        radeon_surface_init(rdev);
         return rv515_startup(rdev);
 }
 
@@ -535,16 +535,16 @@ void rv515_set_safe_registers(struct radeon_device *rdev)
 
 void rv515_fini(struct radeon_device *rdev)
 {
-        rv515_suspend(rdev);
+        radeon_pm_fini(rdev);
         r100_cp_fini(rdev);
         r100_wb_fini(rdev);
         r100_ib_fini(rdev);
         radeon_gem_fini(rdev);
         rv370_pcie_gart_fini(rdev);
         radeon_agp_fini(rdev);
         radeon_irq_kms_fini(rdev);
         radeon_fence_driver_fini(rdev);
-        radeon_object_fini(rdev);
+        radeon_bo_fini(rdev);
         radeon_atombios_fini(rdev);
         kfree(rdev->bios);
         rdev->bios = NULL;
@@ -580,20 +580,21 @@ int rv515_init(struct radeon_device *rdev)
                         RREG32(R_0007C0_CP_STAT));
         }
         /* check if cards are posted or not */
-        if (!radeon_card_posted(rdev) && rdev->bios) {
-                DRM_INFO("GPU not posted. posting now...\n");
-                atom_asic_init(rdev->mode_info.atom_context);
-        }
+        if (radeon_boot_test_post_card(rdev) == false)
+                return -EINVAL;
         /* Initialize clocks */
         radeon_get_clock_info(rdev->ddev);
         /* Initialize power management */
         radeon_pm_init(rdev);
-        /* Get vram informations */
-        rv515_vram_info(rdev);
-        /* Initialize memory controller (also test AGP) */
-        r = r420_mc_init(rdev);
-        if (r)
-                return r;
+        /* initialize AGP */
+        if (rdev->flags & RADEON_IS_AGP) {
+                r = radeon_agp_init(rdev);
+                if (r) {
+                        radeon_agp_disable(rdev);
+                }
+        }
+        /* initialize memory controller */
+        rv515_mc_init(rdev);
         rv515_debugfs(rdev);
         /* Fence driver */
         r = radeon_fence_driver_init(rdev);
@@ -603,7 +604,7 @@ int rv515_init(struct radeon_device *rdev)
         if (r)
                 return r;
         /* Memory manager */
-        r = radeon_object_init(rdev);
+        r = radeon_bo_init(rdev);
         if (r)
                 return r;
         r = rv370_pcie_gart_init(rdev);
@@ -615,13 +616,12 @@ int rv515_init(struct radeon_device *rdev)
         if (r) {
                 /* Somethings want wront with the accel init stop accel */
                 dev_err(rdev->dev, "Disabling GPU acceleration\n");
-                rv515_suspend(rdev);
                 r100_cp_fini(rdev);
                 r100_wb_fini(rdev);
                 r100_ib_fini(rdev);
+                radeon_irq_kms_fini(rdev);
                 rv370_pcie_gart_fini(rdev);
                 radeon_agp_fini(rdev);
-                radeon_irq_kms_fini(rdev);
                 rdev->accel_working = false;
         }
         return 0;
@@ -892,8 +892,9 @@ void rv515_crtc_bandwidth_compute(struct radeon_device *rdev,
 
         b.full = rfixed_const(mode->crtc_hdisplay);
         c.full = rfixed_const(256);
-        a.full = rfixed_mul(wm->num_line_pair, b);
-        request_fifo_depth.full = rfixed_div(a, c);
+        a.full = rfixed_div(b, c);
+        request_fifo_depth.full = rfixed_mul(a, wm->num_line_pair);
+        request_fifo_depth.full = rfixed_ceil(request_fifo_depth);
         if (a.full < rfixed_const(4)) {
                 wm->lb_request_fifo_depth = 4;
         } else {
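
Note on the fixed-point math used throughout rv515_crtc_bandwidth_compute(): the watermark arithmetic is done in the driver's fixed20_12 format (20 integer bits, 12 fractional bits), and the rfixed_ceil() calls added by this patch round a fractional result up to the next whole unit so truncation can no longer under-size the programmed FIFO depth or priority marks. The snippet below is a minimal, self-contained sketch of that arithmetic; the helper names and rounding details only approximate what radeon_fixed.h actually provides and are assumptions for illustration, not the driver's implementation.

/* Standalone sketch of 20.12 fixed-point arithmetic, approximating the
 * fixed20_12 helpers (rfixed_const, rfixed_mul, rfixed_div, rfixed_ceil,
 * rfixed_trunc) used above. Illustration only; see radeon_fixed.h for the
 * real definitions. */
#include <stdint.h>
#include <stdio.h>

#define FRAC_BITS 12                    /* 20.12: 20 integer, 12 fractional bits */
#define FP_ONE    (1u << FRAC_BITS)

static uint32_t fp_const(uint32_t a) { return a << FRAC_BITS; }
static uint32_t fp_mul(uint32_t a, uint32_t b)
{
        return (uint32_t)(((uint64_t)a * b) >> FRAC_BITS);
}
static uint32_t fp_div(uint32_t a, uint32_t b)
{
        return (uint32_t)(((uint64_t)a << FRAC_BITS) / b);
}
static uint32_t fp_ceil(uint32_t a)     /* round up to a whole integer */
{
        return (a + FP_ONE - 1) & ~(FP_ONE - 1);
}
static uint32_t fp_trunc(uint32_t a) { return a >> FRAC_BITS; }

int main(void)
{
        /* Mirror the request_fifo_depth computation for a hypothetical
         * 1680-pixel-wide mode with one line pair. */
        uint32_t b = fp_const(1680), c = fp_const(256), lines = fp_const(1);
        uint32_t a = fp_div(b, c);              /* 1680 / 256 = 6.5625 */
        uint32_t depth = fp_mul(a, lines);

        printf("trunc=%u ceil=%u\n", fp_trunc(depth), fp_trunc(fp_ceil(depth)));
        /* Prints "trunc=6 ceil=7": without the ceil, the fractional depth
         * would be truncated and the FIFO programmed one request too small. */
        return 0;
}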
@@ -995,15 +996,17 @@ void rv515_crtc_bandwidth_compute(struct radeon_device *rdev,
         a.full = rfixed_const(16);
         wm->priority_mark_max.full = rfixed_const(crtc->base.mode.crtc_hdisplay);
         wm->priority_mark_max.full = rfixed_div(wm->priority_mark_max, a);
+        wm->priority_mark_max.full = rfixed_ceil(wm->priority_mark_max);
 
         /* Determine estimated width */
         estimated_width.full = tolerable_latency.full - wm->worst_case_latency.full;
         estimated_width.full = rfixed_div(estimated_width, consumption_time);
         if (rfixed_trunc(estimated_width) > crtc->base.mode.crtc_hdisplay) {
-                wm->priority_mark.full = rfixed_const(10);
+                wm->priority_mark.full = wm->priority_mark_max.full;
         } else {
                 a.full = rfixed_const(16);
                 wm->priority_mark.full = rfixed_div(estimated_width, a);
+                wm->priority_mark.full = rfixed_ceil(wm->priority_mark);
                 wm->priority_mark.full = wm->priority_mark_max.full - wm->priority_mark.full;
         }
 }
@@ -1014,7 +1017,7 @@ void rv515_bandwidth_avivo_update(struct radeon_device *rdev)
         struct drm_display_mode *mode1 = NULL;
         struct rv515_watermark wm0;
         struct rv515_watermark wm1;
-        u32 tmp;
+        u32 tmp, d1mode_priority_a_cnt, d2mode_priority_a_cnt;
         fixed20_12 priority_mark02, priority_mark12, fill_rate;
         fixed20_12 a, b;
 
@@ -1082,10 +1085,16 @@ void rv515_bandwidth_avivo_update(struct radeon_device *rdev)
                         priority_mark12.full = 0;
                 if (wm1.priority_mark_max.full > priority_mark12.full)
                         priority_mark12.full = wm1.priority_mark_max.full;
-                WREG32(D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02));
-                WREG32(D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02));
-                WREG32(D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12));
-                WREG32(D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12));
+                d1mode_priority_a_cnt = rfixed_trunc(priority_mark02);
+                d2mode_priority_a_cnt = rfixed_trunc(priority_mark12);
+                if (rdev->disp_priority == 2) {
+                        d1mode_priority_a_cnt |= MODE_PRIORITY_ALWAYS_ON;
+                        d2mode_priority_a_cnt |= MODE_PRIORITY_ALWAYS_ON;
+                }
+                WREG32(D1MODE_PRIORITY_A_CNT, d1mode_priority_a_cnt);
+                WREG32(D1MODE_PRIORITY_B_CNT, d1mode_priority_a_cnt);
+                WREG32(D2MODE_PRIORITY_A_CNT, d2mode_priority_a_cnt);
+                WREG32(D2MODE_PRIORITY_B_CNT, d2mode_priority_a_cnt);
         } else if (mode0) {
                 if (rfixed_trunc(wm0.dbpp) > 64)
                         a.full = rfixed_div(wm0.dbpp, wm0.num_line_pair);
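
The rewritten register programming follows the same pattern in all three branches (both displays, display 0 only, display 1 only): truncate the fixed-point priority mark to an integer request count and, when the user has forced display priority to high (rdev->disp_priority == 2), OR in MODE_PRIORITY_ALWAYS_ON so the display controller's memory requests are never deprioritised. A compressed sketch of that pattern follows; the real MODE_PRIORITY_ALWAYS_ON bit comes from the driver's register headers, so the value used here is a placeholder for illustration only.

/* Sketch of how the new D*MODE_PRIORITY_*_CNT values are assembled.
 * HYPOTHETICAL_ALWAYS_ON is a stand-in bit, not the real definition. */
#include <stdint.h>
#include <stdio.h>

#define HYPOTHETICAL_ALWAYS_ON (1u << 20)       /* placeholder bit position */

static uint32_t build_priority_cnt(uint32_t mark_20_12, int disp_priority)
{
        uint32_t cnt = mark_20_12 >> 12;        /* rfixed_trunc() equivalent */

        if (disp_priority == 2)                 /* user forced "high" priority */
                cnt |= HYPOTHETICAL_ALWAYS_ON;
        return cnt;
}

int main(void)
{
        /* A priority mark of 42.0 in 20.12 form, with priority forced high. */
        printf("0x%08x\n", build_priority_cnt(42u << 12, 2));
        return 0;
}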
@@ -1112,8 +1121,11 @@ void rv515_bandwidth_avivo_update(struct radeon_device *rdev)
                         priority_mark02.full = 0;
                 if (wm0.priority_mark_max.full > priority_mark02.full)
                         priority_mark02.full = wm0.priority_mark_max.full;
-                WREG32(D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02));
-                WREG32(D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02));
+                d1mode_priority_a_cnt = rfixed_trunc(priority_mark02);
+                if (rdev->disp_priority == 2)
+                        d1mode_priority_a_cnt |= MODE_PRIORITY_ALWAYS_ON;
+                WREG32(D1MODE_PRIORITY_A_CNT, d1mode_priority_a_cnt);
+                WREG32(D1MODE_PRIORITY_B_CNT, d1mode_priority_a_cnt);
                 WREG32(D2MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF);
                 WREG32(D2MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF);
         } else {
@@ -1142,10 +1154,13 @@ void rv515_bandwidth_avivo_update(struct radeon_device *rdev)
                         priority_mark12.full = 0;
                 if (wm1.priority_mark_max.full > priority_mark12.full)
                         priority_mark12.full = wm1.priority_mark_max.full;
+                d2mode_priority_a_cnt = rfixed_trunc(priority_mark12);
+                if (rdev->disp_priority == 2)
+                        d2mode_priority_a_cnt |= MODE_PRIORITY_ALWAYS_ON;
                 WREG32(D1MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF);
                 WREG32(D1MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF);
-                WREG32(D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12));
-                WREG32(D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12));
+                WREG32(D2MODE_PRIORITY_A_CNT, d2mode_priority_a_cnt);
+                WREG32(D2MODE_PRIORITY_B_CNT, d2mode_priority_a_cnt);
         }
 }
 
@@ -1155,6 +1170,8 @@ void rv515_bandwidth_update(struct radeon_device *rdev)
         struct drm_display_mode *mode0 = NULL;
         struct drm_display_mode *mode1 = NULL;
 
+        radeon_update_display_priority(rdev);
+
         if (rdev->mode_info.crtcs[0]->base.enabled)
                 mode0 = &rdev->mode_info.crtcs[0]->base.mode;
         if (rdev->mode_info.crtcs[1]->base.enabled)
@@ -1164,7 +1181,8 @@ void rv515_bandwidth_update(struct radeon_device *rdev)
          * modes if the user specifies HIGH for displaypriority
          * option.
          */
-        if (rdev->disp_priority == 2) {
+        if ((rdev->disp_priority == 2) &&
+            (rdev->family == CHIP_RV515)) {
                 tmp = RREG32_MC(MC_MISC_LAT_TIMER);
                 tmp &= ~MC_DISP1R_INIT_LAT_MASK;
                 tmp &= ~MC_DISP0R_INIT_LAT_MASK;