aboutsummaryrefslogtreecommitdiffstats
path: root/drivers/gpu/drm
diff options
context:
space:
mode:
authorJerome Glisse <jglisse@redhat.com>2009-07-13 15:04:08 -0400
committerDave Airlie <airlied@redhat.com>2009-07-29 01:45:09 -0400
commitc93bb85b5cba3e3a06f2cad8e9bc5c23d3d10aac (patch)
tree3168bee69e08dcb1f0f509b03ea1693a688d34ef /drivers/gpu/drm
parente024e11070a0a0dc7163ce1ec2da354a638bdbed (diff)
drm/radeon/kms: fix bandwidth computation on avivo hardware
Fix bandwidth computation and CRTC priority in the memory controller so that CRTC memory requests are fulfilled in time to avoid display artifacts. Signed-off-by: Jerome Glisse <jglisse@redhat.com> Signed-off-by: Dave Airlie <airlied@redhat.com>
Diffstat (limited to 'drivers/gpu/drm')
-rw-r--r--drivers/gpu/drm/radeon/atombios_crtc.c276
-rw-r--r--drivers/gpu/drm/radeon/r100.c483
-rw-r--r--drivers/gpu/drm/radeon/r300.c1
-rw-r--r--drivers/gpu/drm/radeon/r520.c14
-rw-r--r--drivers/gpu/drm/radeon/radeon.h40
-rw-r--r--drivers/gpu/drm/radeon/radeon_asic.h13
-rw-r--r--drivers/gpu/drm/radeon/radeon_device.c10
-rw-r--r--drivers/gpu/drm/radeon/radeon_display.c68
-rw-r--r--drivers/gpu/drm/radeon/radeon_encoders.c369
-rw-r--r--drivers/gpu/drm/radeon/radeon_legacy_crtc.c661
-rw-r--r--drivers/gpu/drm/radeon/radeon_legacy_encoders.c188
-rw-r--r--drivers/gpu/drm/radeon/radeon_mode.h48
-rw-r--r--drivers/gpu/drm/radeon/radeon_share.h39
-rw-r--r--drivers/gpu/drm/radeon/rs400.c1
-rw-r--r--drivers/gpu/drm/radeon/rs600.c5
-rw-r--r--drivers/gpu/drm/radeon/rs690.c472
-rw-r--r--drivers/gpu/drm/radeon/rs690r.h99
-rw-r--r--drivers/gpu/drm/radeon/rv515.c774
-rw-r--r--drivers/gpu/drm/radeon/rv515r.h170
19 files changed, 2377 insertions, 1354 deletions
diff --git a/drivers/gpu/drm/radeon/atombios_crtc.c b/drivers/gpu/drm/radeon/atombios_crtc.c
index eac26cdb5dae..74d034f77c6b 100644
--- a/drivers/gpu/drm/radeon/atombios_crtc.c
+++ b/drivers/gpu/drm/radeon/atombios_crtc.c
@@ -31,6 +31,132 @@
31#include "atom.h" 31#include "atom.h"
32#include "atom-bits.h" 32#include "atom-bits.h"
33 33
34static void atombios_overscan_setup(struct drm_crtc *crtc,
35 struct drm_display_mode *mode,
36 struct drm_display_mode *adjusted_mode)
37{
38 struct drm_device *dev = crtc->dev;
39 struct radeon_device *rdev = dev->dev_private;
40 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
41 SET_CRTC_OVERSCAN_PS_ALLOCATION args;
42 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
43 int a1, a2;
44
45 memset(&args, 0, sizeof(args));
46
47 args.usOverscanRight = 0;
48 args.usOverscanLeft = 0;
49 args.usOverscanBottom = 0;
50 args.usOverscanTop = 0;
51 args.ucCRTC = radeon_crtc->crtc_id;
52
53 switch (radeon_crtc->rmx_type) {
54 case RMX_CENTER:
55 args.usOverscanTop = (adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2;
56 args.usOverscanBottom = (adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2;
57 args.usOverscanLeft = (adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2;
58 args.usOverscanRight = (adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2;
59 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
60 break;
61 case RMX_ASPECT:
62 a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
63 a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;
64
65 if (a1 > a2) {
66 args.usOverscanLeft = (adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2;
67 args.usOverscanRight = (adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2;
68 } else if (a2 > a1) {
69 args.usOverscanLeft = (adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2;
70 args.usOverscanRight = (adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2;
71 }
72 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
73 break;
74 case RMX_FULL:
75 default:
76 args.usOverscanRight = 0;
77 args.usOverscanLeft = 0;
78 args.usOverscanBottom = 0;
79 args.usOverscanTop = 0;
80 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
81 break;
82 }
83}
84
85static void atombios_scaler_setup(struct drm_crtc *crtc)
86{
87 struct drm_device *dev = crtc->dev;
88 struct radeon_device *rdev = dev->dev_private;
89 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
90 ENABLE_SCALER_PS_ALLOCATION args;
91 int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
92 /* fixme - fill in enc_priv for atom dac */
93 enum radeon_tv_std tv_std = TV_STD_NTSC;
94
95 if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
96 return;
97
98 memset(&args, 0, sizeof(args));
99
100 args.ucScaler = radeon_crtc->crtc_id;
101
102 if (radeon_crtc->devices & (ATOM_DEVICE_TV_SUPPORT)) {
103 switch (tv_std) {
104 case TV_STD_NTSC:
105 default:
106 args.ucTVStandard = ATOM_TV_NTSC;
107 break;
108 case TV_STD_PAL:
109 args.ucTVStandard = ATOM_TV_PAL;
110 break;
111 case TV_STD_PAL_M:
112 args.ucTVStandard = ATOM_TV_PALM;
113 break;
114 case TV_STD_PAL_60:
115 args.ucTVStandard = ATOM_TV_PAL60;
116 break;
117 case TV_STD_NTSC_J:
118 args.ucTVStandard = ATOM_TV_NTSCJ;
119 break;
120 case TV_STD_SCART_PAL:
121 args.ucTVStandard = ATOM_TV_PAL; /* ??? */
122 break;
123 case TV_STD_SECAM:
124 args.ucTVStandard = ATOM_TV_SECAM;
125 break;
126 case TV_STD_PAL_CN:
127 args.ucTVStandard = ATOM_TV_PALCN;
128 break;
129 }
130 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
131 } else if (radeon_crtc->devices & (ATOM_DEVICE_CV_SUPPORT)) {
132 args.ucTVStandard = ATOM_TV_CV;
133 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
134 } else {
135 switch (radeon_crtc->rmx_type) {
136 case RMX_FULL:
137 args.ucEnable = ATOM_SCALER_EXPANSION;
138 break;
139 case RMX_CENTER:
140 args.ucEnable = ATOM_SCALER_CENTER;
141 break;
142 case RMX_ASPECT:
143 args.ucEnable = ATOM_SCALER_EXPANSION;
144 break;
145 default:
146 if (ASIC_IS_AVIVO(rdev))
147 args.ucEnable = ATOM_SCALER_DISABLE;
148 else
149 args.ucEnable = ATOM_SCALER_CENTER;
150 break;
151 }
152 }
153 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
154 if (radeon_crtc->devices & (ATOM_DEVICE_CV_SUPPORT | ATOM_DEVICE_TV_SUPPORT)
155 && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_RV570) {
156 atom_rv515_force_tv_scaler(rdev);
157 }
158}
159
34static void atombios_lock_crtc(struct drm_crtc *crtc, int lock) 160static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
35{ 161{
36 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 162 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
@@ -522,6 +648,9 @@ int atombios_crtc_mode_set(struct drm_crtc *crtc,
522 radeon_crtc_set_base(crtc, x, y, old_fb); 648 radeon_crtc_set_base(crtc, x, y, old_fb);
523 radeon_legacy_atom_set_surface(crtc); 649 radeon_legacy_atom_set_surface(crtc);
524 } 650 }
651 atombios_overscan_setup(crtc, mode, adjusted_mode);
652 atombios_scaler_setup(crtc);
653 radeon_bandwidth_update(rdev);
525 return 0; 654 return 0;
526} 655}
527 656
@@ -529,6 +658,8 @@ static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
529 struct drm_display_mode *mode, 658 struct drm_display_mode *mode,
530 struct drm_display_mode *adjusted_mode) 659 struct drm_display_mode *adjusted_mode)
531{ 660{
661 if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
662 return false;
532 return true; 663 return true;
533} 664}
534 665
@@ -561,148 +692,3 @@ void radeon_atombios_init_crtc(struct drm_device *dev,
561 AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL; 692 AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
562 drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs); 693 drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
563} 694}
564
/* Avivo display-bandwidth setup (legacy path, removed by this commit in
 * favour of the per-ASIC *_bandwidth_update() hooks).
 * Raises the memory-controller latency priority of each active CRTC,
 * warns when the combined pixel bandwidth of the active modes exceeds
 * what the memory interface can deliver, and partitions the shared
 * line buffer between the two display controllers.
 */
565void radeon_init_disp_bw_avivo(struct drm_device *dev,
566 struct drm_display_mode *mode1,
567 uint32_t pixel_bytes1,
568 struct drm_display_mode *mode2,
569 uint32_t pixel_bytes2)
570{
571 struct radeon_device *rdev = dev->dev_private;
572 fixed20_12 min_mem_eff;
573 fixed20_12 peak_disp_bw, mem_bw, pix_clk, pix_clk2, temp_ff;
574 fixed20_12 sclk_ff, mclk_ff;
575 uint32_t dc_lb_memory_split, temp;
576
/* assumed minimum memory efficiency; rfixed_const_8(0) presumably
 * encodes 0.8 in 20.12 fixed point — TODO confirm in radeon_fixed.h */
577 min_mem_eff.full = rfixed_const_8(0);
/* disp_priority == 2: give the active CRTC(s) maximum MC request
 * latency priority; the register lives at different MC addresses on
 * RV515 vs RS690, other families are left untouched */
578 if (rdev->disp_priority == 2) {
579 uint32_t mc_init_misc_lat_timer = 0;
580 if (rdev->family == CHIP_RV515)
581 mc_init_misc_lat_timer =
582 RREG32_MC(RV515_MC_INIT_MISC_LAT_TIMER);
583 else if (rdev->family == CHIP_RS690)
584 mc_init_misc_lat_timer =
585 RREG32_MC(RS690_MC_INIT_MISC_LAT_TIMER);
586
587 mc_init_misc_lat_timer &=
588 ~(R300_MC_DISP1R_INIT_LAT_MASK <<
589 R300_MC_DISP1R_INIT_LAT_SHIFT);
590 mc_init_misc_lat_timer &=
591 ~(R300_MC_DISP0R_INIT_LAT_MASK <<
592 R300_MC_DISP0R_INIT_LAT_SHIFT);
593
594 if (mode2)
595 mc_init_misc_lat_timer |=
596 (1 << R300_MC_DISP1R_INIT_LAT_SHIFT);
597 if (mode1)
598 mc_init_misc_lat_timer |=
599 (1 << R300_MC_DISP0R_INIT_LAT_SHIFT);
600
601 if (rdev->family == CHIP_RV515)
602 WREG32_MC(RV515_MC_INIT_MISC_LAT_TIMER,
603 mc_init_misc_lat_timer);
604 else if (rdev->family == CHIP_RS690)
605 WREG32_MC(RS690_MC_INIT_MISC_LAT_TIMER,
606 mc_init_misc_lat_timer);
607 }
608
609 /*
610 * determine if there is enough bandwidth for the current mode
611 */
/* clocks are stored in 10 kHz units; /100 converts to MHz */
612 temp_ff.full = rfixed_const(100);
613 mclk_ff.full = rfixed_const(rdev->clock.default_mclk);
614 mclk_ff.full = rfixed_div(mclk_ff, temp_ff);
615 sclk_ff.full = rfixed_const(rdev->clock.default_sclk);
616 sclk_ff.full = rfixed_div(sclk_ff, temp_ff);
617
/* memory bus bytes per clock: width/8 bytes, doubled for DDR */
618 temp = (rdev->mc.vram_width / 8) * (rdev->mc.vram_is_ddr ? 2 : 1);
619 temp_ff.full = rfixed_const(temp);
620 mem_bw.full = rfixed_mul(mclk_ff, temp_ff);
621 mem_bw.full = rfixed_mul(mem_bw, min_mem_eff);
622
/* peak display bandwidth = sum over active CRTCs of
 * pixel_clock (MHz) * bytes per pixel */
623 pix_clk.full = 0;
624 pix_clk2.full = 0;
625 peak_disp_bw.full = 0;
626 if (mode1) {
627 temp_ff.full = rfixed_const(1000);
628 pix_clk.full = rfixed_const(mode1->clock); /* convert to fixed point */
629 pix_clk.full = rfixed_div(pix_clk, temp_ff);
630 temp_ff.full = rfixed_const(pixel_bytes1);
631 peak_disp_bw.full += rfixed_mul(pix_clk, temp_ff);
632 }
633 if (mode2) {
634 temp_ff.full = rfixed_const(1000);
635 pix_clk2.full = rfixed_const(mode2->clock); /* convert to fixed point */
636 pix_clk2.full = rfixed_div(pix_clk2, temp_ff);
637 temp_ff.full = rfixed_const(pixel_bytes2);
638 peak_disp_bw.full += rfixed_mul(pix_clk2, temp_ff);
639 }
640
/* warn only; the mode is still programmed */
641 if (peak_disp_bw.full >= mem_bw.full) {
642 DRM_ERROR
643 ("You may not have enough display bandwidth for current mode\n"
644 "If you have flickering problem, try to lower resolution, refresh rate, or color depth\n");
645 printk("peak disp bw %d, mem_bw %d\n",
646 rfixed_trunc(peak_disp_bw), rfixed_trunc(mem_bw));
647 }
648
649 /*
650 * Line Buffer Setup
651 * There is a single line buffer shared by both display controllers.
652 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between the display
653 * controllers. The partitioning can either be done manually or via one of four
654 * preset allocations specified in bits 1:0:
655 * 0 - line buffer is divided in half and shared between each display controller
656 * 1 - D1 gets 3/4 of the line buffer, D2 gets 1/4
657 * 2 - D1 gets the whole buffer
658 * 3 - D1 gets 1/4 of the line buffer, D2 gets 3/4
659 * Setting bit 2 of DC_LB_MEMORY_SPLIT controls switches to manual allocation mode.
660 * In manual allocation mode, D1 always starts at 0, D1 end/2 is specified in bits
661 * 14:4; D2 allocation follows D1.
662 */
663
664 /* is auto or manual better ? */
665 dc_lb_memory_split =
666 RREG32(AVIVO_DC_LB_MEMORY_SPLIT) & ~AVIVO_DC_LB_MEMORY_SPLIT_MASK;
667 dc_lb_memory_split &= ~AVIVO_DC_LB_MEMORY_SPLIT_SHIFT_MODE;
668#if 1
669 /* auto: give the wider display more of the buffer; >2560 needs 3/4 */
670 if (mode1 && mode2) {
671 if (mode1->hdisplay > mode2->hdisplay) {
672 if (mode1->hdisplay > 2560)
673 dc_lb_memory_split |=
674 AVIVO_DC_LB_MEMORY_SPLIT_D1_3Q_D2_1Q;
675 else
676 dc_lb_memory_split |=
677 AVIVO_DC_LB_MEMORY_SPLIT_D1HALF_D2HALF;
678 } else if (mode2->hdisplay > mode1->hdisplay) {
679 if (mode2->hdisplay > 2560)
680 dc_lb_memory_split |=
681 AVIVO_DC_LB_MEMORY_SPLIT_D1_1Q_D2_3Q;
682 else
683 dc_lb_memory_split |=
684 AVIVO_DC_LB_MEMORY_SPLIT_D1HALF_D2HALF;
685 } else
686 dc_lb_memory_split |=
687 AVIVO_DC_LB_MEMORY_SPLIT_D1HALF_D2HALF;
688 } else if (mode1) {
689 dc_lb_memory_split |= AVIVO_DC_LB_MEMORY_SPLIT_D1_ONLY;
690 } else if (mode2) {
691 dc_lb_memory_split |= AVIVO_DC_LB_MEMORY_SPLIT_D1_1Q_D2_3Q;
692 }
693#else
694 /* manual (disabled): program D1's end address directly */
695 dc_lb_memory_split |= AVIVO_DC_LB_MEMORY_SPLIT_SHIFT_MODE;
696 dc_lb_memory_split &=
697 ~(AVIVO_DC_LB_DISP1_END_ADR_MASK <<
698 AVIVO_DC_LB_DISP1_END_ADR_SHIFT);
699 if (mode1) {
700 dc_lb_memory_split |=
701 ((((mode1->hdisplay / 2) + 64) & AVIVO_DC_LB_DISP1_END_ADR_MASK)
702 << AVIVO_DC_LB_DISP1_END_ADR_SHIFT);
703 } else if (mode2) {
704 dc_lb_memory_split |= (0 << AVIVO_DC_LB_DISP1_END_ADR_SHIFT);
705 }
706#endif
707 WREG32(AVIVO_DC_LB_MEMORY_SPLIT, dc_lb_memory_split);
708}
diff --git a/drivers/gpu/drm/radeon/r100.c b/drivers/gpu/drm/radeon/r100.c
index 69bd7cb59972..0e00fef0b84f 100644
--- a/drivers/gpu/drm/radeon/r100.c
+++ b/drivers/gpu/drm/radeon/r100.c
@@ -1784,3 +1784,486 @@ void r100_clear_surface_reg(struct radeon_device *rdev, int reg)
1784 int surf_index = reg * 16; 1784 int surf_index = reg * 16;
1785 WREG32(RADEON_SURFACE0_INFO + surf_index, 0); 1785 WREG32(RADEON_SURFACE0_INFO + surf_index, 0);
1786} 1786}
1787
/* r100_bandwidth_update - program display-FIFO watermarks for r1xx-r4xx.
 * Derives memory-controller latency from DRAM timing registers (Trcd,
 * Trp, Tras, Tcas, Trbs), computes how fast each active CRTC drains its
 * display buffer, and writes GRPH_BUFFER_CNTL / GRPH2_BUFFER_CNTL stop
 * request and critical-point fields so CRTC requests are serviced before
 * the FIFO underruns. All latency math is done in 20.12 fixed point.
 */
1788void r100_bandwidth_update(struct radeon_device *rdev)
1789{
1790 fixed20_12 trcd_ff, trp_ff, tras_ff, trbs_ff, tcas_ff;
1791 fixed20_12 sclk_ff, mclk_ff, sclk_eff_ff, sclk_delay_ff;
1792 fixed20_12 peak_disp_bw, mem_bw, pix_clk, pix_clk2, temp_ff, crit_point_ff;
1793 uint32_t temp, data, mem_trcd, mem_trp, mem_tras;
/* CAS latency lookup tables indexed by the 3-bit field from
 * MEM_SDRAM_MODE_REG. NOTE(review): memtcas_ff lists only 7 of 8
 * initializers; entry [7] is implicitly zero-initialized. */
1794 fixed20_12 memtcas_ff[8] = {
1795 fixed_init(1),
1796 fixed_init(2),
1797 fixed_init(3),
1798 fixed_init(0),
1799 fixed_init_half(1),
1800 fixed_init_half(2),
1801 fixed_init(0),
1802 };
1803 fixed20_12 memtcas_rs480_ff[8] = {
1804 fixed_init(0),
1805 fixed_init(1),
1806 fixed_init(2),
1807 fixed_init(3),
1808 fixed_init(0),
1809 fixed_init_half(1),
1810 fixed_init_half(2),
1811 fixed_init_half(3),
1812 };
1813 fixed20_12 memtcas2_ff[8] = {
1814 fixed_init(0),
1815 fixed_init(1),
1816 fixed_init(2),
1817 fixed_init(3),
1818 fixed_init(4),
1819 fixed_init(5),
1820 fixed_init(6),
1821 fixed_init(7),
1822 };
1823 fixed20_12 memtrbs[8] = {
1824 fixed_init(1),
1825 fixed_init_half(1),
1826 fixed_init(2),
1827 fixed_init_half(2),
1828 fixed_init(3),
1829 fixed_init_half(3),
1830 fixed_init(4),
1831 fixed_init_half(4)
1832 };
1833 fixed20_12 memtrbs_r4xx[8] = {
1834 fixed_init(4),
1835 fixed_init(5),
1836 fixed_init(6),
1837 fixed_init(7),
1838 fixed_init(8),
1839 fixed_init(9),
1840 fixed_init(10),
1841 fixed_init(11)
1842 };
1843 fixed20_12 min_mem_eff;
1844 fixed20_12 mc_latency_sclk, mc_latency_mclk, k1;
1845 fixed20_12 cur_latency_mclk, cur_latency_sclk;
1846 fixed20_12 disp_latency, disp_latency_overhead, disp_drain_rate,
1847 disp_drain_rate2, read_return_rate;
1848 fixed20_12 time_disp1_drop_priority;
1849 int c;
1850 int cur_size = 16; /* in octawords */
1851 int critical_point = 0, critical_point2;
1852/* uint32_t read_return_rate, time_disp1_drop_priority; */
1853 int stop_req, max_stop_req;
1854 struct drm_display_mode *mode1 = NULL;
1855 struct drm_display_mode *mode2 = NULL;
1856 uint32_t pixel_bytes1 = 0;
1857 uint32_t pixel_bytes2 = 0;
1858
/* collect mode and bpp of each enabled CRTC. NOTE(review): base.fb is
 * dereferenced without a NULL check — assumes a framebuffer is always
 * bound while the CRTC is enabled; verify against the modeset path. */
1859 if (rdev->mode_info.crtcs[0]->base.enabled) {
1860 mode1 = &rdev->mode_info.crtcs[0]->base.mode;
1861 pixel_bytes1 = rdev->mode_info.crtcs[0]->base.fb->bits_per_pixel / 8;
1862 }
1863 if (rdev->mode_info.crtcs[1]->base.enabled) {
1864 mode2 = &rdev->mode_info.crtcs[1]->base.mode;
1865 pixel_bytes2 = rdev->mode_info.crtcs[1]->base.fb->bits_per_pixel / 8;
1866 }
1867
/* assumed minimum memory efficiency; rfixed_const_8(0) presumably
 * encodes 0.8 in 20.12 fixed point — TODO confirm in radeon_fixed.h */
1868 min_mem_eff.full = rfixed_const_8(0);
1869 /* get modes */
1870 if ((rdev->disp_priority == 2) && ASIC_IS_R300(rdev)) {
1871 uint32_t mc_init_misc_lat_timer = RREG32(R300_MC_INIT_MISC_LAT_TIMER);
1872 mc_init_misc_lat_timer &= ~(R300_MC_DISP1R_INIT_LAT_MASK << R300_MC_DISP1R_INIT_LAT_SHIFT);
1873 mc_init_misc_lat_timer &= ~(R300_MC_DISP0R_INIT_LAT_MASK << R300_MC_DISP0R_INIT_LAT_SHIFT);
1874 /* check crtc enables */
1875 if (mode2)
1876 mc_init_misc_lat_timer |= (1 << R300_MC_DISP1R_INIT_LAT_SHIFT);
1877 if (mode1)
1878 mc_init_misc_lat_timer |= (1 << R300_MC_DISP0R_INIT_LAT_SHIFT);
1879 WREG32(R300_MC_INIT_MISC_LAT_TIMER, mc_init_misc_lat_timer);
1880 }
1881
1882 /*
1883 * determine if there is enough bandwidth for the current mode
1884 */
1885 mclk_ff.full = rfixed_const(rdev->clock.default_mclk);
1886 temp_ff.full = rfixed_const(100);
1887 mclk_ff.full = rfixed_div(mclk_ff, temp_ff);
1888 sclk_ff.full = rfixed_const(rdev->clock.default_sclk);
1889 sclk_ff.full = rfixed_div(sclk_ff, temp_ff);
1890
/* memory bus bytes per clock: width/8 bytes, doubled for DDR */
1891 temp = (rdev->mc.vram_width / 8) * (rdev->mc.vram_is_ddr ? 2 : 1);
1892 temp_ff.full = rfixed_const(temp);
1893 mem_bw.full = rfixed_mul(mclk_ff, temp_ff);
1894
1895 pix_clk.full = 0;
1896 pix_clk2.full = 0;
1897 peak_disp_bw.full = 0;
1898 if (mode1) {
1899 temp_ff.full = rfixed_const(1000);
1900 pix_clk.full = rfixed_const(mode1->clock); /* convert to fixed point */
1901 pix_clk.full = rfixed_div(pix_clk, temp_ff);
1902 temp_ff.full = rfixed_const(pixel_bytes1);
1903 peak_disp_bw.full += rfixed_mul(pix_clk, temp_ff);
1904 }
1905 if (mode2) {
1906 temp_ff.full = rfixed_const(1000);
1907 pix_clk2.full = rfixed_const(mode2->clock); /* convert to fixed point */
1908 pix_clk2.full = rfixed_div(pix_clk2, temp_ff);
1909 temp_ff.full = rfixed_const(pixel_bytes2);
1910 peak_disp_bw.full += rfixed_mul(pix_clk2, temp_ff);
1911 }
1912
/* warn only; the mode is still programmed */
1913 mem_bw.full = rfixed_mul(mem_bw, min_mem_eff);
1914 if (peak_disp_bw.full >= mem_bw.full) {
1915 DRM_ERROR("You may not have enough display bandwidth for current mode\n"
1916 "If you have flickering problem, try to lower resolution, refresh rate, or color depth\n");
1917 }
1918
/* decode DRAM timing (Trcd/Trp/Tras); field layout and bias differ per
 * family, hence the chip-specific extraction below */
1919 /* Get values from the EXT_MEM_CNTL register...converting its contents. */
1920 temp = RREG32(RADEON_MEM_TIMING_CNTL);
1921 if ((rdev->family == CHIP_RV100) || (rdev->flags & RADEON_IS_IGP)) { /* RV100, M6, IGPs */
1922 mem_trcd = ((temp >> 2) & 0x3) + 1;
1923 mem_trp = ((temp & 0x3)) + 1;
1924 mem_tras = ((temp & 0x70) >> 4) + 1;
1925 } else if (rdev->family == CHIP_R300 ||
1926 rdev->family == CHIP_R350) { /* r300, r350 */
1927 mem_trcd = (temp & 0x7) + 1;
1928 mem_trp = ((temp >> 8) & 0x7) + 1;
1929 mem_tras = ((temp >> 11) & 0xf) + 4;
1930 } else if (rdev->family == CHIP_RV350 ||
1931 rdev->family <= CHIP_RV380) {
1932 /* rv3x0 */
1933 mem_trcd = (temp & 0x7) + 3;
1934 mem_trp = ((temp >> 8) & 0x7) + 3;
1935 mem_tras = ((temp >> 11) & 0xf) + 6;
1936 } else if (rdev->family == CHIP_R420 ||
1937 rdev->family == CHIP_R423 ||
1938 rdev->family == CHIP_RV410) {
1939 /* r4xx */
1940 mem_trcd = (temp & 0xf) + 3;
1941 if (mem_trcd > 15)
1942 mem_trcd = 15;
1943 mem_trp = ((temp >> 8) & 0xf) + 3;
1944 if (mem_trp > 15)
1945 mem_trp = 15;
1946 mem_tras = ((temp >> 12) & 0x1f) + 6;
1947 if (mem_tras > 31)
1948 mem_tras = 31;
1949 } else { /* RV200, R200 */
1950 mem_trcd = (temp & 0x7) + 1;
1951 mem_trp = ((temp >> 8) & 0x7) + 1;
1952 mem_tras = ((temp >> 12) & 0xf) + 4;
1953 }
1954 /* convert to FF */
1955 trcd_ff.full = rfixed_const(mem_trcd);
1956 trp_ff.full = rfixed_const(mem_trp);
1957 tras_ff.full = rfixed_const(mem_tras);
1958
1959 /* Get values from the MEM_SDRAM_MODE_REG register...converting its */
1960 temp = RREG32(RADEON_MEM_SDRAM_MODE_REG);
1961 data = (temp & (7 << 20)) >> 20;
1962 if ((rdev->family == CHIP_RV100) || rdev->flags & RADEON_IS_IGP) {
1963 if (rdev->family == CHIP_RS480) /* don't think rs400 */
1964 tcas_ff = memtcas_rs480_ff[data];
1965 else
1966 tcas_ff = memtcas_ff[data];
1967 } else
1968 tcas_ff = memtcas2_ff[data];
1969
1970 if (rdev->family == CHIP_RS400 ||
1971 rdev->family == CHIP_RS480) {
1972 /* extra cas latency stored in bits 23-25 0-4 clocks */
1973 data = (temp >> 23) & 0x7;
1974 if (data < 5)
1975 tcas_ff.full += rfixed_const(data);
1976 }
1977
1978 if (ASIC_IS_R300(rdev) && !(rdev->flags & RADEON_IS_IGP)) {
1979 /* on the R300, Tcas is included in Trbs.
1980 */
1981 temp = RREG32(RADEON_MEM_CNTL);
1982 data = (R300_MEM_NUM_CHANNELS_MASK & temp);
1983 if (data == 1) {
1984 if (R300_MEM_USE_CD_CH_ONLY & temp) {
1985 temp = RREG32(R300_MC_IND_INDEX);
1986 temp &= ~R300_MC_IND_ADDR_MASK;
1987 temp |= R300_MC_READ_CNTL_CD_mcind;
1988 WREG32(R300_MC_IND_INDEX, temp);
1989 temp = RREG32(R300_MC_IND_DATA);
1990 data = (R300_MEM_RBS_POSITION_C_MASK & temp);
1991 } else {
1992 temp = RREG32(R300_MC_READ_CNTL_AB);
1993 data = (R300_MEM_RBS_POSITION_A_MASK & temp);
1994 }
1995 } else {
1996 temp = RREG32(R300_MC_READ_CNTL_AB);
1997 data = (R300_MEM_RBS_POSITION_A_MASK & temp);
1998 }
1999 if (rdev->family == CHIP_RV410 ||
2000 rdev->family == CHIP_R420 ||
2001 rdev->family == CHIP_R423)
2002 trbs_ff = memtrbs_r4xx[data];
2003 else
2004 trbs_ff = memtrbs[data];
2005 tcas_ff.full += trbs_ff.full;
2006 }
2007
2008 sclk_eff_ff.full = sclk_ff.full;
2009
/* AGP transfers steal engine-clock cycles; derate sclk accordingly */
2010 if (rdev->flags & RADEON_IS_AGP) {
2011 fixed20_12 agpmode_ff;
2012 agpmode_ff.full = rfixed_const(radeon_agpmode);
2013 temp_ff.full = rfixed_const_666(16);
2014 sclk_eff_ff.full -= rfixed_mul(agpmode_ff, temp_ff);
2015 }
2016 /* TODO PCIE lanes may affect this - agpmode == 16?? */
2017
2018 if (ASIC_IS_R300(rdev)) {
2019 sclk_delay_ff.full = rfixed_const(250);
2020 } else {
2021 if ((rdev->family == CHIP_RV100) ||
2022 rdev->flags & RADEON_IS_IGP) {
2023 if (rdev->mc.vram_is_ddr)
2024 sclk_delay_ff.full = rfixed_const(41);
2025 else
2026 sclk_delay_ff.full = rfixed_const(33);
2027 } else {
2028 if (rdev->mc.vram_width == 128)
2029 sclk_delay_ff.full = rfixed_const(57);
2030 else
2031 sclk_delay_ff.full = rfixed_const(41);
2032 }
2033 }
2034
2035 mc_latency_sclk.full = rfixed_div(sclk_delay_ff, sclk_eff_ff);
2036
2037 if (rdev->mc.vram_is_ddr) {
2038 if (rdev->mc.vram_width == 32) {
2039 k1.full = rfixed_const(40);
2040 c = 3;
2041 } else {
2042 k1.full = rfixed_const(20);
2043 c = 1;
2044 }
2045 } else {
2046 k1.full = rfixed_const(40);
2047 c = 3;
2048 }
2049
/* MC latency in the memory-clock domain:
 * 2*Trcd + c*Tcas + 4*(Tras + Trp) + k1, then converted to time by
 * dividing by mclk, plus a 4-cycle sclk term */
2050 temp_ff.full = rfixed_const(2);
2051 mc_latency_mclk.full = rfixed_mul(trcd_ff, temp_ff);
2052 temp_ff.full = rfixed_const(c);
2053 mc_latency_mclk.full += rfixed_mul(tcas_ff, temp_ff);
2054 temp_ff.full = rfixed_const(4);
2055 mc_latency_mclk.full += rfixed_mul(tras_ff, temp_ff);
2056 mc_latency_mclk.full += rfixed_mul(trp_ff, temp_ff);
2057 mc_latency_mclk.full += k1.full;
2058
2059 mc_latency_mclk.full = rfixed_div(mc_latency_mclk, mclk_ff);
2060 mc_latency_mclk.full += rfixed_div(temp_ff, sclk_eff_ff);
2061
2062 /*
2063 HW cursor time assuming worst case of full size colour cursor.
2064 */
2065 temp_ff.full = rfixed_const((2 * (cur_size - (rdev->mc.vram_is_ddr + 1))));
2066 temp_ff.full += trcd_ff.full;
2067 if (temp_ff.full < tras_ff.full)
2068 temp_ff.full = tras_ff.full;
2069 cur_latency_mclk.full = rfixed_div(temp_ff, mclk_ff);
2070
2071 temp_ff.full = rfixed_const(cur_size);
2072 cur_latency_sclk.full = rfixed_div(temp_ff, sclk_eff_ff);
2073 /*
2074 Find the total latency for the display data.
2075 */
2076 disp_latency_overhead.full = rfixed_const(80);
2077 disp_latency_overhead.full = rfixed_div(disp_latency_overhead, sclk_ff);
2078 mc_latency_mclk.full += disp_latency_overhead.full + cur_latency_mclk.full;
2079 mc_latency_sclk.full += disp_latency_overhead.full + cur_latency_sclk.full;
2080
/* worst case of the two clock domains bounds the display latency */
2081 if (mc_latency_mclk.full > mc_latency_sclk.full)
2082 disp_latency.full = mc_latency_mclk.full;
2083 else
2084 disp_latency.full = mc_latency_sclk.full;
2085
2086 /* setup Max GRPH_STOP_REQ default value */
2087 if (ASIC_IS_RV100(rdev))
2088 max_stop_req = 0x5c;
2089 else
2090 max_stop_req = 0x7c;
2091
2092 if (mode1) {
2093 /* CRTC1
2094 Set GRPH_BUFFER_CNTL register using h/w defined optimal values.
2095 GRPH_STOP_REQ <= MIN[ 0x7C, (CRTC_H_DISP + 1) * (bit depth) / 0x10 ]
2096 */
2097 stop_req = mode1->hdisplay * pixel_bytes1 / 16;
2098
2099 if (stop_req > max_stop_req)
2100 stop_req = max_stop_req;
2101
2102 /*
2103 Find the drain rate of the display buffer.
2104 */
2105 temp_ff.full = rfixed_const((16/pixel_bytes1));
2106 disp_drain_rate.full = rfixed_div(pix_clk, temp_ff);
2107
2108 /*
2109 Find the critical point of the display buffer.
2110 */
2111 crit_point_ff.full = rfixed_mul(disp_drain_rate, disp_latency);
2112 crit_point_ff.full += rfixed_const_half(0);
2113
2114 critical_point = rfixed_trunc(crit_point_ff);
2115
2116 if (rdev->disp_priority == 2) {
2117 critical_point = 0;
2118 }
2119
2120 /*
2121 The critical point should never be above max_stop_req-4. Setting
2122 GRPH_CRITICAL_CNTL = 0 will thus force high priority all the time.
2123 */
2124 if (max_stop_req - critical_point < 4)
2125 critical_point = 0;
2126
2127 if (critical_point == 0 && mode2 && rdev->family == CHIP_R300) {
2128 /* some R300 cards have problem with this set to 0, when CRTC2 is enabled.*/
2129 critical_point = 0x10;
2130 }
2131
2132 temp = RREG32(RADEON_GRPH_BUFFER_CNTL);
2133 temp &= ~(RADEON_GRPH_STOP_REQ_MASK);
2134 temp |= (stop_req << RADEON_GRPH_STOP_REQ_SHIFT);
2135 temp &= ~(RADEON_GRPH_START_REQ_MASK);
2136 if ((rdev->family == CHIP_R350) &&
2137 (stop_req > 0x15)) {
2138 stop_req -= 0x10;
2139 }
2140 temp |= (stop_req << RADEON_GRPH_START_REQ_SHIFT);
2141 temp |= RADEON_GRPH_BUFFER_SIZE;
2142 temp &= ~(RADEON_GRPH_CRITICAL_CNTL |
2143 RADEON_GRPH_CRITICAL_AT_SOF |
2144 RADEON_GRPH_STOP_CNTL);
2145 /*
2146 Write the result into the register.
2147 */
2148 WREG32(RADEON_GRPH_BUFFER_CNTL, ((temp & ~RADEON_GRPH_CRITICAL_POINT_MASK) |
2149 (critical_point << RADEON_GRPH_CRITICAL_POINT_SHIFT)));
2150
2151#if 0
2152 if ((rdev->family == CHIP_RS400) ||
2153 (rdev->family == CHIP_RS480)) {
2154 /* attempt to program RS400 disp regs correctly ??? */
2155 temp = RREG32(RS400_DISP1_REG_CNTL);
2156 temp &= ~(RS400_DISP1_START_REQ_LEVEL_MASK |
2157 RS400_DISP1_STOP_REQ_LEVEL_MASK);
2158 WREG32(RS400_DISP1_REQ_CNTL1, (temp |
2159 (critical_point << RS400_DISP1_START_REQ_LEVEL_SHIFT) |
2160 (critical_point << RS400_DISP1_STOP_REQ_LEVEL_SHIFT)));
2161 temp = RREG32(RS400_DMIF_MEM_CNTL1);
2162 temp &= ~(RS400_DISP1_CRITICAL_POINT_START_MASK |
2163 RS400_DISP1_CRITICAL_POINT_STOP_MASK);
2164 WREG32(RS400_DMIF_MEM_CNTL1, (temp |
2165 (critical_point << RS400_DISP1_CRITICAL_POINT_START_SHIFT) |
2166 (critical_point << RS400_DISP1_CRITICAL_POINT_STOP_SHIFT)));
2167 }
2168#endif
2169
2170 DRM_DEBUG("GRPH_BUFFER_CNTL from to %x\n",
2171 /* (unsigned int)info->SavedReg->grph_buffer_cntl, */
2172 (unsigned int)RREG32(RADEON_GRPH_BUFFER_CNTL));
2173 }
2174
2175 if (mode2) {
2176 u32 grph2_cntl;
2177 stop_req = mode2->hdisplay * pixel_bytes2 / 16;
2178
2179 if (stop_req > max_stop_req)
2180 stop_req = max_stop_req;
2181
2182 /*
2183 Find the drain rate of the display buffer.
2184 */
2185 temp_ff.full = rfixed_const((16/pixel_bytes2));
2186 disp_drain_rate2.full = rfixed_div(pix_clk2, temp_ff);
2187
2188 grph2_cntl = RREG32(RADEON_GRPH2_BUFFER_CNTL);
2189 grph2_cntl &= ~(RADEON_GRPH_STOP_REQ_MASK);
2190 grph2_cntl |= (stop_req << RADEON_GRPH_STOP_REQ_SHIFT);
2191 grph2_cntl &= ~(RADEON_GRPH_START_REQ_MASK);
2192 if ((rdev->family == CHIP_R350) &&
2193 (stop_req > 0x15)) {
2194 stop_req -= 0x10;
2195 }
2196 grph2_cntl |= (stop_req << RADEON_GRPH_START_REQ_SHIFT);
2197 grph2_cntl |= RADEON_GRPH_BUFFER_SIZE;
2198 grph2_cntl &= ~(RADEON_GRPH_CRITICAL_CNTL |
2199 RADEON_GRPH_CRITICAL_AT_SOF |
2200 RADEON_GRPH_STOP_CNTL);
2201
2202 if ((rdev->family == CHIP_RS100) ||
2203 (rdev->family == CHIP_RS200))
2204 critical_point2 = 0;
2205 else {
/* read return rate: min(memory bandwidth per 128-bit beat, sclk).
 * NOTE(review): the `* rdev->mc.vram_is_ddr + 1` term is evaluated as
 * (width * is_ddr) + 1 by C precedence — confirm whether
 * (width * (is_ddr + 1)) was intended. */
2206 temp = (rdev->mc.vram_width * rdev->mc.vram_is_ddr + 1)/128;
2207 temp_ff.full = rfixed_const(temp);
2208 temp_ff.full = rfixed_mul(mclk_ff, temp_ff);
2209 if (sclk_ff.full < temp_ff.full)
2210 temp_ff.full = sclk_ff.full;
2211
2212 read_return_rate.full = temp_ff.full;
2213
2214 if (mode1) {
2215 temp_ff.full = read_return_rate.full - disp_drain_rate.full;
2216 time_disp1_drop_priority.full = rfixed_div(crit_point_ff, temp_ff);
2217 } else {
2218 time_disp1_drop_priority.full = 0;
2219 }
2220 crit_point_ff.full = disp_latency.full + time_disp1_drop_priority.full + disp_latency.full;
2221 crit_point_ff.full = rfixed_mul(crit_point_ff, disp_drain_rate2);
2222 crit_point_ff.full += rfixed_const_half(0);
2223
2224 critical_point2 = rfixed_trunc(crit_point_ff);
2225
2226 if (rdev->disp_priority == 2) {
2227 critical_point2 = 0;
2228 }
2229
2230 if (max_stop_req - critical_point2 < 4)
2231 critical_point2 = 0;
2232
2233 }
2234
2235 if (critical_point2 == 0 && rdev->family == CHIP_R300) {
2236 /* some R300 cards have problem with this set to 0 */
2237 critical_point2 = 0x10;
2238 }
2239
2240 WREG32(RADEON_GRPH2_BUFFER_CNTL, ((grph2_cntl & ~RADEON_GRPH_CRITICAL_POINT_MASK) |
2241 (critical_point2 << RADEON_GRPH_CRITICAL_POINT_SHIFT)));
2242
2243 if ((rdev->family == CHIP_RS400) ||
2244 (rdev->family == CHIP_RS480)) {
2245#if 0
2246 /* attempt to program RS400 disp2 regs correctly ??? */
2247 temp = RREG32(RS400_DISP2_REQ_CNTL1);
2248 temp &= ~(RS400_DISP2_START_REQ_LEVEL_MASK |
2249 RS400_DISP2_STOP_REQ_LEVEL_MASK);
2250 WREG32(RS400_DISP2_REQ_CNTL1, (temp |
2251 (critical_point2 << RS400_DISP1_START_REQ_LEVEL_SHIFT) |
2252 (critical_point2 << RS400_DISP1_STOP_REQ_LEVEL_SHIFT)));
2253 temp = RREG32(RS400_DISP2_REQ_CNTL2);
2254 temp &= ~(RS400_DISP2_CRITICAL_POINT_START_MASK |
2255 RS400_DISP2_CRITICAL_POINT_STOP_MASK);
2256 WREG32(RS400_DISP2_REQ_CNTL2, (temp |
2257 (critical_point2 << RS400_DISP2_CRITICAL_POINT_START_SHIFT) |
2258 (critical_point2 << RS400_DISP2_CRITICAL_POINT_STOP_SHIFT)));
2259#endif
/* hardcoded magic values for RS400/RS480 display request tuning */
2260 WREG32(RS400_DISP2_REQ_CNTL1, 0x105DC1CC);
2261 WREG32(RS400_DISP2_REQ_CNTL2, 0x2749D000);
2262 WREG32(RS400_DMIF_MEM_CNTL1, 0x29CA71DC);
2263 WREG32(RS400_DISP1_REQ_CNTL1, 0x28FBC3AC);
2264 }
2265
2266 DRM_DEBUG("GRPH2_BUFFER_CNTL from to %x\n",
2267 (unsigned int)RREG32(RADEON_GRPH2_BUFFER_CNTL));
2268 }
2269}
diff --git a/drivers/gpu/drm/radeon/r300.c b/drivers/gpu/drm/radeon/r300.c
index 28e5777658be..9c8d41534a5d 100644
--- a/drivers/gpu/drm/radeon/r300.c
+++ b/drivers/gpu/drm/radeon/r300.c
@@ -31,6 +31,7 @@
31#include "radeon_reg.h" 31#include "radeon_reg.h"
32#include "radeon.h" 32#include "radeon.h"
33#include "radeon_drm.h" 33#include "radeon_drm.h"
34#include "radeon_share.h"
34 35
35/* r300,r350,rv350,rv370,rv380 depends on : */ 36/* r300,r350,rv350,rv370,rv380 depends on : */
36void r100_hdp_reset(struct radeon_device *rdev); 37void r100_hdp_reset(struct radeon_device *rdev);
diff --git a/drivers/gpu/drm/radeon/r520.c b/drivers/gpu/drm/radeon/r520.c
index b6bd3758db6b..0a981e2ee2f8 100644
--- a/drivers/gpu/drm/radeon/r520.c
+++ b/drivers/gpu/drm/radeon/r520.c
@@ -28,6 +28,7 @@
28#include "drmP.h" 28#include "drmP.h"
29#include "radeon_reg.h" 29#include "radeon_reg.h"
30#include "radeon.h" 30#include "radeon.h"
31#include "radeon_share.h"
31 32
32/* r520,rv530,rv560,rv570,r580 depends on : */ 33/* r520,rv530,rv560,rv570,r580 depends on : */
33void r100_hdp_reset(struct radeon_device *rdev); 34void r100_hdp_reset(struct radeon_device *rdev);
@@ -226,7 +227,20 @@ static void r520_vram_get_type(struct radeon_device *rdev)
226 227
227void r520_vram_info(struct radeon_device *rdev) 228void r520_vram_info(struct radeon_device *rdev)
228{ 229{
230 fixed20_12 a;
231
229 r520_vram_get_type(rdev); 232 r520_vram_get_type(rdev);
230 233
231 r100_vram_init_sizes(rdev); 234 r100_vram_init_sizes(rdev);
235 /* FIXME: we should enforce default clock in case GPU is not in
236 * default setup
237 */
238 a.full = rfixed_const(100);
239 rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
240 rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
241}
242
243void r520_bandwidth_update(struct radeon_device *rdev)
244{
245 rv515_bandwidth_avivo_update(rdev);
232} 246}
diff --git a/drivers/gpu/drm/radeon/radeon.h b/drivers/gpu/drm/radeon/radeon.h
index af12a2fe3221..63a3fe32e584 100644
--- a/drivers/gpu/drm/radeon/radeon.h
+++ b/drivers/gpu/drm/radeon/radeon.h
@@ -113,6 +113,7 @@ enum radeon_family {
113 CHIP_RV770, 113 CHIP_RV770,
114 CHIP_RV730, 114 CHIP_RV730,
115 CHIP_RV710, 115 CHIP_RV710,
116 CHIP_RS880,
116 CHIP_LAST, 117 CHIP_LAST,
117}; 118};
118 119
@@ -490,6 +491,39 @@ struct radeon_wb {
490 uint64_t gpu_addr; 491 uint64_t gpu_addr;
491}; 492};
492 493
494/**
495 * struct radeon_pm - power management datas
496 * @max_bandwidth: maximum bandwidth the gpu has (MByte/s)
497 * @igp_sideport_mclk: sideport memory clock Mhz (rs690,rs740,rs780,rs880)
498 * @igp_system_mclk: system clock Mhz (rs690,rs740,rs780,rs880)
499 * @igp_ht_link_clk: ht link clock Mhz (rs690,rs740,rs780,rs880)
500 * @igp_ht_link_width: ht link width in bits (rs690,rs740,rs780,rs880)
501 * @k8_bandwidth: k8 bandwidth the gpu has (MByte/s) (IGP)
502 * @sideport_bandwidth: sideport bandwidth the gpu has (MByte/s) (IGP)
503 * @ht_bandwidth: ht bandwidth the gpu has (MByte/s) (IGP)
504 * @core_bandwidth: core GPU bandwidth the gpu has (MByte/s) (IGP)
505 * @sclk: GPU clock Mhz (core bandwith depends of this clock)
506 * @needed_bandwidth: current bandwidth needs
507 *
508 * It keeps track of various data needed to take powermanagement decision.
509 * Bandwith need is used to determine minimun clock of the GPU and memory.
510 * Equation between gpu/memory clock and available bandwidth is hw dependent
511 * (type of memory, bus size, efficiency, ...)
512 */
513struct radeon_pm {
514 fixed20_12 max_bandwidth;
515 fixed20_12 igp_sideport_mclk;
516 fixed20_12 igp_system_mclk;
517 fixed20_12 igp_ht_link_clk;
518 fixed20_12 igp_ht_link_width;
519 fixed20_12 k8_bandwidth;
520 fixed20_12 sideport_bandwidth;
521 fixed20_12 ht_bandwidth;
522 fixed20_12 core_bandwidth;
523 fixed20_12 sclk;
524 fixed20_12 needed_bandwidth;
525};
526
493 527
494/* 528/*
495 * Benchmarking 529 * Benchmarking
@@ -551,19 +585,17 @@ struct radeon_asic {
551 void (*set_memory_clock)(struct radeon_device *rdev, uint32_t mem_clock); 585 void (*set_memory_clock)(struct radeon_device *rdev, uint32_t mem_clock);
552 void (*set_pcie_lanes)(struct radeon_device *rdev, int lanes); 586 void (*set_pcie_lanes)(struct radeon_device *rdev, int lanes);
553 void (*set_clock_gating)(struct radeon_device *rdev, int enable); 587 void (*set_clock_gating)(struct radeon_device *rdev, int enable);
554
555 int (*set_surface_reg)(struct radeon_device *rdev, int reg, 588 int (*set_surface_reg)(struct radeon_device *rdev, int reg,
556 uint32_t tiling_flags, uint32_t pitch, 589 uint32_t tiling_flags, uint32_t pitch,
557 uint32_t offset, uint32_t obj_size); 590 uint32_t offset, uint32_t obj_size);
558 int (*clear_surface_reg)(struct radeon_device *rdev, int reg); 591 int (*clear_surface_reg)(struct radeon_device *rdev, int reg);
592 void (*bandwidth_update)(struct radeon_device *rdev);
559}; 593};
560 594
561union radeon_asic_config { 595union radeon_asic_config {
562 struct r300_asic r300; 596 struct r300_asic r300;
563}; 597};
564 598
565/* r100 */
566void r100_vram_init_sizes(struct radeon_device *rdev);
567 599
568/* 600/*
569 * IOCTL. 601 * IOCTL.
@@ -646,6 +678,7 @@ struct radeon_device {
646 struct radeon_irq irq; 678 struct radeon_irq irq;
647 struct radeon_asic *asic; 679 struct radeon_asic *asic;
648 struct radeon_gem gem; 680 struct radeon_gem gem;
681 struct radeon_pm pm;
649 struct mutex cs_mutex; 682 struct mutex cs_mutex;
650 struct radeon_wb wb; 683 struct radeon_wb wb;
651 bool gpu_lockup; 684 bool gpu_lockup;
@@ -829,5 +862,6 @@ static inline void radeon_ring_write(struct radeon_device *rdev, uint32_t v)
829#define radeon_set_clock_gating(rdev, e) (rdev)->asic->set_clock_gating((rdev), (e)) 862#define radeon_set_clock_gating(rdev, e) (rdev)->asic->set_clock_gating((rdev), (e))
830#define radeon_set_surface_reg(rdev, r, f, p, o, s) ((rdev)->asic->set_surface_reg((rdev), (r), (f), (p), (o), (s))) 863#define radeon_set_surface_reg(rdev, r, f, p, o, s) ((rdev)->asic->set_surface_reg((rdev), (r), (f), (p), (o), (s)))
831#define radeon_clear_surface_reg(rdev, r) ((rdev)->asic->clear_surface_reg((rdev), (r))) 864#define radeon_clear_surface_reg(rdev, r) ((rdev)->asic->clear_surface_reg((rdev), (r)))
865#define radeon_bandwidth_update(rdev) (rdev)->asic->bandwidth_update((rdev))
832 866
833#endif 867#endif
diff --git a/drivers/gpu/drm/radeon/radeon_asic.h b/drivers/gpu/drm/radeon/radeon_asic.h
index dd903d329406..9a75876e0c3b 100644
--- a/drivers/gpu/drm/radeon/radeon_asic.h
+++ b/drivers/gpu/drm/radeon/radeon_asic.h
@@ -75,6 +75,7 @@ int r100_set_surface_reg(struct radeon_device *rdev, int reg,
75 uint32_t tiling_flags, uint32_t pitch, 75 uint32_t tiling_flags, uint32_t pitch,
76 uint32_t offset, uint32_t obj_size); 76 uint32_t offset, uint32_t obj_size);
77int r100_clear_surface_reg(struct radeon_device *rdev, int reg); 77int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
78void r100_bandwidth_update(struct radeon_device *rdev);
78 79
79static struct radeon_asic r100_asic = { 80static struct radeon_asic r100_asic = {
80 .init = &r100_init, 81 .init = &r100_init,
@@ -106,6 +107,7 @@ static struct radeon_asic r100_asic = {
106 .set_clock_gating = &radeon_legacy_set_clock_gating, 107 .set_clock_gating = &radeon_legacy_set_clock_gating,
107 .set_surface_reg = r100_set_surface_reg, 108 .set_surface_reg = r100_set_surface_reg,
108 .clear_surface_reg = r100_clear_surface_reg, 109 .clear_surface_reg = r100_clear_surface_reg,
110 .bandwidth_update = &r100_bandwidth_update,
109}; 111};
110 112
111 113
@@ -165,6 +167,7 @@ static struct radeon_asic r300_asic = {
165 .set_clock_gating = &radeon_legacy_set_clock_gating, 167 .set_clock_gating = &radeon_legacy_set_clock_gating,
166 .set_surface_reg = r100_set_surface_reg, 168 .set_surface_reg = r100_set_surface_reg,
167 .clear_surface_reg = r100_clear_surface_reg, 169 .clear_surface_reg = r100_clear_surface_reg,
170 .bandwidth_update = &r100_bandwidth_update,
168}; 171};
169 172
170/* 173/*
@@ -204,6 +207,7 @@ static struct radeon_asic r420_asic = {
204 .set_clock_gating = &radeon_atom_set_clock_gating, 207 .set_clock_gating = &radeon_atom_set_clock_gating,
205 .set_surface_reg = r100_set_surface_reg, 208 .set_surface_reg = r100_set_surface_reg,
206 .clear_surface_reg = r100_clear_surface_reg, 209 .clear_surface_reg = r100_clear_surface_reg,
210 .bandwidth_update = &r100_bandwidth_update,
207}; 211};
208 212
209 213
@@ -250,6 +254,7 @@ static struct radeon_asic rs400_asic = {
250 .set_clock_gating = &radeon_legacy_set_clock_gating, 254 .set_clock_gating = &radeon_legacy_set_clock_gating,
251 .set_surface_reg = r100_set_surface_reg, 255 .set_surface_reg = r100_set_surface_reg,
252 .clear_surface_reg = r100_clear_surface_reg, 256 .clear_surface_reg = r100_clear_surface_reg,
257 .bandwidth_update = &r100_bandwidth_update,
253}; 258};
254 259
255 260
@@ -267,6 +272,7 @@ void rs600_gart_tlb_flush(struct radeon_device *rdev);
267int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); 272int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
268uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg); 273uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
269void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); 274void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
275void rs600_bandwidth_update(struct radeon_device *rdev);
270static struct radeon_asic rs600_asic = { 276static struct radeon_asic rs600_asic = {
271 .init = &r300_init, 277 .init = &r300_init,
272 .errata = &rs600_errata, 278 .errata = &rs600_errata,
@@ -295,6 +301,7 @@ static struct radeon_asic rs600_asic = {
295 .set_memory_clock = &radeon_atom_set_memory_clock, 301 .set_memory_clock = &radeon_atom_set_memory_clock,
296 .set_pcie_lanes = NULL, 302 .set_pcie_lanes = NULL,
297 .set_clock_gating = &radeon_atom_set_clock_gating, 303 .set_clock_gating = &radeon_atom_set_clock_gating,
304 .bandwidth_update = &rs600_bandwidth_update,
298}; 305};
299 306
300 307
@@ -307,6 +314,7 @@ int rs690_mc_init(struct radeon_device *rdev);
307void rs690_mc_fini(struct radeon_device *rdev); 314void rs690_mc_fini(struct radeon_device *rdev);
308uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg); 315uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
309void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); 316void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
317void rs690_bandwidth_update(struct radeon_device *rdev);
310static struct radeon_asic rs690_asic = { 318static struct radeon_asic rs690_asic = {
311 .init = &r300_init, 319 .init = &r300_init,
312 .errata = &rs690_errata, 320 .errata = &rs690_errata,
@@ -337,6 +345,7 @@ static struct radeon_asic rs690_asic = {
337 .set_clock_gating = &radeon_atom_set_clock_gating, 345 .set_clock_gating = &radeon_atom_set_clock_gating,
338 .set_surface_reg = r100_set_surface_reg, 346 .set_surface_reg = r100_set_surface_reg,
339 .clear_surface_reg = r100_clear_surface_reg, 347 .clear_surface_reg = r100_clear_surface_reg,
348 .bandwidth_update = &rs690_bandwidth_update,
340}; 349};
341 350
342 351
@@ -354,6 +363,7 @@ void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
354void rv515_ring_start(struct radeon_device *rdev); 363void rv515_ring_start(struct radeon_device *rdev);
355uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg); 364uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
356void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); 365void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
366void rv515_bandwidth_update(struct radeon_device *rdev);
357static struct radeon_asic rv515_asic = { 367static struct radeon_asic rv515_asic = {
358 .init = &rv515_init, 368 .init = &rv515_init,
359 .errata = &rv515_errata, 369 .errata = &rv515_errata,
@@ -384,6 +394,7 @@ static struct radeon_asic rv515_asic = {
384 .set_clock_gating = &radeon_atom_set_clock_gating, 394 .set_clock_gating = &radeon_atom_set_clock_gating,
385 .set_surface_reg = r100_set_surface_reg, 395 .set_surface_reg = r100_set_surface_reg,
386 .clear_surface_reg = r100_clear_surface_reg, 396 .clear_surface_reg = r100_clear_surface_reg,
397 .bandwidth_update = &rv515_bandwidth_update,
387}; 398};
388 399
389 400
@@ -394,6 +405,7 @@ void r520_errata(struct radeon_device *rdev);
394void r520_vram_info(struct radeon_device *rdev); 405void r520_vram_info(struct radeon_device *rdev);
395int r520_mc_init(struct radeon_device *rdev); 406int r520_mc_init(struct radeon_device *rdev);
396void r520_mc_fini(struct radeon_device *rdev); 407void r520_mc_fini(struct radeon_device *rdev);
408void r520_bandwidth_update(struct radeon_device *rdev);
397static struct radeon_asic r520_asic = { 409static struct radeon_asic r520_asic = {
398 .init = &rv515_init, 410 .init = &rv515_init,
399 .errata = &r520_errata, 411 .errata = &r520_errata,
@@ -424,6 +436,7 @@ static struct radeon_asic r520_asic = {
424 .set_clock_gating = &radeon_atom_set_clock_gating, 436 .set_clock_gating = &radeon_atom_set_clock_gating,
425 .set_surface_reg = r100_set_surface_reg, 437 .set_surface_reg = r100_set_surface_reg,
426 .clear_surface_reg = r100_clear_surface_reg, 438 .clear_surface_reg = r100_clear_surface_reg,
439 .bandwidth_update = &r520_bandwidth_update,
427}; 440};
428 441
429/* 442/*
diff --git a/drivers/gpu/drm/radeon/radeon_device.c b/drivers/gpu/drm/radeon/radeon_device.c
index f23083bbba3f..f78db5c8008c 100644
--- a/drivers/gpu/drm/radeon/radeon_device.c
+++ b/drivers/gpu/drm/radeon/radeon_device.c
@@ -561,6 +561,11 @@ int radeon_device_init(struct radeon_device *rdev,
561 radeon_combios_asic_init(rdev->ddev); 561 radeon_combios_asic_init(rdev->ddev);
562 } 562 }
563 } 563 }
564 /* Initialize clocks */
565 r = radeon_clocks_init(rdev);
566 if (r) {
567 return r;
568 }
564 /* Get vram informations */ 569 /* Get vram informations */
565 radeon_vram_info(rdev); 570 radeon_vram_info(rdev);
566 571
@@ -572,11 +577,6 @@ int radeon_device_init(struct radeon_device *rdev,
572 (unsigned)rdev->mc.aper_size >> 20); 577 (unsigned)rdev->mc.aper_size >> 20);
573 DRM_INFO("RAM width %dbits %cDR\n", 578 DRM_INFO("RAM width %dbits %cDR\n",
574 rdev->mc.vram_width, rdev->mc.vram_is_ddr ? 'D' : 'S'); 579 rdev->mc.vram_width, rdev->mc.vram_is_ddr ? 'D' : 'S');
575 /* Initialize clocks */
576 r = radeon_clocks_init(rdev);
577 if (r) {
578 return r;
579 }
580 /* Initialize memory controller (also test AGP) */ 580 /* Initialize memory controller (also test AGP) */
581 r = radeon_mc_init(rdev); 581 r = radeon_mc_init(rdev);
582 if (r) { 582 if (r) {
diff --git a/drivers/gpu/drm/radeon/radeon_display.c b/drivers/gpu/drm/radeon/radeon_display.c
index bc312f3d9a0a..a8fa1bb84cf7 100644
--- a/drivers/gpu/drm/radeon/radeon_display.c
+++ b/drivers/gpu/drm/radeon/radeon_display.c
@@ -187,6 +187,7 @@ static void radeon_crtc_init(struct drm_device *dev, int index)
187 187
188 drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256); 188 drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256);
189 radeon_crtc->crtc_id = index; 189 radeon_crtc->crtc_id = index;
190 rdev->mode_info.crtcs[index] = radeon_crtc;
190 191
191 radeon_crtc->mode_set.crtc = &radeon_crtc->base; 192 radeon_crtc->mode_set.crtc = &radeon_crtc->base;
192 radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1); 193 radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1);
@@ -661,36 +662,51 @@ void radeon_modeset_fini(struct radeon_device *rdev)
661 } 662 }
662} 663}
663 664
664void radeon_init_disp_bandwidth(struct drm_device *dev) 665bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
666 struct drm_display_mode *mode,
667 struct drm_display_mode *adjusted_mode)
665{ 668{
666 struct radeon_device *rdev = dev->dev_private; 669 struct drm_device *dev = crtc->dev;
667 struct drm_display_mode *modes[2]; 670 struct drm_encoder *encoder;
668 int pixel_bytes[2]; 671 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
669 struct drm_crtc *crtc; 672 struct radeon_encoder *radeon_encoder;
670 673 bool first = true;
671 pixel_bytes[0] = pixel_bytes[1] = 0;
672 modes[0] = modes[1] = NULL;
673
674 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
675 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
676 674
677 if (crtc->enabled && crtc->fb) { 675 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
678 modes[radeon_crtc->crtc_id] = &crtc->mode; 676 radeon_encoder = to_radeon_encoder(encoder);
679 pixel_bytes[radeon_crtc->crtc_id] = crtc->fb->bits_per_pixel / 8; 677 if (encoder->crtc != crtc)
678 continue;
679 if (first) {
680 radeon_crtc->rmx_type = radeon_encoder->rmx_type;
681 radeon_crtc->devices = radeon_encoder->devices;
682 memcpy(&radeon_crtc->native_mode,
683 &radeon_encoder->native_mode,
684 sizeof(struct radeon_native_mode));
685 first = false;
686 } else {
687 if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
688 /* WARNING: Right now this can't happen but
689 * in the future we need to check that scaling
690 * are consistent accross different encoder
691 * (ie all encoder can work with the same
692 * scaling).
693 */
694 DRM_ERROR("Scaling not consistent accross encoder.\n");
695 return false;
696 }
680 } 697 }
681 } 698 }
682 699 if (radeon_crtc->rmx_type != RMX_OFF) {
683 if (ASIC_IS_AVIVO(rdev)) { 700 fixed20_12 a, b;
684 radeon_init_disp_bw_avivo(dev, 701 a.full = rfixed_const(crtc->mode.vdisplay);
685 modes[0], 702 b.full = rfixed_const(radeon_crtc->native_mode.panel_xres);
686 pixel_bytes[0], 703 radeon_crtc->vsc.full = rfixed_div(a, b);
687 modes[1], 704 a.full = rfixed_const(crtc->mode.hdisplay);
688 pixel_bytes[1]); 705 b.full = rfixed_const(radeon_crtc->native_mode.panel_yres);
706 radeon_crtc->hsc.full = rfixed_div(a, b);
689 } else { 707 } else {
690 radeon_init_disp_bw_legacy(dev, 708 radeon_crtc->vsc.full = rfixed_const(1);
691 modes[0], 709 radeon_crtc->hsc.full = rfixed_const(1);
692 pixel_bytes[0],
693 modes[1],
694 pixel_bytes[1]);
695 } 710 }
711 return true;
696} 712}
diff --git a/drivers/gpu/drm/radeon/radeon_encoders.c b/drivers/gpu/drm/radeon/radeon_encoders.c
index ea15284e7580..0a92706eac19 100644
--- a/drivers/gpu/drm/radeon/radeon_encoders.c
+++ b/drivers/gpu/drm/radeon/radeon_encoders.c
@@ -154,7 +154,6 @@ void radeon_rmx_mode_fixup(struct drm_encoder *encoder,
154 154
155 if (mode->hdisplay < native_mode->panel_xres || 155 if (mode->hdisplay < native_mode->panel_xres ||
156 mode->vdisplay < native_mode->panel_yres) { 156 mode->vdisplay < native_mode->panel_yres) {
157 radeon_encoder->flags |= RADEON_USE_RMX;
158 if (ASIC_IS_AVIVO(rdev)) { 157 if (ASIC_IS_AVIVO(rdev)) {
159 adjusted_mode->hdisplay = native_mode->panel_xres; 158 adjusted_mode->hdisplay = native_mode->panel_xres;
160 adjusted_mode->vdisplay = native_mode->panel_yres; 159 adjusted_mode->vdisplay = native_mode->panel_yres;
@@ -197,15 +196,13 @@ void radeon_rmx_mode_fixup(struct drm_encoder *encoder,
197 } 196 }
198} 197}
199 198
199
200static bool radeon_atom_mode_fixup(struct drm_encoder *encoder, 200static bool radeon_atom_mode_fixup(struct drm_encoder *encoder,
201 struct drm_display_mode *mode, 201 struct drm_display_mode *mode,
202 struct drm_display_mode *adjusted_mode) 202 struct drm_display_mode *adjusted_mode)
203{ 203{
204
205 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 204 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
206 205
207 radeon_encoder->flags &= ~RADEON_USE_RMX;
208
209 drm_mode_set_crtcinfo(adjusted_mode, 0); 206 drm_mode_set_crtcinfo(adjusted_mode, 0);
210 207
211 if (radeon_encoder->rmx_type != RMX_OFF) 208 if (radeon_encoder->rmx_type != RMX_OFF)
@@ -808,234 +805,6 @@ atombios_dig_transmitter_setup(struct drm_encoder *encoder, int action)
808 805
809} 806}
810 807
811static void atom_rv515_force_tv_scaler(struct radeon_device *rdev)
812{
813
814 WREG32(0x659C, 0x0);
815 WREG32(0x6594, 0x705);
816 WREG32(0x65A4, 0x10001);
817 WREG32(0x65D8, 0x0);
818 WREG32(0x65B0, 0x0);
819 WREG32(0x65C0, 0x0);
820 WREG32(0x65D4, 0x0);
821 WREG32(0x6578, 0x0);
822 WREG32(0x657C, 0x841880A8);
823 WREG32(0x6578, 0x1);
824 WREG32(0x657C, 0x84208680);
825 WREG32(0x6578, 0x2);
826 WREG32(0x657C, 0xBFF880B0);
827 WREG32(0x6578, 0x100);
828 WREG32(0x657C, 0x83D88088);
829 WREG32(0x6578, 0x101);
830 WREG32(0x657C, 0x84608680);
831 WREG32(0x6578, 0x102);
832 WREG32(0x657C, 0xBFF080D0);
833 WREG32(0x6578, 0x200);
834 WREG32(0x657C, 0x83988068);
835 WREG32(0x6578, 0x201);
836 WREG32(0x657C, 0x84A08680);
837 WREG32(0x6578, 0x202);
838 WREG32(0x657C, 0xBFF080F8);
839 WREG32(0x6578, 0x300);
840 WREG32(0x657C, 0x83588058);
841 WREG32(0x6578, 0x301);
842 WREG32(0x657C, 0x84E08660);
843 WREG32(0x6578, 0x302);
844 WREG32(0x657C, 0xBFF88120);
845 WREG32(0x6578, 0x400);
846 WREG32(0x657C, 0x83188040);
847 WREG32(0x6578, 0x401);
848 WREG32(0x657C, 0x85008660);
849 WREG32(0x6578, 0x402);
850 WREG32(0x657C, 0xBFF88150);
851 WREG32(0x6578, 0x500);
852 WREG32(0x657C, 0x82D88030);
853 WREG32(0x6578, 0x501);
854 WREG32(0x657C, 0x85408640);
855 WREG32(0x6578, 0x502);
856 WREG32(0x657C, 0xBFF88180);
857 WREG32(0x6578, 0x600);
858 WREG32(0x657C, 0x82A08018);
859 WREG32(0x6578, 0x601);
860 WREG32(0x657C, 0x85808620);
861 WREG32(0x6578, 0x602);
862 WREG32(0x657C, 0xBFF081B8);
863 WREG32(0x6578, 0x700);
864 WREG32(0x657C, 0x82608010);
865 WREG32(0x6578, 0x701);
866 WREG32(0x657C, 0x85A08600);
867 WREG32(0x6578, 0x702);
868 WREG32(0x657C, 0x800081F0);
869 WREG32(0x6578, 0x800);
870 WREG32(0x657C, 0x8228BFF8);
871 WREG32(0x6578, 0x801);
872 WREG32(0x657C, 0x85E085E0);
873 WREG32(0x6578, 0x802);
874 WREG32(0x657C, 0xBFF88228);
875 WREG32(0x6578, 0x10000);
876 WREG32(0x657C, 0x82A8BF00);
877 WREG32(0x6578, 0x10001);
878 WREG32(0x657C, 0x82A08CC0);
879 WREG32(0x6578, 0x10002);
880 WREG32(0x657C, 0x8008BEF8);
881 WREG32(0x6578, 0x10100);
882 WREG32(0x657C, 0x81F0BF28);
883 WREG32(0x6578, 0x10101);
884 WREG32(0x657C, 0x83608CA0);
885 WREG32(0x6578, 0x10102);
886 WREG32(0x657C, 0x8018BED0);
887 WREG32(0x6578, 0x10200);
888 WREG32(0x657C, 0x8148BF38);
889 WREG32(0x6578, 0x10201);
890 WREG32(0x657C, 0x84408C80);
891 WREG32(0x6578, 0x10202);
892 WREG32(0x657C, 0x8008BEB8);
893 WREG32(0x6578, 0x10300);
894 WREG32(0x657C, 0x80B0BF78);
895 WREG32(0x6578, 0x10301);
896 WREG32(0x657C, 0x85008C20);
897 WREG32(0x6578, 0x10302);
898 WREG32(0x657C, 0x8020BEA0);
899 WREG32(0x6578, 0x10400);
900 WREG32(0x657C, 0x8028BF90);
901 WREG32(0x6578, 0x10401);
902 WREG32(0x657C, 0x85E08BC0);
903 WREG32(0x6578, 0x10402);
904 WREG32(0x657C, 0x8018BE90);
905 WREG32(0x6578, 0x10500);
906 WREG32(0x657C, 0xBFB8BFB0);
907 WREG32(0x6578, 0x10501);
908 WREG32(0x657C, 0x86C08B40);
909 WREG32(0x6578, 0x10502);
910 WREG32(0x657C, 0x8010BE90);
911 WREG32(0x6578, 0x10600);
912 WREG32(0x657C, 0xBF58BFC8);
913 WREG32(0x6578, 0x10601);
914 WREG32(0x657C, 0x87A08AA0);
915 WREG32(0x6578, 0x10602);
916 WREG32(0x657C, 0x8010BE98);
917 WREG32(0x6578, 0x10700);
918 WREG32(0x657C, 0xBF10BFF0);
919 WREG32(0x6578, 0x10701);
920 WREG32(0x657C, 0x886089E0);
921 WREG32(0x6578, 0x10702);
922 WREG32(0x657C, 0x8018BEB0);
923 WREG32(0x6578, 0x10800);
924 WREG32(0x657C, 0xBED8BFE8);
925 WREG32(0x6578, 0x10801);
926 WREG32(0x657C, 0x89408940);
927 WREG32(0x6578, 0x10802);
928 WREG32(0x657C, 0xBFE8BED8);
929 WREG32(0x6578, 0x20000);
930 WREG32(0x657C, 0x80008000);
931 WREG32(0x6578, 0x20001);
932 WREG32(0x657C, 0x90008000);
933 WREG32(0x6578, 0x20002);
934 WREG32(0x657C, 0x80008000);
935 WREG32(0x6578, 0x20003);
936 WREG32(0x657C, 0x80008000);
937 WREG32(0x6578, 0x20100);
938 WREG32(0x657C, 0x80108000);
939 WREG32(0x6578, 0x20101);
940 WREG32(0x657C, 0x8FE0BF70);
941 WREG32(0x6578, 0x20102);
942 WREG32(0x657C, 0xBFE880C0);
943 WREG32(0x6578, 0x20103);
944 WREG32(0x657C, 0x80008000);
945 WREG32(0x6578, 0x20200);
946 WREG32(0x657C, 0x8018BFF8);
947 WREG32(0x6578, 0x20201);
948 WREG32(0x657C, 0x8F80BF08);
949 WREG32(0x6578, 0x20202);
950 WREG32(0x657C, 0xBFD081A0);
951 WREG32(0x6578, 0x20203);
952 WREG32(0x657C, 0xBFF88000);
953 WREG32(0x6578, 0x20300);
954 WREG32(0x657C, 0x80188000);
955 WREG32(0x6578, 0x20301);
956 WREG32(0x657C, 0x8EE0BEC0);
957 WREG32(0x6578, 0x20302);
958 WREG32(0x657C, 0xBFB082A0);
959 WREG32(0x6578, 0x20303);
960 WREG32(0x657C, 0x80008000);
961 WREG32(0x6578, 0x20400);
962 WREG32(0x657C, 0x80188000);
963 WREG32(0x6578, 0x20401);
964 WREG32(0x657C, 0x8E00BEA0);
965 WREG32(0x6578, 0x20402);
966 WREG32(0x657C, 0xBF8883C0);
967 WREG32(0x6578, 0x20403);
968 WREG32(0x657C, 0x80008000);
969 WREG32(0x6578, 0x20500);
970 WREG32(0x657C, 0x80188000);
971 WREG32(0x6578, 0x20501);
972 WREG32(0x657C, 0x8D00BE90);
973 WREG32(0x6578, 0x20502);
974 WREG32(0x657C, 0xBF588500);
975 WREG32(0x6578, 0x20503);
976 WREG32(0x657C, 0x80008008);
977 WREG32(0x6578, 0x20600);
978 WREG32(0x657C, 0x80188000);
979 WREG32(0x6578, 0x20601);
980 WREG32(0x657C, 0x8BC0BE98);
981 WREG32(0x6578, 0x20602);
982 WREG32(0x657C, 0xBF308660);
983 WREG32(0x6578, 0x20603);
984 WREG32(0x657C, 0x80008008);
985 WREG32(0x6578, 0x20700);
986 WREG32(0x657C, 0x80108000);
987 WREG32(0x6578, 0x20701);
988 WREG32(0x657C, 0x8A80BEB0);
989 WREG32(0x6578, 0x20702);
990 WREG32(0x657C, 0xBF0087C0);
991 WREG32(0x6578, 0x20703);
992 WREG32(0x657C, 0x80008008);
993 WREG32(0x6578, 0x20800);
994 WREG32(0x657C, 0x80108000);
995 WREG32(0x6578, 0x20801);
996 WREG32(0x657C, 0x8920BED0);
997 WREG32(0x6578, 0x20802);
998 WREG32(0x657C, 0xBED08920);
999 WREG32(0x6578, 0x20803);
1000 WREG32(0x657C, 0x80008010);
1001 WREG32(0x6578, 0x30000);
1002 WREG32(0x657C, 0x90008000);
1003 WREG32(0x6578, 0x30001);
1004 WREG32(0x657C, 0x80008000);
1005 WREG32(0x6578, 0x30100);
1006 WREG32(0x657C, 0x8FE0BF90);
1007 WREG32(0x6578, 0x30101);
1008 WREG32(0x657C, 0xBFF880A0);
1009 WREG32(0x6578, 0x30200);
1010 WREG32(0x657C, 0x8F60BF40);
1011 WREG32(0x6578, 0x30201);
1012 WREG32(0x657C, 0xBFE88180);
1013 WREG32(0x6578, 0x30300);
1014 WREG32(0x657C, 0x8EC0BF00);
1015 WREG32(0x6578, 0x30301);
1016 WREG32(0x657C, 0xBFC88280);
1017 WREG32(0x6578, 0x30400);
1018 WREG32(0x657C, 0x8DE0BEE0);
1019 WREG32(0x6578, 0x30401);
1020 WREG32(0x657C, 0xBFA083A0);
1021 WREG32(0x6578, 0x30500);
1022 WREG32(0x657C, 0x8CE0BED0);
1023 WREG32(0x6578, 0x30501);
1024 WREG32(0x657C, 0xBF7884E0);
1025 WREG32(0x6578, 0x30600);
1026 WREG32(0x657C, 0x8BA0BED8);
1027 WREG32(0x6578, 0x30601);
1028 WREG32(0x657C, 0xBF508640);
1029 WREG32(0x6578, 0x30700);
1030 WREG32(0x657C, 0x8A60BEE8);
1031 WREG32(0x6578, 0x30701);
1032 WREG32(0x657C, 0xBF2087A0);
1033 WREG32(0x6578, 0x30800);
1034 WREG32(0x657C, 0x8900BF00);
1035 WREG32(0x6578, 0x30801);
1036 WREG32(0x657C, 0xBF008900);
1037}
1038
1039static void 808static void
1040atombios_yuv_setup(struct drm_encoder *encoder, bool enable) 809atombios_yuv_setup(struct drm_encoder *encoder, bool enable)
1041{ 810{
@@ -1074,129 +843,6 @@ atombios_yuv_setup(struct drm_encoder *encoder, bool enable)
1074} 843}
1075 844
1076static void 845static void
1077atombios_overscan_setup(struct drm_encoder *encoder,
1078 struct drm_display_mode *mode,
1079 struct drm_display_mode *adjusted_mode)
1080{
1081 struct drm_device *dev = encoder->dev;
1082 struct radeon_device *rdev = dev->dev_private;
1083 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1084 struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1085 SET_CRTC_OVERSCAN_PS_ALLOCATION args;
1086 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
1087
1088 memset(&args, 0, sizeof(args));
1089
1090 args.usOverscanRight = 0;
1091 args.usOverscanLeft = 0;
1092 args.usOverscanBottom = 0;
1093 args.usOverscanTop = 0;
1094 args.ucCRTC = radeon_crtc->crtc_id;
1095
1096 if (radeon_encoder->flags & RADEON_USE_RMX) {
1097 if (radeon_encoder->rmx_type == RMX_FULL) {
1098 args.usOverscanRight = 0;
1099 args.usOverscanLeft = 0;
1100 args.usOverscanBottom = 0;
1101 args.usOverscanTop = 0;
1102 } else if (radeon_encoder->rmx_type == RMX_CENTER) {
1103 args.usOverscanTop = (adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2;
1104 args.usOverscanBottom = (adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2;
1105 args.usOverscanLeft = (adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2;
1106 args.usOverscanRight = (adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2;
1107 } else if (radeon_encoder->rmx_type == RMX_ASPECT) {
1108 int a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
1109 int a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;
1110
1111 if (a1 > a2) {
1112 args.usOverscanLeft = (adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2;
1113 args.usOverscanRight = (adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2;
1114 } else if (a2 > a1) {
1115 args.usOverscanLeft = (adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2;
1116 args.usOverscanRight = (adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2;
1117 }
1118 }
1119 }
1120
1121 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
1122
1123}
1124
1125static void
1126atombios_scaler_setup(struct drm_encoder *encoder)
1127{
1128 struct drm_device *dev = encoder->dev;
1129 struct radeon_device *rdev = dev->dev_private;
1130 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1131 struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1132 ENABLE_SCALER_PS_ALLOCATION args;
1133 int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
1134 /* fixme - fill in enc_priv for atom dac */
1135 enum radeon_tv_std tv_std = TV_STD_NTSC;
1136
1137 if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
1138 return;
1139
1140 memset(&args, 0, sizeof(args));
1141
1142 args.ucScaler = radeon_crtc->crtc_id;
1143
1144 if (radeon_encoder->devices & (ATOM_DEVICE_TV_SUPPORT)) {
1145 switch (tv_std) {
1146 case TV_STD_NTSC:
1147 default:
1148 args.ucTVStandard = ATOM_TV_NTSC;
1149 break;
1150 case TV_STD_PAL:
1151 args.ucTVStandard = ATOM_TV_PAL;
1152 break;
1153 case TV_STD_PAL_M:
1154 args.ucTVStandard = ATOM_TV_PALM;
1155 break;
1156 case TV_STD_PAL_60:
1157 args.ucTVStandard = ATOM_TV_PAL60;
1158 break;
1159 case TV_STD_NTSC_J:
1160 args.ucTVStandard = ATOM_TV_NTSCJ;
1161 break;
1162 case TV_STD_SCART_PAL:
1163 args.ucTVStandard = ATOM_TV_PAL; /* ??? */
1164 break;
1165 case TV_STD_SECAM:
1166 args.ucTVStandard = ATOM_TV_SECAM;
1167 break;
1168 case TV_STD_PAL_CN:
1169 args.ucTVStandard = ATOM_TV_PALCN;
1170 break;
1171 }
1172 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
1173 } else if (radeon_encoder->devices & (ATOM_DEVICE_CV_SUPPORT)) {
1174 args.ucTVStandard = ATOM_TV_CV;
1175 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
1176 } else if (radeon_encoder->flags & RADEON_USE_RMX) {
1177 if (radeon_encoder->rmx_type == RMX_FULL)
1178 args.ucEnable = ATOM_SCALER_EXPANSION;
1179 else if (radeon_encoder->rmx_type == RMX_CENTER)
1180 args.ucEnable = ATOM_SCALER_CENTER;
1181 else if (radeon_encoder->rmx_type == RMX_ASPECT)
1182 args.ucEnable = ATOM_SCALER_EXPANSION;
1183 } else {
1184 if (ASIC_IS_AVIVO(rdev))
1185 args.ucEnable = ATOM_SCALER_DISABLE;
1186 else
1187 args.ucEnable = ATOM_SCALER_CENTER;
1188 }
1189
1190 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
1191
1192 if (radeon_encoder->devices & (ATOM_DEVICE_CV_SUPPORT | ATOM_DEVICE_TV_SUPPORT)
1193 && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_RV570) {
1194 atom_rv515_force_tv_scaler(rdev);
1195 }
1196
1197}
1198
1199static void
1200radeon_atom_encoder_dpms(struct drm_encoder *encoder, int mode) 846radeon_atom_encoder_dpms(struct drm_encoder *encoder, int mode)
1201{ 847{
1202 struct drm_device *dev = encoder->dev; 848 struct drm_device *dev = encoder->dev;
@@ -1448,8 +1094,6 @@ radeon_atom_encoder_mode_set(struct drm_encoder *encoder,
1448 radeon_encoder->pixel_clock = adjusted_mode->clock; 1094 radeon_encoder->pixel_clock = adjusted_mode->clock;
1449 1095
1450 radeon_atombios_encoder_crtc_scratch_regs(encoder, radeon_crtc->crtc_id); 1096 radeon_atombios_encoder_crtc_scratch_regs(encoder, radeon_crtc->crtc_id);
1451 atombios_overscan_setup(encoder, mode, adjusted_mode);
1452 atombios_scaler_setup(encoder);
1453 atombios_set_encoder_crtc_source(encoder); 1097 atombios_set_encoder_crtc_source(encoder);
1454 1098
1455 if (ASIC_IS_AVIVO(rdev)) { 1099 if (ASIC_IS_AVIVO(rdev)) {
@@ -1667,6 +1311,7 @@ radeon_add_atom_encoder(struct drm_device *dev, uint32_t encoder_id, uint32_t su
1667 1311
1668 radeon_encoder->encoder_id = encoder_id; 1312 radeon_encoder->encoder_id = encoder_id;
1669 radeon_encoder->devices = supported_device; 1313 radeon_encoder->devices = supported_device;
1314 radeon_encoder->rmx_type = RMX_OFF;
1670 1315
1671 switch (radeon_encoder->encoder_id) { 1316 switch (radeon_encoder->encoder_id) {
1672 case ENCODER_OBJECT_ID_INTERNAL_LVDS: 1317 case ENCODER_OBJECT_ID_INTERNAL_LVDS:
@@ -1700,14 +1345,8 @@ radeon_add_atom_encoder(struct drm_device *dev, uint32_t encoder_id, uint32_t su
1700 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1345 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1701 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1346 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1702 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1347 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1703 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1348 drm_encoder_init(dev, encoder, &radeon_atom_enc_funcs, DRM_MODE_ENCODER_TMDS);
1704 radeon_encoder->rmx_type = RMX_FULL; 1349 radeon_encoder->enc_priv = radeon_atombios_set_dig_info(radeon_encoder);
1705 drm_encoder_init(dev, encoder, &radeon_atom_enc_funcs, DRM_MODE_ENCODER_LVDS);
1706 radeon_encoder->enc_priv = radeon_atombios_get_lvds_info(radeon_encoder);
1707 } else {
1708 drm_encoder_init(dev, encoder, &radeon_atom_enc_funcs, DRM_MODE_ENCODER_TMDS);
1709 radeon_encoder->enc_priv = radeon_atombios_set_dig_info(radeon_encoder);
1710 }
1711 drm_encoder_helper_add(encoder, &radeon_atom_dig_helper_funcs); 1350 drm_encoder_helper_add(encoder, &radeon_atom_dig_helper_funcs);
1712 break; 1351 break;
1713 } 1352 }
diff --git a/drivers/gpu/drm/radeon/radeon_legacy_crtc.c b/drivers/gpu/drm/radeon/radeon_legacy_crtc.c
index 0613790e2a5b..7d06dc98a42a 100644
--- a/drivers/gpu/drm/radeon/radeon_legacy_crtc.c
+++ b/drivers/gpu/drm/radeon/radeon_legacy_crtc.c
@@ -29,6 +29,171 @@
29#include "radeon_fixed.h" 29#include "radeon_fixed.h"
30#include "radeon.h" 30#include "radeon.h"
31 31
32static void radeon_legacy_rmx_mode_set(struct drm_crtc *crtc,
33 struct drm_display_mode *mode,
34 struct drm_display_mode *adjusted_mode)
35{
36 struct drm_device *dev = crtc->dev;
37 struct radeon_device *rdev = dev->dev_private;
38 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
39 int xres = mode->hdisplay;
40 int yres = mode->vdisplay;
41 bool hscale = true, vscale = true;
42 int hsync_wid;
43 int vsync_wid;
44 int hsync_start;
45 int blank_width;
46 u32 scale, inc, crtc_more_cntl;
47 u32 fp_horz_stretch, fp_vert_stretch, fp_horz_vert_active;
48 u32 fp_h_sync_strt_wid, fp_crtc_h_total_disp;
49 u32 fp_v_sync_strt_wid, fp_crtc_v_total_disp;
50 struct radeon_native_mode *native_mode = &radeon_crtc->native_mode;
51
52 fp_vert_stretch = RREG32(RADEON_FP_VERT_STRETCH) &
53 (RADEON_VERT_STRETCH_RESERVED |
54 RADEON_VERT_AUTO_RATIO_INC);
55 fp_horz_stretch = RREG32(RADEON_FP_HORZ_STRETCH) &
56 (RADEON_HORZ_FP_LOOP_STRETCH |
57 RADEON_HORZ_AUTO_RATIO_INC);
58
59 crtc_more_cntl = 0;
60 if ((rdev->family == CHIP_RS100) ||
61 (rdev->family == CHIP_RS200)) {
62 /* This is to workaround the asic bug for RMX, some versions
63 of BIOS dosen't have this register initialized correctly. */
64 crtc_more_cntl |= RADEON_CRTC_H_CUTOFF_ACTIVE_EN;
65 }
66
67
68 fp_crtc_h_total_disp = ((((mode->crtc_htotal / 8) - 1) & 0x3ff)
69 | ((((mode->crtc_hdisplay / 8) - 1) & 0x1ff) << 16));
70
71 hsync_wid = (mode->crtc_hsync_end - mode->crtc_hsync_start) / 8;
72 if (!hsync_wid)
73 hsync_wid = 1;
74 hsync_start = mode->crtc_hsync_start - 8;
75
76 fp_h_sync_strt_wid = ((hsync_start & 0x1fff)
77 | ((hsync_wid & 0x3f) << 16)
78 | ((mode->flags & DRM_MODE_FLAG_NHSYNC)
79 ? RADEON_CRTC_H_SYNC_POL
80 : 0));
81
82 fp_crtc_v_total_disp = (((mode->crtc_vtotal - 1) & 0xffff)
83 | ((mode->crtc_vdisplay - 1) << 16));
84
85 vsync_wid = mode->crtc_vsync_end - mode->crtc_vsync_start;
86 if (!vsync_wid)
87 vsync_wid = 1;
88
89 fp_v_sync_strt_wid = (((mode->crtc_vsync_start - 1) & 0xfff)
90 | ((vsync_wid & 0x1f) << 16)
91 | ((mode->flags & DRM_MODE_FLAG_NVSYNC)
92 ? RADEON_CRTC_V_SYNC_POL
93 : 0));
94
95 fp_horz_vert_active = 0;
96
97 if (native_mode->panel_xres == 0 ||
98 native_mode->panel_yres == 0) {
99 hscale = false;
100 vscale = false;
101 } else {
102 if (xres > native_mode->panel_xres)
103 xres = native_mode->panel_xres;
104 if (yres > native_mode->panel_yres)
105 yres = native_mode->panel_yres;
106
107 if (xres == native_mode->panel_xres)
108 hscale = false;
109 if (yres == native_mode->panel_yres)
110 vscale = false;
111 }
112
113 switch (radeon_crtc->rmx_type) {
114 case RMX_FULL:
115 case RMX_ASPECT:
116 if (!hscale)
117 fp_horz_stretch |= ((xres/8-1) << 16);
118 else {
119 inc = (fp_horz_stretch & RADEON_HORZ_AUTO_RATIO_INC) ? 1 : 0;
120 scale = ((xres + inc) * RADEON_HORZ_STRETCH_RATIO_MAX)
121 / native_mode->panel_xres + 1;
122 fp_horz_stretch |= (((scale) & RADEON_HORZ_STRETCH_RATIO_MASK) |
123 RADEON_HORZ_STRETCH_BLEND |
124 RADEON_HORZ_STRETCH_ENABLE |
125 ((native_mode->panel_xres/8-1) << 16));
126 }
127
128 if (!vscale)
129 fp_vert_stretch |= ((yres-1) << 12);
130 else {
131 inc = (fp_vert_stretch & RADEON_VERT_AUTO_RATIO_INC) ? 1 : 0;
132 scale = ((yres + inc) * RADEON_VERT_STRETCH_RATIO_MAX)
133 / native_mode->panel_yres + 1;
134 fp_vert_stretch |= (((scale) & RADEON_VERT_STRETCH_RATIO_MASK) |
135 RADEON_VERT_STRETCH_ENABLE |
136 RADEON_VERT_STRETCH_BLEND |
137 ((native_mode->panel_yres-1) << 12));
138 }
139 break;
140 case RMX_CENTER:
141 fp_horz_stretch |= ((xres/8-1) << 16);
142 fp_vert_stretch |= ((yres-1) << 12);
143
144 crtc_more_cntl |= (RADEON_CRTC_AUTO_HORZ_CENTER_EN |
145 RADEON_CRTC_AUTO_VERT_CENTER_EN);
146
147 blank_width = (mode->crtc_hblank_end - mode->crtc_hblank_start) / 8;
148 if (blank_width > 110)
149 blank_width = 110;
150
151 fp_crtc_h_total_disp = (((blank_width) & 0x3ff)
152 | ((((mode->crtc_hdisplay / 8) - 1) & 0x1ff) << 16));
153
154 hsync_wid = (mode->crtc_hsync_end - mode->crtc_hsync_start) / 8;
155 if (!hsync_wid)
156 hsync_wid = 1;
157
158 fp_h_sync_strt_wid = ((((mode->crtc_hsync_start - mode->crtc_hblank_start) / 8) & 0x1fff)
159 | ((hsync_wid & 0x3f) << 16)
160 | ((mode->flags & DRM_MODE_FLAG_NHSYNC)
161 ? RADEON_CRTC_H_SYNC_POL
162 : 0));
163
164 fp_crtc_v_total_disp = (((mode->crtc_vblank_end - mode->crtc_vblank_start) & 0xffff)
165 | ((mode->crtc_vdisplay - 1) << 16));
166
167 vsync_wid = mode->crtc_vsync_end - mode->crtc_vsync_start;
168 if (!vsync_wid)
169 vsync_wid = 1;
170
171 fp_v_sync_strt_wid = ((((mode->crtc_vsync_start - mode->crtc_vblank_start) & 0xfff)
172 | ((vsync_wid & 0x1f) << 16)
173 | ((mode->flags & DRM_MODE_FLAG_NVSYNC)
174 ? RADEON_CRTC_V_SYNC_POL
175 : 0)));
176
177 fp_horz_vert_active = (((native_mode->panel_yres) & 0xfff) |
178 (((native_mode->panel_xres / 8) & 0x1ff) << 16));
179 break;
180 case RMX_OFF:
181 default:
182 fp_horz_stretch |= ((xres/8-1) << 16);
183 fp_vert_stretch |= ((yres-1) << 12);
184 break;
185 }
186
187 WREG32(RADEON_FP_HORZ_STRETCH, fp_horz_stretch);
188 WREG32(RADEON_FP_VERT_STRETCH, fp_vert_stretch);
189 WREG32(RADEON_CRTC_MORE_CNTL, crtc_more_cntl);
190 WREG32(RADEON_FP_HORZ_VERT_ACTIVE, fp_horz_vert_active);
191 WREG32(RADEON_FP_H_SYNC_STRT_WID, fp_h_sync_strt_wid);
192 WREG32(RADEON_FP_V_SYNC_STRT_WID, fp_v_sync_strt_wid);
193 WREG32(RADEON_FP_CRTC_H_TOTAL_DISP, fp_crtc_h_total_disp);
194 WREG32(RADEON_FP_CRTC_V_TOTAL_DISP, fp_crtc_v_total_disp);
195}
196
32void radeon_restore_common_regs(struct drm_device *dev) 197void radeon_restore_common_regs(struct drm_device *dev)
33{ 198{
34 /* don't need this yet */ 199 /* don't need this yet */
@@ -757,6 +922,8 @@ static bool radeon_crtc_mode_fixup(struct drm_crtc *crtc,
757 struct drm_display_mode *mode, 922 struct drm_display_mode *mode,
758 struct drm_display_mode *adjusted_mode) 923 struct drm_display_mode *adjusted_mode)
759{ 924{
925 if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
926 return false;
760 return true; 927 return true;
761} 928}
762 929
@@ -765,16 +932,25 @@ static int radeon_crtc_mode_set(struct drm_crtc *crtc,
765 struct drm_display_mode *adjusted_mode, 932 struct drm_display_mode *adjusted_mode,
766 int x, int y, struct drm_framebuffer *old_fb) 933 int x, int y, struct drm_framebuffer *old_fb)
767{ 934{
768 935 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
769 DRM_DEBUG("\n"); 936 struct drm_device *dev = crtc->dev;
937 struct radeon_device *rdev = dev->dev_private;
770 938
771 /* TODO TV */ 939 /* TODO TV */
772
773 radeon_crtc_set_base(crtc, x, y, old_fb); 940 radeon_crtc_set_base(crtc, x, y, old_fb);
774 radeon_set_crtc_timing(crtc, adjusted_mode); 941 radeon_set_crtc_timing(crtc, adjusted_mode);
775 radeon_set_pll(crtc, adjusted_mode); 942 radeon_set_pll(crtc, adjusted_mode);
776 radeon_init_disp_bandwidth(crtc->dev); 943 radeon_bandwidth_update(rdev);
777 944 if (radeon_crtc->crtc_id == 0) {
945 radeon_legacy_rmx_mode_set(crtc, mode, adjusted_mode);
946 } else {
947 if (radeon_crtc->rmx_type != RMX_OFF) {
948 /* FIXME: only first crtc has rmx what should we
949 * do ?
950 */
951 DRM_ERROR("Mode need scaling but only first crtc can do that.\n");
952 }
953 }
778 return 0; 954 return 0;
779} 955}
780 956
@@ -805,478 +981,3 @@ void radeon_legacy_init_crtc(struct drm_device *dev,
805 radeon_crtc->crtc_offset = RADEON_CRTC2_H_TOTAL_DISP - RADEON_CRTC_H_TOTAL_DISP; 981 radeon_crtc->crtc_offset = RADEON_CRTC2_H_TOTAL_DISP - RADEON_CRTC_H_TOTAL_DISP;
806 drm_crtc_helper_add(&radeon_crtc->base, &legacy_helper_funcs); 982 drm_crtc_helper_add(&radeon_crtc->base, &legacy_helper_funcs);
807} 983}
808
809void radeon_init_disp_bw_legacy(struct drm_device *dev,
810 struct drm_display_mode *mode1,
811 uint32_t pixel_bytes1,
812 struct drm_display_mode *mode2,
813 uint32_t pixel_bytes2)
814{
815 struct radeon_device *rdev = dev->dev_private;
816 fixed20_12 trcd_ff, trp_ff, tras_ff, trbs_ff, tcas_ff;
817 fixed20_12 sclk_ff, mclk_ff, sclk_eff_ff, sclk_delay_ff;
818 fixed20_12 peak_disp_bw, mem_bw, pix_clk, pix_clk2, temp_ff, crit_point_ff;
819 uint32_t temp, data, mem_trcd, mem_trp, mem_tras;
820 fixed20_12 memtcas_ff[8] = {
821 fixed_init(1),
822 fixed_init(2),
823 fixed_init(3),
824 fixed_init(0),
825 fixed_init_half(1),
826 fixed_init_half(2),
827 fixed_init(0),
828 };
829 fixed20_12 memtcas_rs480_ff[8] = {
830 fixed_init(0),
831 fixed_init(1),
832 fixed_init(2),
833 fixed_init(3),
834 fixed_init(0),
835 fixed_init_half(1),
836 fixed_init_half(2),
837 fixed_init_half(3),
838 };
839 fixed20_12 memtcas2_ff[8] = {
840 fixed_init(0),
841 fixed_init(1),
842 fixed_init(2),
843 fixed_init(3),
844 fixed_init(4),
845 fixed_init(5),
846 fixed_init(6),
847 fixed_init(7),
848 };
849 fixed20_12 memtrbs[8] = {
850 fixed_init(1),
851 fixed_init_half(1),
852 fixed_init(2),
853 fixed_init_half(2),
854 fixed_init(3),
855 fixed_init_half(3),
856 fixed_init(4),
857 fixed_init_half(4)
858 };
859 fixed20_12 memtrbs_r4xx[8] = {
860 fixed_init(4),
861 fixed_init(5),
862 fixed_init(6),
863 fixed_init(7),
864 fixed_init(8),
865 fixed_init(9),
866 fixed_init(10),
867 fixed_init(11)
868 };
869 fixed20_12 min_mem_eff;
870 fixed20_12 mc_latency_sclk, mc_latency_mclk, k1;
871 fixed20_12 cur_latency_mclk, cur_latency_sclk;
872 fixed20_12 disp_latency, disp_latency_overhead, disp_drain_rate,
873 disp_drain_rate2, read_return_rate;
874 fixed20_12 time_disp1_drop_priority;
875 int c;
876 int cur_size = 16; /* in octawords */
877 int critical_point = 0, critical_point2;
878/* uint32_t read_return_rate, time_disp1_drop_priority; */
879 int stop_req, max_stop_req;
880
881 min_mem_eff.full = rfixed_const_8(0);
882 /* get modes */
883 if ((rdev->disp_priority == 2) && ASIC_IS_R300(rdev)) {
884 uint32_t mc_init_misc_lat_timer = RREG32(R300_MC_INIT_MISC_LAT_TIMER);
885 mc_init_misc_lat_timer &= ~(R300_MC_DISP1R_INIT_LAT_MASK << R300_MC_DISP1R_INIT_LAT_SHIFT);
886 mc_init_misc_lat_timer &= ~(R300_MC_DISP0R_INIT_LAT_MASK << R300_MC_DISP0R_INIT_LAT_SHIFT);
887 /* check crtc enables */
888 if (mode2)
889 mc_init_misc_lat_timer |= (1 << R300_MC_DISP1R_INIT_LAT_SHIFT);
890 if (mode1)
891 mc_init_misc_lat_timer |= (1 << R300_MC_DISP0R_INIT_LAT_SHIFT);
892 WREG32(R300_MC_INIT_MISC_LAT_TIMER, mc_init_misc_lat_timer);
893 }
894
895 /*
896 * determine is there is enough bw for current mode
897 */
898 mclk_ff.full = rfixed_const(rdev->clock.default_mclk);
899 temp_ff.full = rfixed_const(100);
900 mclk_ff.full = rfixed_div(mclk_ff, temp_ff);
901 sclk_ff.full = rfixed_const(rdev->clock.default_sclk);
902 sclk_ff.full = rfixed_div(sclk_ff, temp_ff);
903
904 temp = (rdev->mc.vram_width / 8) * (rdev->mc.vram_is_ddr ? 2 : 1);
905 temp_ff.full = rfixed_const(temp);
906 mem_bw.full = rfixed_mul(mclk_ff, temp_ff);
907
908 pix_clk.full = 0;
909 pix_clk2.full = 0;
910 peak_disp_bw.full = 0;
911 if (mode1) {
912 temp_ff.full = rfixed_const(1000);
913 pix_clk.full = rfixed_const(mode1->clock); /* convert to fixed point */
914 pix_clk.full = rfixed_div(pix_clk, temp_ff);
915 temp_ff.full = rfixed_const(pixel_bytes1);
916 peak_disp_bw.full += rfixed_mul(pix_clk, temp_ff);
917 }
918 if (mode2) {
919 temp_ff.full = rfixed_const(1000);
920 pix_clk2.full = rfixed_const(mode2->clock); /* convert to fixed point */
921 pix_clk2.full = rfixed_div(pix_clk2, temp_ff);
922 temp_ff.full = rfixed_const(pixel_bytes2);
923 peak_disp_bw.full += rfixed_mul(pix_clk2, temp_ff);
924 }
925
926 mem_bw.full = rfixed_mul(mem_bw, min_mem_eff);
927 if (peak_disp_bw.full >= mem_bw.full) {
928 DRM_ERROR("You may not have enough display bandwidth for current mode\n"
929 "If you have flickering problem, try to lower resolution, refresh rate, or color depth\n");
930 }
931
932 /* Get values from the EXT_MEM_CNTL register...converting its contents. */
933 temp = RREG32(RADEON_MEM_TIMING_CNTL);
934 if ((rdev->family == CHIP_RV100) || (rdev->flags & RADEON_IS_IGP)) { /* RV100, M6, IGPs */
935 mem_trcd = ((temp >> 2) & 0x3) + 1;
936 mem_trp = ((temp & 0x3)) + 1;
937 mem_tras = ((temp & 0x70) >> 4) + 1;
938 } else if (rdev->family == CHIP_R300 ||
939 rdev->family == CHIP_R350) { /* r300, r350 */
940 mem_trcd = (temp & 0x7) + 1;
941 mem_trp = ((temp >> 8) & 0x7) + 1;
942 mem_tras = ((temp >> 11) & 0xf) + 4;
943 } else if (rdev->family == CHIP_RV350 ||
944 rdev->family <= CHIP_RV380) {
945 /* rv3x0 */
946 mem_trcd = (temp & 0x7) + 3;
947 mem_trp = ((temp >> 8) & 0x7) + 3;
948 mem_tras = ((temp >> 11) & 0xf) + 6;
949 } else if (rdev->family == CHIP_R420 ||
950 rdev->family == CHIP_R423 ||
951 rdev->family == CHIP_RV410) {
952 /* r4xx */
953 mem_trcd = (temp & 0xf) + 3;
954 if (mem_trcd > 15)
955 mem_trcd = 15;
956 mem_trp = ((temp >> 8) & 0xf) + 3;
957 if (mem_trp > 15)
958 mem_trp = 15;
959 mem_tras = ((temp >> 12) & 0x1f) + 6;
960 if (mem_tras > 31)
961 mem_tras = 31;
962 } else { /* RV200, R200 */
963 mem_trcd = (temp & 0x7) + 1;
964 mem_trp = ((temp >> 8) & 0x7) + 1;
965 mem_tras = ((temp >> 12) & 0xf) + 4;
966 }
967 /* convert to FF */
968 trcd_ff.full = rfixed_const(mem_trcd);
969 trp_ff.full = rfixed_const(mem_trp);
970 tras_ff.full = rfixed_const(mem_tras);
971
972 /* Get values from the MEM_SDRAM_MODE_REG register...converting its */
973 temp = RREG32(RADEON_MEM_SDRAM_MODE_REG);
974 data = (temp & (7 << 20)) >> 20;
975 if ((rdev->family == CHIP_RV100) || rdev->flags & RADEON_IS_IGP) {
976 if (rdev->family == CHIP_RS480) /* don't think rs400 */
977 tcas_ff = memtcas_rs480_ff[data];
978 else
979 tcas_ff = memtcas_ff[data];
980 } else
981 tcas_ff = memtcas2_ff[data];
982
983 if (rdev->family == CHIP_RS400 ||
984 rdev->family == CHIP_RS480) {
985 /* extra cas latency stored in bits 23-25 0-4 clocks */
986 data = (temp >> 23) & 0x7;
987 if (data < 5)
988 tcas_ff.full += rfixed_const(data);
989 }
990
991 if (ASIC_IS_R300(rdev) && !(rdev->flags & RADEON_IS_IGP)) {
992 /* on the R300, Tcas is included in Trbs.
993 */
994 temp = RREG32(RADEON_MEM_CNTL);
995 data = (R300_MEM_NUM_CHANNELS_MASK & temp);
996 if (data == 1) {
997 if (R300_MEM_USE_CD_CH_ONLY & temp) {
998 temp = RREG32(R300_MC_IND_INDEX);
999 temp &= ~R300_MC_IND_ADDR_MASK;
1000 temp |= R300_MC_READ_CNTL_CD_mcind;
1001 WREG32(R300_MC_IND_INDEX, temp);
1002 temp = RREG32(R300_MC_IND_DATA);
1003 data = (R300_MEM_RBS_POSITION_C_MASK & temp);
1004 } else {
1005 temp = RREG32(R300_MC_READ_CNTL_AB);
1006 data = (R300_MEM_RBS_POSITION_A_MASK & temp);
1007 }
1008 } else {
1009 temp = RREG32(R300_MC_READ_CNTL_AB);
1010 data = (R300_MEM_RBS_POSITION_A_MASK & temp);
1011 }
1012 if (rdev->family == CHIP_RV410 ||
1013 rdev->family == CHIP_R420 ||
1014 rdev->family == CHIP_R423)
1015 trbs_ff = memtrbs_r4xx[data];
1016 else
1017 trbs_ff = memtrbs[data];
1018 tcas_ff.full += trbs_ff.full;
1019 }
1020
1021 sclk_eff_ff.full = sclk_ff.full;
1022
1023 if (rdev->flags & RADEON_IS_AGP) {
1024 fixed20_12 agpmode_ff;
1025 agpmode_ff.full = rfixed_const(radeon_agpmode);
1026 temp_ff.full = rfixed_const_666(16);
1027 sclk_eff_ff.full -= rfixed_mul(agpmode_ff, temp_ff);
1028 }
1029 /* TODO PCIE lanes may affect this - agpmode == 16?? */
1030
1031 if (ASIC_IS_R300(rdev)) {
1032 sclk_delay_ff.full = rfixed_const(250);
1033 } else {
1034 if ((rdev->family == CHIP_RV100) ||
1035 rdev->flags & RADEON_IS_IGP) {
1036 if (rdev->mc.vram_is_ddr)
1037 sclk_delay_ff.full = rfixed_const(41);
1038 else
1039 sclk_delay_ff.full = rfixed_const(33);
1040 } else {
1041 if (rdev->mc.vram_width == 128)
1042 sclk_delay_ff.full = rfixed_const(57);
1043 else
1044 sclk_delay_ff.full = rfixed_const(41);
1045 }
1046 }
1047
1048 mc_latency_sclk.full = rfixed_div(sclk_delay_ff, sclk_eff_ff);
1049
1050 if (rdev->mc.vram_is_ddr) {
1051 if (rdev->mc.vram_width == 32) {
1052 k1.full = rfixed_const(40);
1053 c = 3;
1054 } else {
1055 k1.full = rfixed_const(20);
1056 c = 1;
1057 }
1058 } else {
1059 k1.full = rfixed_const(40);
1060 c = 3;
1061 }
1062
1063 temp_ff.full = rfixed_const(2);
1064 mc_latency_mclk.full = rfixed_mul(trcd_ff, temp_ff);
1065 temp_ff.full = rfixed_const(c);
1066 mc_latency_mclk.full += rfixed_mul(tcas_ff, temp_ff);
1067 temp_ff.full = rfixed_const(4);
1068 mc_latency_mclk.full += rfixed_mul(tras_ff, temp_ff);
1069 mc_latency_mclk.full += rfixed_mul(trp_ff, temp_ff);
1070 mc_latency_mclk.full += k1.full;
1071
1072 mc_latency_mclk.full = rfixed_div(mc_latency_mclk, mclk_ff);
1073 mc_latency_mclk.full += rfixed_div(temp_ff, sclk_eff_ff);
1074
1075 /*
1076 HW cursor time assuming worst case of full size colour cursor.
1077 */
1078 temp_ff.full = rfixed_const((2 * (cur_size - (rdev->mc.vram_is_ddr + 1))));
1079 temp_ff.full += trcd_ff.full;
1080 if (temp_ff.full < tras_ff.full)
1081 temp_ff.full = tras_ff.full;
1082 cur_latency_mclk.full = rfixed_div(temp_ff, mclk_ff);
1083
1084 temp_ff.full = rfixed_const(cur_size);
1085 cur_latency_sclk.full = rfixed_div(temp_ff, sclk_eff_ff);
1086 /*
1087 Find the total latency for the display data.
1088 */
1089 disp_latency_overhead.full = rfixed_const(80);
1090 disp_latency_overhead.full = rfixed_div(disp_latency_overhead, sclk_ff);
1091 mc_latency_mclk.full += disp_latency_overhead.full + cur_latency_mclk.full;
1092 mc_latency_sclk.full += disp_latency_overhead.full + cur_latency_sclk.full;
1093
1094 if (mc_latency_mclk.full > mc_latency_sclk.full)
1095 disp_latency.full = mc_latency_mclk.full;
1096 else
1097 disp_latency.full = mc_latency_sclk.full;
1098
1099 /* setup Max GRPH_STOP_REQ default value */
1100 if (ASIC_IS_RV100(rdev))
1101 max_stop_req = 0x5c;
1102 else
1103 max_stop_req = 0x7c;
1104
1105 if (mode1) {
1106 /* CRTC1
1107 Set GRPH_BUFFER_CNTL register using h/w defined optimal values.
1108 GRPH_STOP_REQ <= MIN[ 0x7C, (CRTC_H_DISP + 1) * (bit depth) / 0x10 ]
1109 */
1110 stop_req = mode1->hdisplay * pixel_bytes1 / 16;
1111
1112 if (stop_req > max_stop_req)
1113 stop_req = max_stop_req;
1114
1115 /*
1116 Find the drain rate of the display buffer.
1117 */
1118 temp_ff.full = rfixed_const((16/pixel_bytes1));
1119 disp_drain_rate.full = rfixed_div(pix_clk, temp_ff);
1120
1121 /*
1122 Find the critical point of the display buffer.
1123 */
1124 crit_point_ff.full = rfixed_mul(disp_drain_rate, disp_latency);
1125 crit_point_ff.full += rfixed_const_half(0);
1126
1127 critical_point = rfixed_trunc(crit_point_ff);
1128
1129 if (rdev->disp_priority == 2) {
1130 critical_point = 0;
1131 }
1132
1133 /*
1134 The critical point should never be above max_stop_req-4. Setting
1135 GRPH_CRITICAL_CNTL = 0 will thus force high priority all the time.
1136 */
1137 if (max_stop_req - critical_point < 4)
1138 critical_point = 0;
1139
1140 if (critical_point == 0 && mode2 && rdev->family == CHIP_R300) {
1141 /* some R300 cards have problem with this set to 0, when CRTC2 is enabled.*/
1142 critical_point = 0x10;
1143 }
1144
1145 temp = RREG32(RADEON_GRPH_BUFFER_CNTL);
1146 temp &= ~(RADEON_GRPH_STOP_REQ_MASK);
1147 temp |= (stop_req << RADEON_GRPH_STOP_REQ_SHIFT);
1148 temp &= ~(RADEON_GRPH_START_REQ_MASK);
1149 if ((rdev->family == CHIP_R350) &&
1150 (stop_req > 0x15)) {
1151 stop_req -= 0x10;
1152 }
1153 temp |= (stop_req << RADEON_GRPH_START_REQ_SHIFT);
1154 temp |= RADEON_GRPH_BUFFER_SIZE;
1155 temp &= ~(RADEON_GRPH_CRITICAL_CNTL |
1156 RADEON_GRPH_CRITICAL_AT_SOF |
1157 RADEON_GRPH_STOP_CNTL);
1158 /*
1159 Write the result into the register.
1160 */
1161 WREG32(RADEON_GRPH_BUFFER_CNTL, ((temp & ~RADEON_GRPH_CRITICAL_POINT_MASK) |
1162 (critical_point << RADEON_GRPH_CRITICAL_POINT_SHIFT)));
1163
1164#if 0
1165 if ((rdev->family == CHIP_RS400) ||
1166 (rdev->family == CHIP_RS480)) {
1167 /* attempt to program RS400 disp regs correctly ??? */
1168 temp = RREG32(RS400_DISP1_REG_CNTL);
1169 temp &= ~(RS400_DISP1_START_REQ_LEVEL_MASK |
1170 RS400_DISP1_STOP_REQ_LEVEL_MASK);
1171 WREG32(RS400_DISP1_REQ_CNTL1, (temp |
1172 (critical_point << RS400_DISP1_START_REQ_LEVEL_SHIFT) |
1173 (critical_point << RS400_DISP1_STOP_REQ_LEVEL_SHIFT)));
1174 temp = RREG32(RS400_DMIF_MEM_CNTL1);
1175 temp &= ~(RS400_DISP1_CRITICAL_POINT_START_MASK |
1176 RS400_DISP1_CRITICAL_POINT_STOP_MASK);
1177 WREG32(RS400_DMIF_MEM_CNTL1, (temp |
1178 (critical_point << RS400_DISP1_CRITICAL_POINT_START_SHIFT) |
1179 (critical_point << RS400_DISP1_CRITICAL_POINT_STOP_SHIFT)));
1180 }
1181#endif
1182
1183 DRM_DEBUG("GRPH_BUFFER_CNTL from to %x\n",
1184 /* (unsigned int)info->SavedReg->grph_buffer_cntl, */
1185 (unsigned int)RREG32(RADEON_GRPH_BUFFER_CNTL));
1186 }
1187
1188 if (mode2) {
1189 u32 grph2_cntl;
1190 stop_req = mode2->hdisplay * pixel_bytes2 / 16;
1191
1192 if (stop_req > max_stop_req)
1193 stop_req = max_stop_req;
1194
1195 /*
1196 Find the drain rate of the display buffer.
1197 */
1198 temp_ff.full = rfixed_const((16/pixel_bytes2));
1199 disp_drain_rate2.full = rfixed_div(pix_clk2, temp_ff);
1200
1201 grph2_cntl = RREG32(RADEON_GRPH2_BUFFER_CNTL);
1202 grph2_cntl &= ~(RADEON_GRPH_STOP_REQ_MASK);
1203 grph2_cntl |= (stop_req << RADEON_GRPH_STOP_REQ_SHIFT);
1204 grph2_cntl &= ~(RADEON_GRPH_START_REQ_MASK);
1205 if ((rdev->family == CHIP_R350) &&
1206 (stop_req > 0x15)) {
1207 stop_req -= 0x10;
1208 }
1209 grph2_cntl |= (stop_req << RADEON_GRPH_START_REQ_SHIFT);
1210 grph2_cntl |= RADEON_GRPH_BUFFER_SIZE;
1211 grph2_cntl &= ~(RADEON_GRPH_CRITICAL_CNTL |
1212 RADEON_GRPH_CRITICAL_AT_SOF |
1213 RADEON_GRPH_STOP_CNTL);
1214
1215 if ((rdev->family == CHIP_RS100) ||
1216 (rdev->family == CHIP_RS200))
1217 critical_point2 = 0;
1218 else {
1219 temp = (rdev->mc.vram_width * rdev->mc.vram_is_ddr + 1)/128;
1220 temp_ff.full = rfixed_const(temp);
1221 temp_ff.full = rfixed_mul(mclk_ff, temp_ff);
1222 if (sclk_ff.full < temp_ff.full)
1223 temp_ff.full = sclk_ff.full;
1224
1225 read_return_rate.full = temp_ff.full;
1226
1227 if (mode1) {
1228 temp_ff.full = read_return_rate.full - disp_drain_rate.full;
1229 time_disp1_drop_priority.full = rfixed_div(crit_point_ff, temp_ff);
1230 } else {
1231 time_disp1_drop_priority.full = 0;
1232 }
1233 crit_point_ff.full = disp_latency.full + time_disp1_drop_priority.full + disp_latency.full;
1234 crit_point_ff.full = rfixed_mul(crit_point_ff, disp_drain_rate2);
1235 crit_point_ff.full += rfixed_const_half(0);
1236
1237 critical_point2 = rfixed_trunc(crit_point_ff);
1238
1239 if (rdev->disp_priority == 2) {
1240 critical_point2 = 0;
1241 }
1242
1243 if (max_stop_req - critical_point2 < 4)
1244 critical_point2 = 0;
1245
1246 }
1247
1248 if (critical_point2 == 0 && rdev->family == CHIP_R300) {
1249 /* some R300 cards have problem with this set to 0 */
1250 critical_point2 = 0x10;
1251 }
1252
1253 WREG32(RADEON_GRPH2_BUFFER_CNTL, ((grph2_cntl & ~RADEON_GRPH_CRITICAL_POINT_MASK) |
1254 (critical_point2 << RADEON_GRPH_CRITICAL_POINT_SHIFT)));
1255
1256 if ((rdev->family == CHIP_RS400) ||
1257 (rdev->family == CHIP_RS480)) {
1258#if 0
1259 /* attempt to program RS400 disp2 regs correctly ??? */
1260 temp = RREG32(RS400_DISP2_REQ_CNTL1);
1261 temp &= ~(RS400_DISP2_START_REQ_LEVEL_MASK |
1262 RS400_DISP2_STOP_REQ_LEVEL_MASK);
1263 WREG32(RS400_DISP2_REQ_CNTL1, (temp |
1264 (critical_point2 << RS400_DISP1_START_REQ_LEVEL_SHIFT) |
1265 (critical_point2 << RS400_DISP1_STOP_REQ_LEVEL_SHIFT)));
1266 temp = RREG32(RS400_DISP2_REQ_CNTL2);
1267 temp &= ~(RS400_DISP2_CRITICAL_POINT_START_MASK |
1268 RS400_DISP2_CRITICAL_POINT_STOP_MASK);
1269 WREG32(RS400_DISP2_REQ_CNTL2, (temp |
1270 (critical_point2 << RS400_DISP2_CRITICAL_POINT_START_SHIFT) |
1271 (critical_point2 << RS400_DISP2_CRITICAL_POINT_STOP_SHIFT)));
1272#endif
1273 WREG32(RS400_DISP2_REQ_CNTL1, 0x105DC1CC);
1274 WREG32(RS400_DISP2_REQ_CNTL2, 0x2749D000);
1275 WREG32(RS400_DMIF_MEM_CNTL1, 0x29CA71DC);
1276 WREG32(RS400_DISP1_REQ_CNTL1, 0x28FBC3AC);
1277 }
1278
1279 DRM_DEBUG("GRPH2_BUFFER_CNTL from to %x\n",
1280 (unsigned int)RREG32(RADEON_GRPH2_BUFFER_CNTL));
1281 }
1282}
diff --git a/drivers/gpu/drm/radeon/radeon_legacy_encoders.c b/drivers/gpu/drm/radeon/radeon_legacy_encoders.c
index 2c2f42de1d4c..34d0f58eb944 100644
--- a/drivers/gpu/drm/radeon/radeon_legacy_encoders.c
+++ b/drivers/gpu/drm/radeon/radeon_legacy_encoders.c
@@ -30,170 +30,6 @@
30#include "atom.h" 30#include "atom.h"
31 31
32 32
33static void radeon_legacy_rmx_mode_set(struct drm_encoder *encoder,
34 struct drm_display_mode *mode,
35 struct drm_display_mode *adjusted_mode)
36{
37 struct drm_device *dev = encoder->dev;
38 struct radeon_device *rdev = dev->dev_private;
39 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
40 int xres = mode->hdisplay;
41 int yres = mode->vdisplay;
42 bool hscale = true, vscale = true;
43 int hsync_wid;
44 int vsync_wid;
45 int hsync_start;
46 uint32_t scale, inc;
47 uint32_t fp_horz_stretch, fp_vert_stretch, crtc_more_cntl, fp_horz_vert_active;
48 uint32_t fp_h_sync_strt_wid, fp_v_sync_strt_wid, fp_crtc_h_total_disp, fp_crtc_v_total_disp;
49 struct radeon_native_mode *native_mode = &radeon_encoder->native_mode;
50
51 DRM_DEBUG("\n");
52
53 fp_vert_stretch = RREG32(RADEON_FP_VERT_STRETCH) &
54 (RADEON_VERT_STRETCH_RESERVED |
55 RADEON_VERT_AUTO_RATIO_INC);
56 fp_horz_stretch = RREG32(RADEON_FP_HORZ_STRETCH) &
57 (RADEON_HORZ_FP_LOOP_STRETCH |
58 RADEON_HORZ_AUTO_RATIO_INC);
59
60 crtc_more_cntl = 0;
61 if ((rdev->family == CHIP_RS100) ||
62 (rdev->family == CHIP_RS200)) {
63 /* This is to workaround the asic bug for RMX, some versions
64 of BIOS dosen't have this register initialized correctly. */
65 crtc_more_cntl |= RADEON_CRTC_H_CUTOFF_ACTIVE_EN;
66 }
67
68
69 fp_crtc_h_total_disp = ((((mode->crtc_htotal / 8) - 1) & 0x3ff)
70 | ((((mode->crtc_hdisplay / 8) - 1) & 0x1ff) << 16));
71
72 hsync_wid = (mode->crtc_hsync_end - mode->crtc_hsync_start) / 8;
73 if (!hsync_wid)
74 hsync_wid = 1;
75 hsync_start = mode->crtc_hsync_start - 8;
76
77 fp_h_sync_strt_wid = ((hsync_start & 0x1fff)
78 | ((hsync_wid & 0x3f) << 16)
79 | ((mode->flags & DRM_MODE_FLAG_NHSYNC)
80 ? RADEON_CRTC_H_SYNC_POL
81 : 0));
82
83 fp_crtc_v_total_disp = (((mode->crtc_vtotal - 1) & 0xffff)
84 | ((mode->crtc_vdisplay - 1) << 16));
85
86 vsync_wid = mode->crtc_vsync_end - mode->crtc_vsync_start;
87 if (!vsync_wid)
88 vsync_wid = 1;
89
90 fp_v_sync_strt_wid = (((mode->crtc_vsync_start - 1) & 0xfff)
91 | ((vsync_wid & 0x1f) << 16)
92 | ((mode->flags & DRM_MODE_FLAG_NVSYNC)
93 ? RADEON_CRTC_V_SYNC_POL
94 : 0));
95
96 fp_horz_vert_active = 0;
97
98 if (native_mode->panel_xres == 0 ||
99 native_mode->panel_yres == 0) {
100 hscale = false;
101 vscale = false;
102 } else {
103 if (xres > native_mode->panel_xres)
104 xres = native_mode->panel_xres;
105 if (yres > native_mode->panel_yres)
106 yres = native_mode->panel_yres;
107
108 if (xres == native_mode->panel_xres)
109 hscale = false;
110 if (yres == native_mode->panel_yres)
111 vscale = false;
112 }
113
114 if (radeon_encoder->flags & RADEON_USE_RMX) {
115 if (radeon_encoder->rmx_type != RMX_CENTER) {
116 if (!hscale)
117 fp_horz_stretch |= ((xres/8-1) << 16);
118 else {
119 inc = (fp_horz_stretch & RADEON_HORZ_AUTO_RATIO_INC) ? 1 : 0;
120 scale = ((xres + inc) * RADEON_HORZ_STRETCH_RATIO_MAX)
121 / native_mode->panel_xres + 1;
122 fp_horz_stretch |= (((scale) & RADEON_HORZ_STRETCH_RATIO_MASK) |
123 RADEON_HORZ_STRETCH_BLEND |
124 RADEON_HORZ_STRETCH_ENABLE |
125 ((native_mode->panel_xres/8-1) << 16));
126 }
127
128 if (!vscale)
129 fp_vert_stretch |= ((yres-1) << 12);
130 else {
131 inc = (fp_vert_stretch & RADEON_VERT_AUTO_RATIO_INC) ? 1 : 0;
132 scale = ((yres + inc) * RADEON_VERT_STRETCH_RATIO_MAX)
133 / native_mode->panel_yres + 1;
134 fp_vert_stretch |= (((scale) & RADEON_VERT_STRETCH_RATIO_MASK) |
135 RADEON_VERT_STRETCH_ENABLE |
136 RADEON_VERT_STRETCH_BLEND |
137 ((native_mode->panel_yres-1) << 12));
138 }
139 } else if (radeon_encoder->rmx_type == RMX_CENTER) {
140 int blank_width;
141
142 fp_horz_stretch |= ((xres/8-1) << 16);
143 fp_vert_stretch |= ((yres-1) << 12);
144
145 crtc_more_cntl |= (RADEON_CRTC_AUTO_HORZ_CENTER_EN |
146 RADEON_CRTC_AUTO_VERT_CENTER_EN);
147
148 blank_width = (mode->crtc_hblank_end - mode->crtc_hblank_start) / 8;
149 if (blank_width > 110)
150 blank_width = 110;
151
152 fp_crtc_h_total_disp = (((blank_width) & 0x3ff)
153 | ((((mode->crtc_hdisplay / 8) - 1) & 0x1ff) << 16));
154
155 hsync_wid = (mode->crtc_hsync_end - mode->crtc_hsync_start) / 8;
156 if (!hsync_wid)
157 hsync_wid = 1;
158
159 fp_h_sync_strt_wid = ((((mode->crtc_hsync_start - mode->crtc_hblank_start) / 8) & 0x1fff)
160 | ((hsync_wid & 0x3f) << 16)
161 | ((mode->flags & DRM_MODE_FLAG_NHSYNC)
162 ? RADEON_CRTC_H_SYNC_POL
163 : 0));
164
165 fp_crtc_v_total_disp = (((mode->crtc_vblank_end - mode->crtc_vblank_start) & 0xffff)
166 | ((mode->crtc_vdisplay - 1) << 16));
167
168 vsync_wid = mode->crtc_vsync_end - mode->crtc_vsync_start;
169 if (!vsync_wid)
170 vsync_wid = 1;
171
172 fp_v_sync_strt_wid = ((((mode->crtc_vsync_start - mode->crtc_vblank_start) & 0xfff)
173 | ((vsync_wid & 0x1f) << 16)
174 | ((mode->flags & DRM_MODE_FLAG_NVSYNC)
175 ? RADEON_CRTC_V_SYNC_POL
176 : 0)));
177
178 fp_horz_vert_active = (((native_mode->panel_yres) & 0xfff) |
179 (((native_mode->panel_xres / 8) & 0x1ff) << 16));
180 }
181 } else {
182 fp_horz_stretch |= ((xres/8-1) << 16);
183 fp_vert_stretch |= ((yres-1) << 12);
184 }
185
186 WREG32(RADEON_FP_HORZ_STRETCH, fp_horz_stretch);
187 WREG32(RADEON_FP_VERT_STRETCH, fp_vert_stretch);
188 WREG32(RADEON_CRTC_MORE_CNTL, crtc_more_cntl);
189 WREG32(RADEON_FP_HORZ_VERT_ACTIVE, fp_horz_vert_active);
190 WREG32(RADEON_FP_H_SYNC_STRT_WID, fp_h_sync_strt_wid);
191 WREG32(RADEON_FP_V_SYNC_STRT_WID, fp_v_sync_strt_wid);
192 WREG32(RADEON_FP_CRTC_H_TOTAL_DISP, fp_crtc_h_total_disp);
193 WREG32(RADEON_FP_CRTC_V_TOTAL_DISP, fp_crtc_v_total_disp);
194
195}
196
197static void radeon_legacy_lvds_dpms(struct drm_encoder *encoder, int mode) 33static void radeon_legacy_lvds_dpms(struct drm_encoder *encoder, int mode)
198{ 34{
199 struct drm_device *dev = encoder->dev; 35 struct drm_device *dev = encoder->dev;
@@ -287,9 +123,6 @@ static void radeon_legacy_lvds_mode_set(struct drm_encoder *encoder,
287 123
288 DRM_DEBUG("\n"); 124 DRM_DEBUG("\n");
289 125
290 if (radeon_crtc->crtc_id == 0)
291 radeon_legacy_rmx_mode_set(encoder, mode, adjusted_mode);
292
293 lvds_pll_cntl = RREG32(RADEON_LVDS_PLL_CNTL); 126 lvds_pll_cntl = RREG32(RADEON_LVDS_PLL_CNTL);
294 lvds_pll_cntl &= ~RADEON_LVDS_PLL_EN; 127 lvds_pll_cntl &= ~RADEON_LVDS_PLL_EN;
295 128
@@ -318,7 +151,7 @@ static void radeon_legacy_lvds_mode_set(struct drm_encoder *encoder,
318 151
319 if (radeon_crtc->crtc_id == 0) { 152 if (radeon_crtc->crtc_id == 0) {
320 if (ASIC_IS_R300(rdev)) { 153 if (ASIC_IS_R300(rdev)) {
321 if (radeon_encoder->flags & RADEON_USE_RMX) 154 if (radeon_encoder->rmx_type != RMX_OFF)
322 lvds_pll_cntl |= R300_LVDS_SRC_SEL_RMX; 155 lvds_pll_cntl |= R300_LVDS_SRC_SEL_RMX;
323 } else 156 } else
324 lvds_gen_cntl &= ~RADEON_LVDS_SEL_CRTC2; 157 lvds_gen_cntl &= ~RADEON_LVDS_SEL_CRTC2;
@@ -350,8 +183,6 @@ static bool radeon_legacy_lvds_mode_fixup(struct drm_encoder *encoder,
350 183
351 drm_mode_set_crtcinfo(adjusted_mode, 0); 184 drm_mode_set_crtcinfo(adjusted_mode, 0);
352 185
353 radeon_encoder->flags &= ~RADEON_USE_RMX;
354
355 if (radeon_encoder->rmx_type != RMX_OFF) 186 if (radeon_encoder->rmx_type != RMX_OFF)
356 radeon_rmx_mode_fixup(encoder, mode, adjusted_mode); 187 radeon_rmx_mode_fixup(encoder, mode, adjusted_mode);
357 188
@@ -455,9 +286,6 @@ static void radeon_legacy_primary_dac_mode_set(struct drm_encoder *encoder,
455 286
456 DRM_DEBUG("\n"); 287 DRM_DEBUG("\n");
457 288
458 if (radeon_crtc->crtc_id == 0)
459 radeon_legacy_rmx_mode_set(encoder, mode, adjusted_mode);
460
461 if (radeon_crtc->crtc_id == 0) { 289 if (radeon_crtc->crtc_id == 0) {
462 if (rdev->family == CHIP_R200 || ASIC_IS_R300(rdev)) { 290 if (rdev->family == CHIP_R200 || ASIC_IS_R300(rdev)) {
463 disp_output_cntl = RREG32(RADEON_DISP_OUTPUT_CNTL) & 291 disp_output_cntl = RREG32(RADEON_DISP_OUTPUT_CNTL) &
@@ -653,9 +481,6 @@ static void radeon_legacy_tmds_int_mode_set(struct drm_encoder *encoder,
653 481
654 DRM_DEBUG("\n"); 482 DRM_DEBUG("\n");
655 483
656 if (radeon_crtc->crtc_id == 0)
657 radeon_legacy_rmx_mode_set(encoder, mode, adjusted_mode);
658
659 tmp = tmds_pll_cntl = RREG32(RADEON_TMDS_PLL_CNTL); 484 tmp = tmds_pll_cntl = RREG32(RADEON_TMDS_PLL_CNTL);
660 tmp &= 0xfffff; 485 tmp &= 0xfffff;
661 if (rdev->family == CHIP_RV280) { 486 if (rdev->family == CHIP_RV280) {
@@ -711,7 +536,7 @@ static void radeon_legacy_tmds_int_mode_set(struct drm_encoder *encoder,
711 if (radeon_crtc->crtc_id == 0) { 536 if (radeon_crtc->crtc_id == 0) {
712 if (ASIC_IS_R300(rdev) || rdev->family == CHIP_R200) { 537 if (ASIC_IS_R300(rdev) || rdev->family == CHIP_R200) {
713 fp_gen_cntl &= ~R200_FP_SOURCE_SEL_MASK; 538 fp_gen_cntl &= ~R200_FP_SOURCE_SEL_MASK;
714 if (radeon_encoder->flags & RADEON_USE_RMX) 539 if (radeon_encoder->rmx_type != RMX_OFF)
715 fp_gen_cntl |= R200_FP_SOURCE_SEL_RMX; 540 fp_gen_cntl |= R200_FP_SOURCE_SEL_RMX;
716 else 541 else
717 fp_gen_cntl |= R200_FP_SOURCE_SEL_CRTC1; 542 fp_gen_cntl |= R200_FP_SOURCE_SEL_CRTC1;
@@ -820,9 +645,6 @@ static void radeon_legacy_tmds_ext_mode_set(struct drm_encoder *encoder,
820 645
821 DRM_DEBUG("\n"); 646 DRM_DEBUG("\n");
822 647
823 if (radeon_crtc->crtc_id == 0)
824 radeon_legacy_rmx_mode_set(encoder, mode, adjusted_mode);
825
826 if (rdev->is_atom_bios) { 648 if (rdev->is_atom_bios) {
827 radeon_encoder->pixel_clock = adjusted_mode->clock; 649 radeon_encoder->pixel_clock = adjusted_mode->clock;
828 atombios_external_tmds_setup(encoder, ATOM_ENABLE); 650 atombios_external_tmds_setup(encoder, ATOM_ENABLE);
@@ -856,7 +678,7 @@ static void radeon_legacy_tmds_ext_mode_set(struct drm_encoder *encoder,
856 if (radeon_crtc->crtc_id == 0) { 678 if (radeon_crtc->crtc_id == 0) {
857 if ((rdev->family == CHIP_R200) || ASIC_IS_R300(rdev)) { 679 if ((rdev->family == CHIP_R200) || ASIC_IS_R300(rdev)) {
858 fp2_gen_cntl &= ~R200_FP2_SOURCE_SEL_MASK; 680 fp2_gen_cntl &= ~R200_FP2_SOURCE_SEL_MASK;
859 if (radeon_encoder->flags & RADEON_USE_RMX) 681 if (radeon_encoder->rmx_type != RMX_OFF)
860 fp2_gen_cntl |= R200_FP2_SOURCE_SEL_RMX; 682 fp2_gen_cntl |= R200_FP2_SOURCE_SEL_RMX;
861 else 683 else
862 fp2_gen_cntl |= R200_FP2_SOURCE_SEL_CRTC1; 684 fp2_gen_cntl |= R200_FP2_SOURCE_SEL_CRTC1;
@@ -1014,9 +836,6 @@ static void radeon_legacy_tv_dac_mode_set(struct drm_encoder *encoder,
1014 836
1015 DRM_DEBUG("\n"); 837 DRM_DEBUG("\n");
1016 838
1017 if (radeon_crtc->crtc_id == 0)
1018 radeon_legacy_rmx_mode_set(encoder, mode, adjusted_mode);
1019
1020 if (rdev->family != CHIP_R200) { 839 if (rdev->family != CHIP_R200) {
1021 tv_dac_cntl = RREG32(RADEON_TV_DAC_CNTL); 840 tv_dac_cntl = RREG32(RADEON_TV_DAC_CNTL);
1022 if (rdev->family == CHIP_R420 || 841 if (rdev->family == CHIP_R420 ||
@@ -1243,6 +1062,7 @@ radeon_add_legacy_encoder(struct drm_device *dev, uint32_t encoder_id, uint32_t
1243 1062
1244 radeon_encoder->encoder_id = encoder_id; 1063 radeon_encoder->encoder_id = encoder_id;
1245 radeon_encoder->devices = supported_device; 1064 radeon_encoder->devices = supported_device;
1065 radeon_encoder->rmx_type = RMX_OFF;
1246 1066
1247 switch (radeon_encoder->encoder_id) { 1067 switch (radeon_encoder->encoder_id) {
1248 case ENCODER_OBJECT_ID_INTERNAL_LVDS: 1068 case ENCODER_OBJECT_ID_INTERNAL_LVDS:
diff --git a/drivers/gpu/drm/radeon/radeon_mode.h b/drivers/gpu/drm/radeon/radeon_mode.h
index ba89b59f6e50..3b09a1f2d8f9 100644
--- a/drivers/gpu/drm/radeon/radeon_mode.h
+++ b/drivers/gpu/drm/radeon/radeon_mode.h
@@ -36,6 +36,9 @@
36#include <linux/i2c.h> 36#include <linux/i2c.h>
37#include <linux/i2c-id.h> 37#include <linux/i2c-id.h>
38#include <linux/i2c-algo-bit.h> 38#include <linux/i2c-algo-bit.h>
39#include "radeon_fixed.h"
40
41struct radeon_device;
39 42
40#define to_radeon_crtc(x) container_of(x, struct radeon_crtc, base) 43#define to_radeon_crtc(x) container_of(x, struct radeon_crtc, base)
41#define to_radeon_connector(x) container_of(x, struct radeon_connector, base) 44#define to_radeon_connector(x) container_of(x, struct radeon_connector, base)
@@ -171,6 +174,18 @@ struct radeon_mode_info {
171 struct atom_context *atom_context; 174 struct atom_context *atom_context;
172 enum radeon_connector_table connector_table; 175 enum radeon_connector_table connector_table;
173 bool mode_config_initialized; 176 bool mode_config_initialized;
177 struct radeon_crtc *crtcs[2];
178};
179
180struct radeon_native_mode {
181 /* preferred mode */
182 uint32_t panel_xres, panel_yres;
183 uint32_t hoverplus, hsync_width;
184 uint32_t hblank;
185 uint32_t voverplus, vsync_width;
186 uint32_t vblank;
187 uint32_t dotclock;
188 uint32_t flags;
174}; 189};
175 190
176struct radeon_crtc { 191struct radeon_crtc {
@@ -188,19 +203,11 @@ struct radeon_crtc {
188 int cursor_height; 203 int cursor_height;
189 uint32_t legacy_display_base_addr; 204 uint32_t legacy_display_base_addr;
190 uint32_t legacy_cursor_offset; 205 uint32_t legacy_cursor_offset;
191}; 206 enum radeon_rmx_type rmx_type;
192 207 uint32_t devices;
193#define RADEON_USE_RMX 1 208 fixed20_12 vsc;
194 209 fixed20_12 hsc;
195struct radeon_native_mode { 210 struct radeon_native_mode native_mode;
196 /* preferred mode */
197 uint32_t panel_xres, panel_yres;
198 uint32_t hoverplus, hsync_width;
199 uint32_t hblank;
200 uint32_t voverplus, vsync_width;
201 uint32_t vblank;
202 uint32_t dotclock;
203 uint32_t flags;
204}; 211};
205 212
206struct radeon_encoder_primary_dac { 213struct radeon_encoder_primary_dac {
@@ -386,16 +393,9 @@ void radeon_enc_destroy(struct drm_encoder *encoder);
386void radeon_copy_fb(struct drm_device *dev, struct drm_gem_object *dst_obj); 393void radeon_copy_fb(struct drm_device *dev, struct drm_gem_object *dst_obj);
387void radeon_combios_asic_init(struct drm_device *dev); 394void radeon_combios_asic_init(struct drm_device *dev);
388extern int radeon_static_clocks_init(struct drm_device *dev); 395extern int radeon_static_clocks_init(struct drm_device *dev);
389void radeon_init_disp_bw_legacy(struct drm_device *dev, 396bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
390 struct drm_display_mode *mode1, 397 struct drm_display_mode *mode,
391 uint32_t pixel_bytes1, 398 struct drm_display_mode *adjusted_mode);
392 struct drm_display_mode *mode2, 399void atom_rv515_force_tv_scaler(struct radeon_device *rdev);
393 uint32_t pixel_bytes2);
394void radeon_init_disp_bw_avivo(struct drm_device *dev,
395 struct drm_display_mode *mode1,
396 uint32_t pixel_bytes1,
397 struct drm_display_mode *mode2,
398 uint32_t pixel_bytes2);
399void radeon_init_disp_bandwidth(struct drm_device *dev);
400 400
401#endif 401#endif
diff --git a/drivers/gpu/drm/radeon/radeon_share.h b/drivers/gpu/drm/radeon/radeon_share.h
new file mode 100644
index 000000000000..63a773578f17
--- /dev/null
+++ b/drivers/gpu/drm/radeon/radeon_share.h
@@ -0,0 +1,39 @@
1/*
2 * Copyright 2008 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 * Copyright 2009 Jerome Glisse.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22 * OTHER DEALINGS IN THE SOFTWARE.
23 *
24 * Authors: Dave Airlie
25 * Alex Deucher
26 * Jerome Glisse
27 */
28#ifndef __RADEON_SHARE_H__
29#define __RADEON_SHARE_H__
30
31void r100_vram_init_sizes(struct radeon_device *rdev);
32
33void rs690_line_buffer_adjust(struct radeon_device *rdev,
34 struct drm_display_mode *mode1,
35 struct drm_display_mode *mode2);
36
37void rv515_bandwidth_avivo_update(struct radeon_device *rdev);
38
39#endif
diff --git a/drivers/gpu/drm/radeon/rs400.c b/drivers/gpu/drm/radeon/rs400.c
index daf24e85cba3..96a3c8486d25 100644
--- a/drivers/gpu/drm/radeon/rs400.c
+++ b/drivers/gpu/drm/radeon/rs400.c
@@ -29,6 +29,7 @@
29#include <drm/drmP.h> 29#include <drm/drmP.h>
30#include "radeon_reg.h" 30#include "radeon_reg.h"
31#include "radeon.h" 31#include "radeon.h"
32#include "radeon_share.h"
32 33
33/* rs400,rs480 depends on : */ 34/* rs400,rs480 depends on : */
34void r100_hdp_reset(struct radeon_device *rdev); 35void r100_hdp_reset(struct radeon_device *rdev);
diff --git a/drivers/gpu/drm/radeon/rs600.c b/drivers/gpu/drm/radeon/rs600.c
index ab0c967553e6..bccdce7fd379 100644
--- a/drivers/gpu/drm/radeon/rs600.c
+++ b/drivers/gpu/drm/radeon/rs600.c
@@ -301,6 +301,11 @@ void rs600_vram_info(struct radeon_device *rdev)
301 rdev->mc.vram_width = 128; 301 rdev->mc.vram_width = 128;
302} 302}
303 303
304void rs600_bandwidth_update(struct radeon_device *rdev)
305{
306 /* FIXME: implement, should this be like rs690 ? */
307}
308
304 309
305/* 310/*
306 * Indirect registers accessor 311 * Indirect registers accessor
diff --git a/drivers/gpu/drm/radeon/rs690.c b/drivers/gpu/drm/radeon/rs690.c
index 79ba85042b5f..97eaee3d28b8 100644
--- a/drivers/gpu/drm/radeon/rs690.c
+++ b/drivers/gpu/drm/radeon/rs690.c
@@ -28,6 +28,9 @@
28#include "drmP.h" 28#include "drmP.h"
29#include "radeon_reg.h" 29#include "radeon_reg.h"
30#include "radeon.h" 30#include "radeon.h"
31#include "rs690r.h"
32#include "atom.h"
33#include "atom-bits.h"
31 34
32/* rs690,rs740 depends on : */ 35/* rs690,rs740 depends on : */
33void r100_hdp_reset(struct radeon_device *rdev); 36void r100_hdp_reset(struct radeon_device *rdev);
@@ -138,9 +141,82 @@ void rs690_gpu_init(struct radeon_device *rdev)
138/* 141/*
139 * VRAM info. 142 * VRAM info.
140 */ 143 */
144void rs690_pm_info(struct radeon_device *rdev)
145{
146 int index = GetIndexIntoMasterTable(DATA, IntegratedSystemInfo);
147 struct _ATOM_INTEGRATED_SYSTEM_INFO *info;
148 struct _ATOM_INTEGRATED_SYSTEM_INFO_V2 *info_v2;
149 void *ptr;
150 uint16_t data_offset;
151 uint8_t frev, crev;
152 fixed20_12 tmp;
153
154 atom_parse_data_header(rdev->mode_info.atom_context, index, NULL,
155 &frev, &crev, &data_offset);
156 ptr = rdev->mode_info.atom_context->bios + data_offset;
157 info = (struct _ATOM_INTEGRATED_SYSTEM_INFO *)ptr;
158 info_v2 = (struct _ATOM_INTEGRATED_SYSTEM_INFO_V2 *)ptr;
159 /* Get various system informations from bios */
160 switch (crev) {
161 case 1:
162 tmp.full = rfixed_const(100);
163 rdev->pm.igp_sideport_mclk.full = rfixed_const(info->ulBootUpMemoryClock);
164 rdev->pm.igp_sideport_mclk.full = rfixed_div(rdev->pm.igp_sideport_mclk, tmp);
165 rdev->pm.igp_system_mclk.full = rfixed_const(le16_to_cpu(info->usK8MemoryClock));
166 rdev->pm.igp_ht_link_clk.full = rfixed_const(le16_to_cpu(info->usFSBClock));
167 rdev->pm.igp_ht_link_width.full = rfixed_const(info->ucHTLinkWidth);
168 break;
169 case 2:
170 tmp.full = rfixed_const(100);
171 rdev->pm.igp_sideport_mclk.full = rfixed_const(info_v2->ulBootUpSidePortClock);
172 rdev->pm.igp_sideport_mclk.full = rfixed_div(rdev->pm.igp_sideport_mclk, tmp);
173 rdev->pm.igp_system_mclk.full = rfixed_const(info_v2->ulBootUpUMAClock);
174 rdev->pm.igp_system_mclk.full = rfixed_div(rdev->pm.igp_system_mclk, tmp);
175 rdev->pm.igp_ht_link_clk.full = rfixed_const(info_v2->ulHTLinkFreq);
176 rdev->pm.igp_ht_link_clk.full = rfixed_div(rdev->pm.igp_ht_link_clk, tmp);
177 rdev->pm.igp_ht_link_width.full = rfixed_const(le16_to_cpu(info_v2->usMinHTLinkWidth));
178 break;
179 default:
180 tmp.full = rfixed_const(100);
181 /* We assume the slower possible clock ie worst case */
182 /* DDR 333Mhz */
183 rdev->pm.igp_sideport_mclk.full = rfixed_const(333);
184 /* FIXME: system clock ? */
185 rdev->pm.igp_system_mclk.full = rfixed_const(100);
186 rdev->pm.igp_system_mclk.full = rfixed_div(rdev->pm.igp_system_mclk, tmp);
187 rdev->pm.igp_ht_link_clk.full = rfixed_const(200);
188 rdev->pm.igp_ht_link_width.full = rfixed_const(8);
189 DRM_ERROR("No integrated system info for your GPU, using safe default\n");
190 break;
191 }
192 /* Compute various bandwidth */
193 /* k8_bandwidth = (memory_clk / 2) * 2 * 8 * 0.5 = memory_clk * 4 */
194 tmp.full = rfixed_const(4);
195 rdev->pm.k8_bandwidth.full = rfixed_mul(rdev->pm.igp_system_mclk, tmp);
196 /* ht_bandwidth = ht_clk * 2 * ht_width / 8 * 0.8
197 * = ht_clk * ht_width / 5
198 */
199 tmp.full = rfixed_const(5);
200 rdev->pm.ht_bandwidth.full = rfixed_mul(rdev->pm.igp_ht_link_clk,
201 rdev->pm.igp_ht_link_width);
202 rdev->pm.ht_bandwidth.full = rfixed_div(rdev->pm.ht_bandwidth, tmp);
203 if (tmp.full < rdev->pm.max_bandwidth.full) {
204 /* HT link is a limiting factor */
205 rdev->pm.max_bandwidth.full = tmp.full;
206 }
207 /* sideport_bandwidth = (sideport_clk / 2) * 2 * 2 * 0.7
208 * = (sideport_clk * 14) / 10
209 */
210 tmp.full = rfixed_const(14);
211 rdev->pm.sideport_bandwidth.full = rfixed_mul(rdev->pm.igp_sideport_mclk, tmp);
212 tmp.full = rfixed_const(10);
213 rdev->pm.sideport_bandwidth.full = rfixed_div(rdev->pm.sideport_bandwidth, tmp);
214}
215
141void rs690_vram_info(struct radeon_device *rdev) 216void rs690_vram_info(struct radeon_device *rdev)
142{ 217{
143 uint32_t tmp; 218 uint32_t tmp;
219 fixed20_12 a;
144 220
145 rs400_gart_adjust_size(rdev); 221 rs400_gart_adjust_size(rdev);
146 /* DDR for all card after R300 & IGP */ 222 /* DDR for all card after R300 & IGP */
@@ -156,8 +232,404 @@ void rs690_vram_info(struct radeon_device *rdev)
156 232
157 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0); 233 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
158 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0); 234 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
235 rs690_pm_info(rdev);
236 /* FIXME: we should enforce default clock in case GPU is not in
237 * default setup
238 */
239 a.full = rfixed_const(100);
240 rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
241 rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
242 a.full = rfixed_const(16);
243 /* core_bandwidth = sclk(Mhz) * 16 */
244 rdev->pm.core_bandwidth.full = rfixed_div(rdev->pm.sclk, a);
245}
246
247void rs690_line_buffer_adjust(struct radeon_device *rdev,
248 struct drm_display_mode *mode1,
249 struct drm_display_mode *mode2)
250{
251 u32 tmp;
252
253 /*
254 * Line Buffer Setup
255 * There is a single line buffer shared by both display controllers.
256 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
257 * the display controllers. The paritioning can either be done
258 * manually or via one of four preset allocations specified in bits 1:0:
259 * 0 - line buffer is divided in half and shared between crtc
260 * 1 - D1 gets 3/4 of the line buffer, D2 gets 1/4
261 * 2 - D1 gets the whole buffer
262 * 3 - D1 gets 1/4 of the line buffer, D2 gets 3/4
263 * Setting bit 2 of DC_LB_MEMORY_SPLIT controls switches to manual
264 * allocation mode. In manual allocation mode, D1 always starts at 0,
265 * D1 end/2 is specified in bits 14:4; D2 allocation follows D1.
266 */
267 tmp = RREG32(DC_LB_MEMORY_SPLIT) & ~DC_LB_MEMORY_SPLIT_MASK;
268 tmp &= ~DC_LB_MEMORY_SPLIT_SHIFT_MODE;
269 /* auto */
270 if (mode1 && mode2) {
271 if (mode1->hdisplay > mode2->hdisplay) {
272 if (mode1->hdisplay > 2560)
273 tmp |= DC_LB_MEMORY_SPLIT_D1_3Q_D2_1Q;
274 else
275 tmp |= DC_LB_MEMORY_SPLIT_D1HALF_D2HALF;
276 } else if (mode2->hdisplay > mode1->hdisplay) {
277 if (mode2->hdisplay > 2560)
278 tmp |= DC_LB_MEMORY_SPLIT_D1_1Q_D2_3Q;
279 else
280 tmp |= DC_LB_MEMORY_SPLIT_D1HALF_D2HALF;
281 } else
282 tmp |= AVIVO_DC_LB_MEMORY_SPLIT_D1HALF_D2HALF;
283 } else if (mode1) {
284 tmp |= DC_LB_MEMORY_SPLIT_D1_ONLY;
285 } else if (mode2) {
286 tmp |= DC_LB_MEMORY_SPLIT_D1_1Q_D2_3Q;
287 }
288 WREG32(DC_LB_MEMORY_SPLIT, tmp);
159} 289}
160 290
291struct rs690_watermark {
292 u32 lb_request_fifo_depth;
293 fixed20_12 num_line_pair;
294 fixed20_12 estimated_width;
295 fixed20_12 worst_case_latency;
296 fixed20_12 consumption_rate;
297 fixed20_12 active_time;
298 fixed20_12 dbpp;
299 fixed20_12 priority_mark_max;
300 fixed20_12 priority_mark;
301 fixed20_12 sclk;
302};
303
304void rs690_crtc_bandwidth_compute(struct radeon_device *rdev,
305 struct radeon_crtc *crtc,
306 struct rs690_watermark *wm)
307{
308 struct drm_display_mode *mode = &crtc->base.mode;
309 fixed20_12 a, b, c;
310 fixed20_12 pclk, request_fifo_depth, tolerable_latency, estimated_width;
311 fixed20_12 consumption_time, line_time, chunk_time, read_delay_latency;
312 /* FIXME: detect IGP with sideport memory, i don't think there is any
313 * such product available
314 */
315 bool sideport = false;
316
317 if (!crtc->base.enabled) {
318 /* FIXME: wouldn't it better to set priority mark to maximum */
319 wm->lb_request_fifo_depth = 4;
320 return;
321 }
322
323 if (crtc->vsc.full > rfixed_const(2))
324 wm->num_line_pair.full = rfixed_const(2);
325 else
326 wm->num_line_pair.full = rfixed_const(1);
327
328 b.full = rfixed_const(mode->crtc_hdisplay);
329 c.full = rfixed_const(256);
330 a.full = rfixed_mul(wm->num_line_pair, b);
331 request_fifo_depth.full = rfixed_div(a, c);
332 if (a.full < rfixed_const(4)) {
333 wm->lb_request_fifo_depth = 4;
334 } else {
335 wm->lb_request_fifo_depth = rfixed_trunc(request_fifo_depth);
336 }
337
338 /* Determine consumption rate
339 * pclk = pixel clock period(ns) = 1000 / (mode.clock / 1000)
340 * vtaps = number of vertical taps,
341 * vsc = vertical scaling ratio, defined as source/destination
342 * hsc = horizontal scaling ration, defined as source/destination
343 */
344 a.full = rfixed_const(mode->clock);
345 b.full = rfixed_const(1000);
346 a.full = rfixed_div(a, b);
347 pclk.full = rfixed_div(b, a);
348 if (crtc->rmx_type != RMX_OFF) {
349 b.full = rfixed_const(2);
350 if (crtc->vsc.full > b.full)
351 b.full = crtc->vsc.full;
352 b.full = rfixed_mul(b, crtc->hsc);
353 c.full = rfixed_const(2);
354 b.full = rfixed_div(b, c);
355 consumption_time.full = rfixed_div(pclk, b);
356 } else {
357 consumption_time.full = pclk.full;
358 }
359 a.full = rfixed_const(1);
360 wm->consumption_rate.full = rfixed_div(a, consumption_time);
361
362
363 /* Determine line time
364 * LineTime = total time for one line of displayhtotal
365 * LineTime = total number of horizontal pixels
366 * pclk = pixel clock period(ns)
367 */
368 a.full = rfixed_const(crtc->base.mode.crtc_htotal);
369 line_time.full = rfixed_mul(a, pclk);
370
371 /* Determine active time
372 * ActiveTime = time of active region of display within one line,
373 * hactive = total number of horizontal active pixels
374 * htotal = total number of horizontal pixels
375 */
376 a.full = rfixed_const(crtc->base.mode.crtc_htotal);
377 b.full = rfixed_const(crtc->base.mode.crtc_hdisplay);
378 wm->active_time.full = rfixed_mul(line_time, b);
379 wm->active_time.full = rfixed_div(wm->active_time, a);
380
381 /* Maximun bandwidth is the minimun bandwidth of all component */
382 rdev->pm.max_bandwidth = rdev->pm.core_bandwidth;
383 if (sideport) {
384 if (rdev->pm.max_bandwidth.full > rdev->pm.sideport_bandwidth.full &&
385 rdev->pm.sideport_bandwidth.full)
386 rdev->pm.max_bandwidth = rdev->pm.sideport_bandwidth;
387 read_delay_latency.full = rfixed_const(370 * 800 * 1000);
388 read_delay_latency.full = rfixed_div(read_delay_latency,
389 rdev->pm.igp_sideport_mclk);
390 } else {
391 if (rdev->pm.max_bandwidth.full > rdev->pm.k8_bandwidth.full &&
392 rdev->pm.k8_bandwidth.full)
393 rdev->pm.max_bandwidth = rdev->pm.k8_bandwidth;
394 if (rdev->pm.max_bandwidth.full > rdev->pm.ht_bandwidth.full &&
395 rdev->pm.ht_bandwidth.full)
396 rdev->pm.max_bandwidth = rdev->pm.ht_bandwidth;
397 read_delay_latency.full = rfixed_const(5000);
398 }
399
400 /* sclk = system clocks(ns) = 1000 / max_bandwidth / 16 */
401 a.full = rfixed_const(16);
402 rdev->pm.sclk.full = rfixed_mul(rdev->pm.max_bandwidth, a);
403 a.full = rfixed_const(1000);
404 rdev->pm.sclk.full = rfixed_div(a, rdev->pm.sclk);
405 /* Determine chunk time
406 * ChunkTime = the time it takes the DCP to send one chunk of data
407 * to the LB which consists of pipeline delay and inter chunk gap
408 * sclk = system clock(ns)
409 */
410 a.full = rfixed_const(256 * 13);
411 chunk_time.full = rfixed_mul(rdev->pm.sclk, a);
412 a.full = rfixed_const(10);
413 chunk_time.full = rfixed_div(chunk_time, a);
414
415 /* Determine the worst case latency
416 * NumLinePair = Number of line pairs to request(1=2 lines, 2=4 lines)
417 * WorstCaseLatency = worst case time from urgent to when the MC starts
418 * to return data
419 * READ_DELAY_IDLE_MAX = constant of 1us
420 * ChunkTime = time it takes the DCP to send one chunk of data to the LB
421 * which consists of pipeline delay and inter chunk gap
422 */
423 if (rfixed_trunc(wm->num_line_pair) > 1) {
424 a.full = rfixed_const(3);
425 wm->worst_case_latency.full = rfixed_mul(a, chunk_time);
426 wm->worst_case_latency.full += read_delay_latency.full;
427 } else {
428 a.full = rfixed_const(2);
429 wm->worst_case_latency.full = rfixed_mul(a, chunk_time);
430 wm->worst_case_latency.full += read_delay_latency.full;
431 }
432
433 /* Determine the tolerable latency
434 * TolerableLatency = Any given request has only 1 line time
435 * for the data to be returned
436 * LBRequestFifoDepth = Number of chunk requests the LB can
437 * put into the request FIFO for a display
438 * LineTime = total time for one line of display
439 * ChunkTime = the time it takes the DCP to send one chunk
440 * of data to the LB which consists of
441 * pipeline delay and inter chunk gap
442 */
443 if ((2+wm->lb_request_fifo_depth) >= rfixed_trunc(request_fifo_depth)) {
444 tolerable_latency.full = line_time.full;
445 } else {
446 tolerable_latency.full = rfixed_const(wm->lb_request_fifo_depth - 2);
447 tolerable_latency.full = request_fifo_depth.full - tolerable_latency.full;
448 tolerable_latency.full = rfixed_mul(tolerable_latency, chunk_time);
449 tolerable_latency.full = line_time.full - tolerable_latency.full;
450 }
451 /* We assume worst case 32bits (4 bytes) */
452 wm->dbpp.full = rfixed_const(4 * 8);
453
454 /* Determine the maximum priority mark
455 * width = viewport width in pixels
456 */
457 a.full = rfixed_const(16);
458 wm->priority_mark_max.full = rfixed_const(crtc->base.mode.crtc_hdisplay);
459 wm->priority_mark_max.full = rfixed_div(wm->priority_mark_max, a);
460
461 /* Determine estimated width */
462 estimated_width.full = tolerable_latency.full - wm->worst_case_latency.full;
463 estimated_width.full = rfixed_div(estimated_width, consumption_time);
464 if (rfixed_trunc(estimated_width) > crtc->base.mode.crtc_hdisplay) {
465 wm->priority_mark.full = rfixed_const(10);
466 } else {
467 a.full = rfixed_const(16);
468 wm->priority_mark.full = rfixed_div(estimated_width, a);
469 wm->priority_mark.full = wm->priority_mark_max.full - wm->priority_mark.full;
470 }
471}
472
473void rs690_bandwidth_update(struct radeon_device *rdev)
474{
475 struct drm_display_mode *mode0 = NULL;
476 struct drm_display_mode *mode1 = NULL;
477 struct rs690_watermark wm0;
478 struct rs690_watermark wm1;
479 u32 tmp;
480 fixed20_12 priority_mark02, priority_mark12, fill_rate;
481 fixed20_12 a, b;
482
483 if (rdev->mode_info.crtcs[0]->base.enabled)
484 mode0 = &rdev->mode_info.crtcs[0]->base.mode;
485 if (rdev->mode_info.crtcs[1]->base.enabled)
486 mode1 = &rdev->mode_info.crtcs[1]->base.mode;
487 /*
488 * Set display0/1 priority up in the memory controller for
489 * modes if the user specifies HIGH for displaypriority
490 * option.
491 */
492 if (rdev->disp_priority == 2) {
493 tmp = RREG32_MC(MC_INIT_MISC_LAT_TIMER);
494 tmp &= ~MC_DISP1R_INIT_LAT_MASK;
495 tmp &= ~MC_DISP0R_INIT_LAT_MASK;
496 if (mode1)
497 tmp |= (1 << MC_DISP1R_INIT_LAT_SHIFT);
498 if (mode0)
499 tmp |= (1 << MC_DISP0R_INIT_LAT_SHIFT);
500 WREG32_MC(MC_INIT_MISC_LAT_TIMER, tmp);
501 }
502 rs690_line_buffer_adjust(rdev, mode0, mode1);
503
504 if ((rdev->family == CHIP_RS690) || (rdev->family == CHIP_RS740))
505 WREG32(DCP_CONTROL, 0);
506 if ((rdev->family == CHIP_RS780) || (rdev->family == CHIP_RS880))
507 WREG32(DCP_CONTROL, 2);
508
509 rs690_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[0], &wm0);
510 rs690_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[1], &wm1);
511
512 tmp = (wm0.lb_request_fifo_depth - 1);
513 tmp |= (wm1.lb_request_fifo_depth - 1) << 16;
514 WREG32(LB_MAX_REQ_OUTSTANDING, tmp);
515
516 if (mode0 && mode1) {
517 if (rfixed_trunc(wm0.dbpp) > 64)
518 a.full = rfixed_mul(wm0.dbpp, wm0.num_line_pair);
519 else
520 a.full = wm0.num_line_pair.full;
521 if (rfixed_trunc(wm1.dbpp) > 64)
522 b.full = rfixed_mul(wm1.dbpp, wm1.num_line_pair);
523 else
524 b.full = wm1.num_line_pair.full;
525 a.full += b.full;
526 fill_rate.full = rfixed_div(wm0.sclk, a);
527 if (wm0.consumption_rate.full > fill_rate.full) {
528 b.full = wm0.consumption_rate.full - fill_rate.full;
529 b.full = rfixed_mul(b, wm0.active_time);
530 a.full = rfixed_mul(wm0.worst_case_latency,
531 wm0.consumption_rate);
532 a.full = a.full + b.full;
533 b.full = rfixed_const(16 * 1000);
534 priority_mark02.full = rfixed_div(a, b);
535 } else {
536 a.full = rfixed_mul(wm0.worst_case_latency,
537 wm0.consumption_rate);
538 b.full = rfixed_const(16 * 1000);
539 priority_mark02.full = rfixed_div(a, b);
540 }
541 if (wm1.consumption_rate.full > fill_rate.full) {
542 b.full = wm1.consumption_rate.full - fill_rate.full;
543 b.full = rfixed_mul(b, wm1.active_time);
544 a.full = rfixed_mul(wm1.worst_case_latency,
545 wm1.consumption_rate);
546 a.full = a.full + b.full;
547 b.full = rfixed_const(16 * 1000);
548 priority_mark12.full = rfixed_div(a, b);
549 } else {
550 a.full = rfixed_mul(wm1.worst_case_latency,
551 wm1.consumption_rate);
552 b.full = rfixed_const(16 * 1000);
553 priority_mark12.full = rfixed_div(a, b);
554 }
555 if (wm0.priority_mark.full > priority_mark02.full)
556 priority_mark02.full = wm0.priority_mark.full;
557 if (rfixed_trunc(priority_mark02) < 0)
558 priority_mark02.full = 0;
559 if (wm0.priority_mark_max.full > priority_mark02.full)
560 priority_mark02.full = wm0.priority_mark_max.full;
561 if (wm1.priority_mark.full > priority_mark12.full)
562 priority_mark12.full = wm1.priority_mark.full;
563 if (rfixed_trunc(priority_mark12) < 0)
564 priority_mark12.full = 0;
565 if (wm1.priority_mark_max.full > priority_mark12.full)
566 priority_mark12.full = wm1.priority_mark_max.full;
567 WREG32(D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02));
568 WREG32(D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02));
569 WREG32(D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12));
570 WREG32(D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12));
571 } else if (mode0) {
572 if (rfixed_trunc(wm0.dbpp) > 64)
573 a.full = rfixed_mul(wm0.dbpp, wm0.num_line_pair);
574 else
575 a.full = wm0.num_line_pair.full;
576 fill_rate.full = rfixed_div(wm0.sclk, a);
577 if (wm0.consumption_rate.full > fill_rate.full) {
578 b.full = wm0.consumption_rate.full - fill_rate.full;
579 b.full = rfixed_mul(b, wm0.active_time);
580 a.full = rfixed_mul(wm0.worst_case_latency,
581 wm0.consumption_rate);
582 a.full = a.full + b.full;
583 b.full = rfixed_const(16 * 1000);
584 priority_mark02.full = rfixed_div(a, b);
585 } else {
586 a.full = rfixed_mul(wm0.worst_case_latency,
587 wm0.consumption_rate);
588 b.full = rfixed_const(16 * 1000);
589 priority_mark02.full = rfixed_div(a, b);
590 }
591 if (wm0.priority_mark.full > priority_mark02.full)
592 priority_mark02.full = wm0.priority_mark.full;
593 if (rfixed_trunc(priority_mark02) < 0)
594 priority_mark02.full = 0;
595 if (wm0.priority_mark_max.full > priority_mark02.full)
596 priority_mark02.full = wm0.priority_mark_max.full;
597 WREG32(D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02));
598 WREG32(D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02));
599 WREG32(D2MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF);
600 WREG32(D2MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF);
601 } else {
602 if (rfixed_trunc(wm1.dbpp) > 64)
603 a.full = rfixed_mul(wm1.dbpp, wm1.num_line_pair);
604 else
605 a.full = wm1.num_line_pair.full;
606 fill_rate.full = rfixed_div(wm1.sclk, a);
607 if (wm1.consumption_rate.full > fill_rate.full) {
608 b.full = wm1.consumption_rate.full - fill_rate.full;
609 b.full = rfixed_mul(b, wm1.active_time);
610 a.full = rfixed_mul(wm1.worst_case_latency,
611 wm1.consumption_rate);
612 a.full = a.full + b.full;
613 b.full = rfixed_const(16 * 1000);
614 priority_mark12.full = rfixed_div(a, b);
615 } else {
616 a.full = rfixed_mul(wm1.worst_case_latency,
617 wm1.consumption_rate);
618 b.full = rfixed_const(16 * 1000);
619 priority_mark12.full = rfixed_div(a, b);
620 }
621 if (wm1.priority_mark.full > priority_mark12.full)
622 priority_mark12.full = wm1.priority_mark.full;
623 if (rfixed_trunc(priority_mark12) < 0)
624 priority_mark12.full = 0;
625 if (wm1.priority_mark_max.full > priority_mark12.full)
626 priority_mark12.full = wm1.priority_mark_max.full;
627 WREG32(D1MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF);
628 WREG32(D1MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF);
629 WREG32(D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12));
630 WREG32(D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12));
631 }
632}
161 633
162/* 634/*
163 * Indirect registers accessor 635 * Indirect registers accessor
diff --git a/drivers/gpu/drm/radeon/rs690r.h b/drivers/gpu/drm/radeon/rs690r.h
new file mode 100644
index 000000000000..c0d9faa2175b
--- /dev/null
+++ b/drivers/gpu/drm/radeon/rs690r.h
@@ -0,0 +1,99 @@
1/*
2 * Copyright 2008 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 * Copyright 2009 Jerome Glisse.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22 * OTHER DEALINGS IN THE SOFTWARE.
23 *
24 * Authors: Dave Airlie
25 * Alex Deucher
26 * Jerome Glisse
27 */
28#ifndef RS690R_H
29#define RS690R_H
30
31/* RS690/RS740 registers */
32#define MC_INDEX 0x0078
33# define MC_INDEX_MASK 0x1FF
34# define MC_INDEX_WR_EN (1 << 9)
35# define MC_INDEX_WR_ACK 0x7F
36#define MC_DATA 0x007C
37#define HDP_FB_LOCATION 0x0134
38#define DC_LB_MEMORY_SPLIT 0x6520
39#define DC_LB_MEMORY_SPLIT_MASK 0x00000003
40#define DC_LB_MEMORY_SPLIT_SHIFT 0
41#define DC_LB_MEMORY_SPLIT_D1HALF_D2HALF 0
42#define DC_LB_MEMORY_SPLIT_D1_3Q_D2_1Q 1
43#define DC_LB_MEMORY_SPLIT_D1_ONLY 2
44#define DC_LB_MEMORY_SPLIT_D1_1Q_D2_3Q 3
45#define DC_LB_MEMORY_SPLIT_SHIFT_MODE (1 << 2)
46#define DC_LB_DISP1_END_ADR_SHIFT 4
47#define DC_LB_DISP1_END_ADR_MASK 0x00007FF0
48#define D1MODE_PRIORITY_A_CNT 0x6548
49#define MODE_PRIORITY_MARK_MASK 0x00007FFF
50#define MODE_PRIORITY_OFF (1 << 16)
51#define MODE_PRIORITY_ALWAYS_ON (1 << 20)
52#define MODE_PRIORITY_FORCE_MASK (1 << 24)
53#define D1MODE_PRIORITY_B_CNT 0x654C
54#define LB_MAX_REQ_OUTSTANDING 0x6D58
55#define LB_D1_MAX_REQ_OUTSTANDING_MASK 0x0000000F
56#define LB_D1_MAX_REQ_OUTSTANDING_SHIFT 0
57#define LB_D2_MAX_REQ_OUTSTANDING_MASK 0x000F0000
58#define LB_D2_MAX_REQ_OUTSTANDING_SHIFT 16
59#define DCP_CONTROL 0x6C9C
60#define D2MODE_PRIORITY_A_CNT 0x6D48
61#define D2MODE_PRIORITY_B_CNT 0x6D4C
62
63/* MC indirect registers */
64#define MC_STATUS_IDLE (1 << 0)
65#define MC_MISC_CNTL 0x18
66#define DISABLE_GTW (1 << 1)
67#define GART_INDEX_REG_EN (1 << 12)
68#define BLOCK_GFX_D3_EN (1 << 14)
69#define GART_FEATURE_ID 0x2B
70#define HANG_EN (1 << 11)
71#define TLB_ENABLE (1 << 18)
72#define P2P_ENABLE (1 << 19)
73#define GTW_LAC_EN (1 << 25)
74#define LEVEL2_GART (0 << 30)
75#define LEVEL1_GART (1 << 30)
76#define PDC_EN (1 << 31)
77#define GART_BASE 0x2C
78#define GART_CACHE_CNTRL 0x2E
79# define GART_CACHE_INVALIDATE (1 << 0)
80#define MC_STATUS 0x90
81#define MCCFG_FB_LOCATION 0x100
82#define MC_FB_START_MASK 0x0000FFFF
83#define MC_FB_START_SHIFT 0
84#define MC_FB_TOP_MASK 0xFFFF0000
85#define MC_FB_TOP_SHIFT 16
86#define MCCFG_AGP_LOCATION 0x101
87#define MC_AGP_START_MASK 0x0000FFFF
88#define MC_AGP_START_SHIFT 0
89#define MC_AGP_TOP_MASK 0xFFFF0000
90#define MC_AGP_TOP_SHIFT 16
91#define MCCFG_AGP_BASE 0x102
92#define MCCFG_AGP_BASE_2 0x103
93#define MC_INIT_MISC_LAT_TIMER 0x104
94#define MC_DISP0R_INIT_LAT_SHIFT 8
95#define MC_DISP0R_INIT_LAT_MASK 0x00000F00
96#define MC_DISP1R_INIT_LAT_SHIFT 12
97#define MC_DISP1R_INIT_LAT_MASK 0x0000F000
98
99#endif
diff --git a/drivers/gpu/drm/radeon/rv515.c b/drivers/gpu/drm/radeon/rv515.c
index 677929ed8ed3..4fd411893b91 100644
--- a/drivers/gpu/drm/radeon/rv515.c
+++ b/drivers/gpu/drm/radeon/rv515.c
@@ -27,8 +27,9 @@
27 */ 27 */
28#include <linux/seq_file.h> 28#include <linux/seq_file.h>
29#include "drmP.h" 29#include "drmP.h"
30#include "radeon_reg.h" 30#include "rv515r.h"
31#include "radeon.h" 31#include "radeon.h"
32#include "radeon_share.h"
32 33
33/* rv515 depends on : */ 34/* rv515 depends on : */
34void r100_hdp_reset(struct radeon_device *rdev); 35void r100_hdp_reset(struct radeon_device *rdev);
@@ -100,25 +101,25 @@ int rv515_mc_init(struct radeon_device *rdev)
100 } 101 }
101 /* Write VRAM size in case we are limiting it */ 102 /* Write VRAM size in case we are limiting it */
102 WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.vram_size); 103 WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.vram_size);
103 tmp = REG_SET(RV515_MC_FB_START, rdev->mc.vram_location >> 16); 104 tmp = REG_SET(MC_FB_START, rdev->mc.vram_location >> 16);
104 WREG32(0x134, tmp); 105 WREG32(0x134, tmp);
105 tmp = rdev->mc.vram_location + rdev->mc.vram_size - 1; 106 tmp = rdev->mc.vram_location + rdev->mc.vram_size - 1;
106 tmp = REG_SET(RV515_MC_FB_TOP, tmp >> 16); 107 tmp = REG_SET(MC_FB_TOP, tmp >> 16);
107 tmp |= REG_SET(RV515_MC_FB_START, rdev->mc.vram_location >> 16); 108 tmp |= REG_SET(MC_FB_START, rdev->mc.vram_location >> 16);
108 WREG32_MC(RV515_MC_FB_LOCATION, tmp); 109 WREG32_MC(MC_FB_LOCATION, tmp);
109 WREG32(RS690_HDP_FB_LOCATION, rdev->mc.vram_location >> 16); 110 WREG32(HDP_FB_LOCATION, rdev->mc.vram_location >> 16);
110 WREG32(0x310, rdev->mc.vram_location); 111 WREG32(0x310, rdev->mc.vram_location);
111 if (rdev->flags & RADEON_IS_AGP) { 112 if (rdev->flags & RADEON_IS_AGP) {
112 tmp = rdev->mc.gtt_location + rdev->mc.gtt_size - 1; 113 tmp = rdev->mc.gtt_location + rdev->mc.gtt_size - 1;
113 tmp = REG_SET(RV515_MC_AGP_TOP, tmp >> 16); 114 tmp = REG_SET(MC_AGP_TOP, tmp >> 16);
114 tmp |= REG_SET(RV515_MC_AGP_START, rdev->mc.gtt_location >> 16); 115 tmp |= REG_SET(MC_AGP_START, rdev->mc.gtt_location >> 16);
115 WREG32_MC(RV515_MC_AGP_LOCATION, tmp); 116 WREG32_MC(MC_AGP_LOCATION, tmp);
116 WREG32_MC(RV515_MC_AGP_BASE, rdev->mc.agp_base); 117 WREG32_MC(MC_AGP_BASE, rdev->mc.agp_base);
117 WREG32_MC(RV515_MC_AGP_BASE_2, 0); 118 WREG32_MC(MC_AGP_BASE_2, 0);
118 } else { 119 } else {
119 WREG32_MC(RV515_MC_AGP_LOCATION, 0x0FFFFFFF); 120 WREG32_MC(MC_AGP_LOCATION, 0x0FFFFFFF);
120 WREG32_MC(RV515_MC_AGP_BASE, 0); 121 WREG32_MC(MC_AGP_BASE, 0);
121 WREG32_MC(RV515_MC_AGP_BASE_2, 0); 122 WREG32_MC(MC_AGP_BASE_2, 0);
122 } 123 }
123 return 0; 124 return 0;
124} 125}
@@ -136,95 +137,67 @@ void rv515_mc_fini(struct radeon_device *rdev)
136 */ 137 */
137void rv515_ring_start(struct radeon_device *rdev) 138void rv515_ring_start(struct radeon_device *rdev)
138{ 139{
139 unsigned gb_tile_config;
140 int r; 140 int r;
141 141
142 /* Sub pixel 1/12 so we can have 4K rendering according to doc */
143 gb_tile_config = R300_ENABLE_TILING | R300_TILE_SIZE_16;
144 switch (rdev->num_gb_pipes) {
145 case 2:
146 gb_tile_config |= R300_PIPE_COUNT_R300;
147 break;
148 case 3:
149 gb_tile_config |= R300_PIPE_COUNT_R420_3P;
150 break;
151 case 4:
152 gb_tile_config |= R300_PIPE_COUNT_R420;
153 break;
154 case 1:
155 default:
156 gb_tile_config |= R300_PIPE_COUNT_RV350;
157 break;
158 }
159
160 r = radeon_ring_lock(rdev, 64); 142 r = radeon_ring_lock(rdev, 64);
161 if (r) { 143 if (r) {
162 return; 144 return;
163 } 145 }
164 radeon_ring_write(rdev, PACKET0(RADEON_ISYNC_CNTL, 0)); 146 radeon_ring_write(rdev, PACKET0(ISYNC_CNTL, 0));
165 radeon_ring_write(rdev,
166 RADEON_ISYNC_ANY2D_IDLE3D |
167 RADEON_ISYNC_ANY3D_IDLE2D |
168 RADEON_ISYNC_WAIT_IDLEGUI |
169 RADEON_ISYNC_CPSCRATCH_IDLEGUI);
170 radeon_ring_write(rdev, PACKET0(R300_GB_TILE_CONFIG, 0));
171 radeon_ring_write(rdev, gb_tile_config);
172 radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0));
173 radeon_ring_write(rdev, 147 radeon_ring_write(rdev,
174 RADEON_WAIT_2D_IDLECLEAN | 148 ISYNC_ANY2D_IDLE3D |
175 RADEON_WAIT_3D_IDLECLEAN); 149 ISYNC_ANY3D_IDLE2D |
150 ISYNC_WAIT_IDLEGUI |
151 ISYNC_CPSCRATCH_IDLEGUI);
152 radeon_ring_write(rdev, PACKET0(WAIT_UNTIL, 0));
153 radeon_ring_write(rdev, WAIT_2D_IDLECLEAN | WAIT_3D_IDLECLEAN);
176 radeon_ring_write(rdev, PACKET0(0x170C, 0)); 154 radeon_ring_write(rdev, PACKET0(0x170C, 0));
177 radeon_ring_write(rdev, 1 << 31); 155 radeon_ring_write(rdev, 1 << 31);
178 radeon_ring_write(rdev, PACKET0(R300_GB_SELECT, 0)); 156 radeon_ring_write(rdev, PACKET0(GB_SELECT, 0));
179 radeon_ring_write(rdev, 0); 157 radeon_ring_write(rdev, 0);
180 radeon_ring_write(rdev, PACKET0(R300_GB_ENABLE, 0)); 158 radeon_ring_write(rdev, PACKET0(GB_ENABLE, 0));
181 radeon_ring_write(rdev, 0); 159 radeon_ring_write(rdev, 0);
182 radeon_ring_write(rdev, PACKET0(0x42C8, 0)); 160 radeon_ring_write(rdev, PACKET0(0x42C8, 0));
183 radeon_ring_write(rdev, (1 << rdev->num_gb_pipes) - 1); 161 radeon_ring_write(rdev, (1 << rdev->num_gb_pipes) - 1);
184 radeon_ring_write(rdev, PACKET0(R500_VAP_INDEX_OFFSET, 0)); 162 radeon_ring_write(rdev, PACKET0(VAP_INDEX_OFFSET, 0));
185 radeon_ring_write(rdev, 0); 163 radeon_ring_write(rdev, 0);
186 radeon_ring_write(rdev, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0)); 164 radeon_ring_write(rdev, PACKET0(RB3D_DSTCACHE_CTLSTAT, 0));
187 radeon_ring_write(rdev, R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE); 165 radeon_ring_write(rdev, RB3D_DC_FLUSH | RB3D_DC_FREE);
188 radeon_ring_write(rdev, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0)); 166 radeon_ring_write(rdev, PACKET0(ZB_ZCACHE_CTLSTAT, 0));
189 radeon_ring_write(rdev, R300_ZC_FLUSH | R300_ZC_FREE); 167 radeon_ring_write(rdev, ZC_FLUSH | ZC_FREE);
190 radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0)); 168 radeon_ring_write(rdev, PACKET0(WAIT_UNTIL, 0));
191 radeon_ring_write(rdev, 169 radeon_ring_write(rdev, WAIT_2D_IDLECLEAN | WAIT_3D_IDLECLEAN);
192 RADEON_WAIT_2D_IDLECLEAN | 170 radeon_ring_write(rdev, PACKET0(GB_AA_CONFIG, 0));
193 RADEON_WAIT_3D_IDLECLEAN);
194 radeon_ring_write(rdev, PACKET0(R300_GB_AA_CONFIG, 0));
195 radeon_ring_write(rdev, 0); 171 radeon_ring_write(rdev, 0);
196 radeon_ring_write(rdev, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0)); 172 radeon_ring_write(rdev, PACKET0(RB3D_DSTCACHE_CTLSTAT, 0));
197 radeon_ring_write(rdev, R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE); 173 radeon_ring_write(rdev, RB3D_DC_FLUSH | RB3D_DC_FREE);
198 radeon_ring_write(rdev, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0)); 174 radeon_ring_write(rdev, PACKET0(ZB_ZCACHE_CTLSTAT, 0));
199 radeon_ring_write(rdev, R300_ZC_FLUSH | R300_ZC_FREE); 175 radeon_ring_write(rdev, ZC_FLUSH | ZC_FREE);
200 radeon_ring_write(rdev, PACKET0(R300_GB_MSPOS0, 0)); 176 radeon_ring_write(rdev, PACKET0(GB_MSPOS0, 0));
201 radeon_ring_write(rdev,
202 ((6 << R300_MS_X0_SHIFT) |
203 (6 << R300_MS_Y0_SHIFT) |
204 (6 << R300_MS_X1_SHIFT) |
205 (6 << R300_MS_Y1_SHIFT) |
206 (6 << R300_MS_X2_SHIFT) |
207 (6 << R300_MS_Y2_SHIFT) |
208 (6 << R300_MSBD0_Y_SHIFT) |
209 (6 << R300_MSBD0_X_SHIFT)));
210 radeon_ring_write(rdev, PACKET0(R300_GB_MSPOS1, 0));
211 radeon_ring_write(rdev,
212 ((6 << R300_MS_X3_SHIFT) |
213 (6 << R300_MS_Y3_SHIFT) |
214 (6 << R300_MS_X4_SHIFT) |
215 (6 << R300_MS_Y4_SHIFT) |
216 (6 << R300_MS_X5_SHIFT) |
217 (6 << R300_MS_Y5_SHIFT) |
218 (6 << R300_MSBD1_SHIFT)));
219 radeon_ring_write(rdev, PACKET0(R300_GA_ENHANCE, 0));
220 radeon_ring_write(rdev, R300_GA_DEADLOCK_CNTL | R300_GA_FASTSYNC_CNTL);
221 radeon_ring_write(rdev, PACKET0(R300_GA_POLY_MODE, 0));
222 radeon_ring_write(rdev, 177 radeon_ring_write(rdev,
223 R300_FRONT_PTYPE_TRIANGE | R300_BACK_PTYPE_TRIANGE); 178 ((6 << MS_X0_SHIFT) |
224 radeon_ring_write(rdev, PACKET0(R300_GA_ROUND_MODE, 0)); 179 (6 << MS_Y0_SHIFT) |
180 (6 << MS_X1_SHIFT) |
181 (6 << MS_Y1_SHIFT) |
182 (6 << MS_X2_SHIFT) |
183 (6 << MS_Y2_SHIFT) |
184 (6 << MSBD0_Y_SHIFT) |
185 (6 << MSBD0_X_SHIFT)));
186 radeon_ring_write(rdev, PACKET0(GB_MSPOS1, 0));
225 radeon_ring_write(rdev, 187 radeon_ring_write(rdev,
226 R300_GEOMETRY_ROUND_NEAREST | 188 ((6 << MS_X3_SHIFT) |
227 R300_COLOR_ROUND_NEAREST); 189 (6 << MS_Y3_SHIFT) |
190 (6 << MS_X4_SHIFT) |
191 (6 << MS_Y4_SHIFT) |
192 (6 << MS_X5_SHIFT) |
193 (6 << MS_Y5_SHIFT) |
194 (6 << MSBD1_SHIFT)));
195 radeon_ring_write(rdev, PACKET0(GA_ENHANCE, 0));
196 radeon_ring_write(rdev, GA_DEADLOCK_CNTL | GA_FASTSYNC_CNTL);
197 radeon_ring_write(rdev, PACKET0(GA_POLY_MODE, 0));
198 radeon_ring_write(rdev, FRONT_PTYPE_TRIANGE | BACK_PTYPE_TRIANGE);
199 radeon_ring_write(rdev, PACKET0(GA_ROUND_MODE, 0));
200 radeon_ring_write(rdev, GEOMETRY_ROUND_NEAREST | COLOR_ROUND_NEAREST);
228 radeon_ring_write(rdev, PACKET0(0x20C8, 0)); 201 radeon_ring_write(rdev, PACKET0(0x20C8, 0));
229 radeon_ring_write(rdev, 0); 202 radeon_ring_write(rdev, 0);
230 radeon_ring_unlock_commit(rdev); 203 radeon_ring_unlock_commit(rdev);
@@ -242,8 +215,8 @@ int rv515_mc_wait_for_idle(struct radeon_device *rdev)
242 215
243 for (i = 0; i < rdev->usec_timeout; i++) { 216 for (i = 0; i < rdev->usec_timeout; i++) {
244 /* read MC_STATUS */ 217 /* read MC_STATUS */
245 tmp = RREG32_MC(RV515_MC_STATUS); 218 tmp = RREG32_MC(MC_STATUS);
246 if (tmp & RV515_MC_STATUS_IDLE) { 219 if (tmp & MC_STATUS_IDLE) {
247 return 0; 220 return 0;
248 } 221 }
249 DRM_UDELAY(1); 222 DRM_UDELAY(1);
@@ -291,33 +264,33 @@ int rv515_ga_reset(struct radeon_device *rdev)
291 reinit_cp = rdev->cp.ready; 264 reinit_cp = rdev->cp.ready;
292 rdev->cp.ready = false; 265 rdev->cp.ready = false;
293 for (i = 0; i < rdev->usec_timeout; i++) { 266 for (i = 0; i < rdev->usec_timeout; i++) {
294 WREG32(RADEON_CP_CSQ_MODE, 0); 267 WREG32(CP_CSQ_MODE, 0);
295 WREG32(RADEON_CP_CSQ_CNTL, 0); 268 WREG32(CP_CSQ_CNTL, 0);
296 WREG32(RADEON_RBBM_SOFT_RESET, 0x32005); 269 WREG32(RBBM_SOFT_RESET, 0x32005);
297 (void)RREG32(RADEON_RBBM_SOFT_RESET); 270 (void)RREG32(RBBM_SOFT_RESET);
298 udelay(200); 271 udelay(200);
299 WREG32(RADEON_RBBM_SOFT_RESET, 0); 272 WREG32(RBBM_SOFT_RESET, 0);
300 /* Wait to prevent race in RBBM_STATUS */ 273 /* Wait to prevent race in RBBM_STATUS */
301 mdelay(1); 274 mdelay(1);
302 tmp = RREG32(RADEON_RBBM_STATUS); 275 tmp = RREG32(RBBM_STATUS);
303 if (tmp & ((1 << 20) | (1 << 26))) { 276 if (tmp & ((1 << 20) | (1 << 26))) {
304 DRM_ERROR("VAP & CP still busy (RBBM_STATUS=0x%08X)\n", tmp); 277 DRM_ERROR("VAP & CP still busy (RBBM_STATUS=0x%08X)\n", tmp);
305 /* GA still busy soft reset it */ 278 /* GA still busy soft reset it */
306 WREG32(0x429C, 0x200); 279 WREG32(0x429C, 0x200);
307 WREG32(R300_VAP_PVS_STATE_FLUSH_REG, 0); 280 WREG32(VAP_PVS_STATE_FLUSH_REG, 0);
308 WREG32(0x43E0, 0); 281 WREG32(0x43E0, 0);
309 WREG32(0x43E4, 0); 282 WREG32(0x43E4, 0);
310 WREG32(0x24AC, 0); 283 WREG32(0x24AC, 0);
311 } 284 }
312 /* Wait to prevent race in RBBM_STATUS */ 285 /* Wait to prevent race in RBBM_STATUS */
313 mdelay(1); 286 mdelay(1);
314 tmp = RREG32(RADEON_RBBM_STATUS); 287 tmp = RREG32(RBBM_STATUS);
315 if (!(tmp & ((1 << 20) | (1 << 26)))) { 288 if (!(tmp & ((1 << 20) | (1 << 26)))) {
316 break; 289 break;
317 } 290 }
318 } 291 }
319 for (i = 0; i < rdev->usec_timeout; i++) { 292 for (i = 0; i < rdev->usec_timeout; i++) {
320 tmp = RREG32(RADEON_RBBM_STATUS); 293 tmp = RREG32(RBBM_STATUS);
321 if (!(tmp & ((1 << 20) | (1 << 26)))) { 294 if (!(tmp & ((1 << 20) | (1 << 26)))) {
322 DRM_INFO("GA reset succeed (RBBM_STATUS=0x%08X)\n", 295 DRM_INFO("GA reset succeed (RBBM_STATUS=0x%08X)\n",
323 tmp); 296 tmp);
@@ -331,7 +304,7 @@ int rv515_ga_reset(struct radeon_device *rdev)
331 } 304 }
332 DRM_UDELAY(1); 305 DRM_UDELAY(1);
333 } 306 }
334 tmp = RREG32(RADEON_RBBM_STATUS); 307 tmp = RREG32(RBBM_STATUS);
335 DRM_ERROR("Failed to reset GA ! (RBBM_STATUS=0x%08X)\n", tmp); 308 DRM_ERROR("Failed to reset GA ! (RBBM_STATUS=0x%08X)\n", tmp);
336 return -1; 309 return -1;
337} 310}
@@ -341,7 +314,7 @@ int rv515_gpu_reset(struct radeon_device *rdev)
341 uint32_t status; 314 uint32_t status;
342 315
343 /* reset order likely matter */ 316 /* reset order likely matter */
344 status = RREG32(RADEON_RBBM_STATUS); 317 status = RREG32(RBBM_STATUS);
345 /* reset HDP */ 318 /* reset HDP */
346 r100_hdp_reset(rdev); 319 r100_hdp_reset(rdev);
347 /* reset rb2d */ 320 /* reset rb2d */
@@ -353,12 +326,12 @@ int rv515_gpu_reset(struct radeon_device *rdev)
353 rv515_ga_reset(rdev); 326 rv515_ga_reset(rdev);
354 } 327 }
355 /* reset CP */ 328 /* reset CP */
356 status = RREG32(RADEON_RBBM_STATUS); 329 status = RREG32(RBBM_STATUS);
357 if (status & (1 << 16)) { 330 if (status & (1 << 16)) {
358 r100_cp_reset(rdev); 331 r100_cp_reset(rdev);
359 } 332 }
360 /* Check if GPU is idle */ 333 /* Check if GPU is idle */
361 status = RREG32(RADEON_RBBM_STATUS); 334 status = RREG32(RBBM_STATUS);
362 if (status & (1 << 31)) { 335 if (status & (1 << 31)) {
363 DRM_ERROR("Failed to reset GPU (RBBM_STATUS=0x%08X)\n", status); 336 DRM_ERROR("Failed to reset GPU (RBBM_STATUS=0x%08X)\n", status);
364 return -1; 337 return -1;
@@ -377,8 +350,7 @@ static void rv515_vram_get_type(struct radeon_device *rdev)
377 350
378 rdev->mc.vram_width = 128; 351 rdev->mc.vram_width = 128;
379 rdev->mc.vram_is_ddr = true; 352 rdev->mc.vram_is_ddr = true;
380 tmp = RREG32_MC(RV515_MC_CNTL); 353 tmp = RREG32_MC(RV515_MC_CNTL) & MEM_NUM_CHANNELS_MASK;
381 tmp &= RV515_MEM_NUM_CHANNELS_MASK;
382 switch (tmp) { 354 switch (tmp) {
383 case 0: 355 case 0:
384 rdev->mc.vram_width = 64; 356 rdev->mc.vram_width = 64;
@@ -394,9 +366,19 @@ static void rv515_vram_get_type(struct radeon_device *rdev)
394 366
395void rv515_vram_info(struct radeon_device *rdev) 367void rv515_vram_info(struct radeon_device *rdev)
396{ 368{
369 fixed20_12 a;
370
397 rv515_vram_get_type(rdev); 371 rv515_vram_get_type(rdev);
398 372 rdev->mc.vram_size = RREG32(CONFIG_MEMSIZE);
399 r100_vram_init_sizes(rdev); 373
374 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
375 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
376 /* FIXME: we should enforce default clock in case GPU is not in
377 * default setup
378 */
379 a.full = rfixed_const(100);
380 rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
381 rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
400} 382}
401 383
402 384
@@ -407,35 +389,35 @@ uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg)
407{ 389{
408 uint32_t r; 390 uint32_t r;
409 391
410 WREG32(R520_MC_IND_INDEX, 0x7f0000 | (reg & 0xffff)); 392 WREG32(MC_IND_INDEX, 0x7f0000 | (reg & 0xffff));
411 r = RREG32(R520_MC_IND_DATA); 393 r = RREG32(MC_IND_DATA);
412 WREG32(R520_MC_IND_INDEX, 0); 394 WREG32(MC_IND_INDEX, 0);
413 return r; 395 return r;
414} 396}
415 397
416void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) 398void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
417{ 399{
418 WREG32(R520_MC_IND_INDEX, 0xff0000 | ((reg) & 0xffff)); 400 WREG32(MC_IND_INDEX, 0xff0000 | ((reg) & 0xffff));
419 WREG32(R520_MC_IND_DATA, (v)); 401 WREG32(MC_IND_DATA, (v));
420 WREG32(R520_MC_IND_INDEX, 0); 402 WREG32(MC_IND_INDEX, 0);
421} 403}
422 404
423uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg) 405uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg)
424{ 406{
425 uint32_t r; 407 uint32_t r;
426 408
427 WREG32(RADEON_PCIE_INDEX, ((reg) & 0x7ff)); 409 WREG32(PCIE_INDEX, ((reg) & 0x7ff));
428 (void)RREG32(RADEON_PCIE_INDEX); 410 (void)RREG32(PCIE_INDEX);
429 r = RREG32(RADEON_PCIE_DATA); 411 r = RREG32(PCIE_DATA);
430 return r; 412 return r;
431} 413}
432 414
433void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) 415void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
434{ 416{
435 WREG32(RADEON_PCIE_INDEX, ((reg) & 0x7ff)); 417 WREG32(PCIE_INDEX, ((reg) & 0x7ff));
436 (void)RREG32(RADEON_PCIE_INDEX); 418 (void)RREG32(PCIE_INDEX);
437 WREG32(RADEON_PCIE_DATA, (v)); 419 WREG32(PCIE_DATA, (v));
438 (void)RREG32(RADEON_PCIE_DATA); 420 (void)RREG32(PCIE_DATA);
439} 421}
440 422
441 423
@@ -450,13 +432,13 @@ static int rv515_debugfs_pipes_info(struct seq_file *m, void *data)
450 struct radeon_device *rdev = dev->dev_private; 432 struct radeon_device *rdev = dev->dev_private;
451 uint32_t tmp; 433 uint32_t tmp;
452 434
453 tmp = RREG32(R400_GB_PIPE_SELECT); 435 tmp = RREG32(GB_PIPE_SELECT);
454 seq_printf(m, "GB_PIPE_SELECT 0x%08x\n", tmp); 436 seq_printf(m, "GB_PIPE_SELECT 0x%08x\n", tmp);
455 tmp = RREG32(R500_SU_REG_DEST); 437 tmp = RREG32(SU_REG_DEST);
456 seq_printf(m, "SU_REG_DEST 0x%08x\n", tmp); 438 seq_printf(m, "SU_REG_DEST 0x%08x\n", tmp);
457 tmp = RREG32(R300_GB_TILE_CONFIG); 439 tmp = RREG32(GB_TILE_CONFIG);
458 seq_printf(m, "GB_TILE_CONFIG 0x%08x\n", tmp); 440 seq_printf(m, "GB_TILE_CONFIG 0x%08x\n", tmp);
459 tmp = RREG32(R300_DST_PIPE_CONFIG); 441 tmp = RREG32(DST_PIPE_CONFIG);
460 seq_printf(m, "DST_PIPE_CONFIG 0x%08x\n", tmp); 442 seq_printf(m, "DST_PIPE_CONFIG 0x%08x\n", tmp);
461 return 0; 443 return 0;
462} 444}
@@ -571,3 +553,551 @@ int rv515_init(struct radeon_device *rdev)
571 rdev->config.r300.reg_safe_bm_size = ARRAY_SIZE(r500_reg_safe_bm); 553 rdev->config.r300.reg_safe_bm_size = ARRAY_SIZE(r500_reg_safe_bm);
572 return 0; 554 return 0;
573} 555}
556
557void atom_rv515_force_tv_scaler(struct radeon_device *rdev)
558{
559
560 WREG32(0x659C, 0x0);
561 WREG32(0x6594, 0x705);
562 WREG32(0x65A4, 0x10001);
563 WREG32(0x65D8, 0x0);
564 WREG32(0x65B0, 0x0);
565 WREG32(0x65C0, 0x0);
566 WREG32(0x65D4, 0x0);
567 WREG32(0x6578, 0x0);
568 WREG32(0x657C, 0x841880A8);
569 WREG32(0x6578, 0x1);
570 WREG32(0x657C, 0x84208680);
571 WREG32(0x6578, 0x2);
572 WREG32(0x657C, 0xBFF880B0);
573 WREG32(0x6578, 0x100);
574 WREG32(0x657C, 0x83D88088);
575 WREG32(0x6578, 0x101);
576 WREG32(0x657C, 0x84608680);
577 WREG32(0x6578, 0x102);
578 WREG32(0x657C, 0xBFF080D0);
579 WREG32(0x6578, 0x200);
580 WREG32(0x657C, 0x83988068);
581 WREG32(0x6578, 0x201);
582 WREG32(0x657C, 0x84A08680);
583 WREG32(0x6578, 0x202);
584 WREG32(0x657C, 0xBFF080F8);
585 WREG32(0x6578, 0x300);
586 WREG32(0x657C, 0x83588058);
587 WREG32(0x6578, 0x301);
588 WREG32(0x657C, 0x84E08660);
589 WREG32(0x6578, 0x302);
590 WREG32(0x657C, 0xBFF88120);
591 WREG32(0x6578, 0x400);
592 WREG32(0x657C, 0x83188040);
593 WREG32(0x6578, 0x401);
594 WREG32(0x657C, 0x85008660);
595 WREG32(0x6578, 0x402);
596 WREG32(0x657C, 0xBFF88150);
597 WREG32(0x6578, 0x500);
598 WREG32(0x657C, 0x82D88030);
599 WREG32(0x6578, 0x501);
600 WREG32(0x657C, 0x85408640);
601 WREG32(0x6578, 0x502);
602 WREG32(0x657C, 0xBFF88180);
603 WREG32(0x6578, 0x600);
604 WREG32(0x657C, 0x82A08018);
605 WREG32(0x6578, 0x601);
606 WREG32(0x657C, 0x85808620);
607 WREG32(0x6578, 0x602);
608 WREG32(0x657C, 0xBFF081B8);
609 WREG32(0x6578, 0x700);
610 WREG32(0x657C, 0x82608010);
611 WREG32(0x6578, 0x701);
612 WREG32(0x657C, 0x85A08600);
613 WREG32(0x6578, 0x702);
614 WREG32(0x657C, 0x800081F0);
615 WREG32(0x6578, 0x800);
616 WREG32(0x657C, 0x8228BFF8);
617 WREG32(0x6578, 0x801);
618 WREG32(0x657C, 0x85E085E0);
619 WREG32(0x6578, 0x802);
620 WREG32(0x657C, 0xBFF88228);
621 WREG32(0x6578, 0x10000);
622 WREG32(0x657C, 0x82A8BF00);
623 WREG32(0x6578, 0x10001);
624 WREG32(0x657C, 0x82A08CC0);
625 WREG32(0x6578, 0x10002);
626 WREG32(0x657C, 0x8008BEF8);
627 WREG32(0x6578, 0x10100);
628 WREG32(0x657C, 0x81F0BF28);
629 WREG32(0x6578, 0x10101);
630 WREG32(0x657C, 0x83608CA0);
631 WREG32(0x6578, 0x10102);
632 WREG32(0x657C, 0x8018BED0);
633 WREG32(0x6578, 0x10200);
634 WREG32(0x657C, 0x8148BF38);
635 WREG32(0x6578, 0x10201);
636 WREG32(0x657C, 0x84408C80);
637 WREG32(0x6578, 0x10202);
638 WREG32(0x657C, 0x8008BEB8);
639 WREG32(0x6578, 0x10300);
640 WREG32(0x657C, 0x80B0BF78);
641 WREG32(0x6578, 0x10301);
642 WREG32(0x657C, 0x85008C20);
643 WREG32(0x6578, 0x10302);
644 WREG32(0x657C, 0x8020BEA0);
645 WREG32(0x6578, 0x10400);
646 WREG32(0x657C, 0x8028BF90);
647 WREG32(0x6578, 0x10401);
648 WREG32(0x657C, 0x85E08BC0);
649 WREG32(0x6578, 0x10402);
650 WREG32(0x657C, 0x8018BE90);
651 WREG32(0x6578, 0x10500);
652 WREG32(0x657C, 0xBFB8BFB0);
653 WREG32(0x6578, 0x10501);
654 WREG32(0x657C, 0x86C08B40);
655 WREG32(0x6578, 0x10502);
656 WREG32(0x657C, 0x8010BE90);
657 WREG32(0x6578, 0x10600);
658 WREG32(0x657C, 0xBF58BFC8);
659 WREG32(0x6578, 0x10601);
660 WREG32(0x657C, 0x87A08AA0);
661 WREG32(0x6578, 0x10602);
662 WREG32(0x657C, 0x8010BE98);
663 WREG32(0x6578, 0x10700);
664 WREG32(0x657C, 0xBF10BFF0);
665 WREG32(0x6578, 0x10701);
666 WREG32(0x657C, 0x886089E0);
667 WREG32(0x6578, 0x10702);
668 WREG32(0x657C, 0x8018BEB0);
669 WREG32(0x6578, 0x10800);
670 WREG32(0x657C, 0xBED8BFE8);
671 WREG32(0x6578, 0x10801);
672 WREG32(0x657C, 0x89408940);
673 WREG32(0x6578, 0x10802);
674 WREG32(0x657C, 0xBFE8BED8);
675 WREG32(0x6578, 0x20000);
676 WREG32(0x657C, 0x80008000);
677 WREG32(0x6578, 0x20001);
678 WREG32(0x657C, 0x90008000);
679 WREG32(0x6578, 0x20002);
680 WREG32(0x657C, 0x80008000);
681 WREG32(0x6578, 0x20003);
682 WREG32(0x657C, 0x80008000);
683 WREG32(0x6578, 0x20100);
684 WREG32(0x657C, 0x80108000);
685 WREG32(0x6578, 0x20101);
686 WREG32(0x657C, 0x8FE0BF70);
687 WREG32(0x6578, 0x20102);
688 WREG32(0x657C, 0xBFE880C0);
689 WREG32(0x6578, 0x20103);
690 WREG32(0x657C, 0x80008000);
691 WREG32(0x6578, 0x20200);
692 WREG32(0x657C, 0x8018BFF8);
693 WREG32(0x6578, 0x20201);
694 WREG32(0x657C, 0x8F80BF08);
695 WREG32(0x6578, 0x20202);
696 WREG32(0x657C, 0xBFD081A0);
697 WREG32(0x6578, 0x20203);
698 WREG32(0x657C, 0xBFF88000);
699 WREG32(0x6578, 0x20300);
700 WREG32(0x657C, 0x80188000);
701 WREG32(0x6578, 0x20301);
702 WREG32(0x657C, 0x8EE0BEC0);
703 WREG32(0x6578, 0x20302);
704 WREG32(0x657C, 0xBFB082A0);
705 WREG32(0x6578, 0x20303);
706 WREG32(0x657C, 0x80008000);
707 WREG32(0x6578, 0x20400);
708 WREG32(0x657C, 0x80188000);
709 WREG32(0x6578, 0x20401);
710 WREG32(0x657C, 0x8E00BEA0);
711 WREG32(0x6578, 0x20402);
712 WREG32(0x657C, 0xBF8883C0);
713 WREG32(0x6578, 0x20403);
714 WREG32(0x657C, 0x80008000);
715 WREG32(0x6578, 0x20500);
716 WREG32(0x657C, 0x80188000);
717 WREG32(0x6578, 0x20501);
718 WREG32(0x657C, 0x8D00BE90);
719 WREG32(0x6578, 0x20502);
720 WREG32(0x657C, 0xBF588500);
721 WREG32(0x6578, 0x20503);
722 WREG32(0x657C, 0x80008008);
723 WREG32(0x6578, 0x20600);
724 WREG32(0x657C, 0x80188000);
725 WREG32(0x6578, 0x20601);
726 WREG32(0x657C, 0x8BC0BE98);
727 WREG32(0x6578, 0x20602);
728 WREG32(0x657C, 0xBF308660);
729 WREG32(0x6578, 0x20603);
730 WREG32(0x657C, 0x80008008);
731 WREG32(0x6578, 0x20700);
732 WREG32(0x657C, 0x80108000);
733 WREG32(0x6578, 0x20701);
734 WREG32(0x657C, 0x8A80BEB0);
735 WREG32(0x6578, 0x20702);
736 WREG32(0x657C, 0xBF0087C0);
737 WREG32(0x6578, 0x20703);
738 WREG32(0x657C, 0x80008008);
739 WREG32(0x6578, 0x20800);
740 WREG32(0x657C, 0x80108000);
741 WREG32(0x6578, 0x20801);
742 WREG32(0x657C, 0x8920BED0);
743 WREG32(0x6578, 0x20802);
744 WREG32(0x657C, 0xBED08920);
745 WREG32(0x6578, 0x20803);
746 WREG32(0x657C, 0x80008010);
747 WREG32(0x6578, 0x30000);
748 WREG32(0x657C, 0x90008000);
749 WREG32(0x6578, 0x30001);
750 WREG32(0x657C, 0x80008000);
751 WREG32(0x6578, 0x30100);
752 WREG32(0x657C, 0x8FE0BF90);
753 WREG32(0x6578, 0x30101);
754 WREG32(0x657C, 0xBFF880A0);
755 WREG32(0x6578, 0x30200);
756 WREG32(0x657C, 0x8F60BF40);
757 WREG32(0x6578, 0x30201);
758 WREG32(0x657C, 0xBFE88180);
759 WREG32(0x6578, 0x30300);
760 WREG32(0x657C, 0x8EC0BF00);
761 WREG32(0x6578, 0x30301);
762 WREG32(0x657C, 0xBFC88280);
763 WREG32(0x6578, 0x30400);
764 WREG32(0x657C, 0x8DE0BEE0);
765 WREG32(0x6578, 0x30401);
766 WREG32(0x657C, 0xBFA083A0);
767 WREG32(0x6578, 0x30500);
768 WREG32(0x657C, 0x8CE0BED0);
769 WREG32(0x6578, 0x30501);
770 WREG32(0x657C, 0xBF7884E0);
771 WREG32(0x6578, 0x30600);
772 WREG32(0x657C, 0x8BA0BED8);
773 WREG32(0x6578, 0x30601);
774 WREG32(0x657C, 0xBF508640);
775 WREG32(0x6578, 0x30700);
776 WREG32(0x657C, 0x8A60BEE8);
777 WREG32(0x6578, 0x30701);
778 WREG32(0x657C, 0xBF2087A0);
779 WREG32(0x6578, 0x30800);
780 WREG32(0x657C, 0x8900BF00);
781 WREG32(0x6578, 0x30801);
782 WREG32(0x657C, 0xBF008900);
783}
784
785struct rv515_watermark {
786 u32 lb_request_fifo_depth;
787 fixed20_12 num_line_pair;
788 fixed20_12 estimated_width;
789 fixed20_12 worst_case_latency;
790 fixed20_12 consumption_rate;
791 fixed20_12 active_time;
792 fixed20_12 dbpp;
793 fixed20_12 priority_mark_max;
794 fixed20_12 priority_mark;
795 fixed20_12 sclk;
796};
797
798void rv515_crtc_bandwidth_compute(struct radeon_device *rdev,
799 struct radeon_crtc *crtc,
800 struct rv515_watermark *wm)
801{
802 struct drm_display_mode *mode = &crtc->base.mode;
803 fixed20_12 a, b, c;
804 fixed20_12 pclk, request_fifo_depth, tolerable_latency, estimated_width;
805 fixed20_12 consumption_time, line_time, chunk_time, read_delay_latency;
806
807 if (!crtc->base.enabled) {
808 /* FIXME: wouldn't it better to set priority mark to maximum */
809 wm->lb_request_fifo_depth = 4;
810 return;
811 }
812
813 if (crtc->vsc.full > rfixed_const(2))
814 wm->num_line_pair.full = rfixed_const(2);
815 else
816 wm->num_line_pair.full = rfixed_const(1);
817
818 b.full = rfixed_const(mode->crtc_hdisplay);
819 c.full = rfixed_const(256);
820 a.full = rfixed_mul(wm->num_line_pair, b);
821 request_fifo_depth.full = rfixed_div(a, c);
822 if (a.full < rfixed_const(4)) {
823 wm->lb_request_fifo_depth = 4;
824 } else {
825 wm->lb_request_fifo_depth = rfixed_trunc(request_fifo_depth);
826 }
827
828 /* Determine consumption rate
829 * pclk = pixel clock period(ns) = 1000 / (mode.clock / 1000)
830 * vtaps = number of vertical taps,
831 * vsc = vertical scaling ratio, defined as source/destination
832 * hsc = horizontal scaling ration, defined as source/destination
833 */
834 a.full = rfixed_const(mode->clock);
835 b.full = rfixed_const(1000);
836 a.full = rfixed_div(a, b);
837 pclk.full = rfixed_div(b, a);
838 if (crtc->rmx_type != RMX_OFF) {
839 b.full = rfixed_const(2);
840 if (crtc->vsc.full > b.full)
841 b.full = crtc->vsc.full;
842 b.full = rfixed_mul(b, crtc->hsc);
843 c.full = rfixed_const(2);
844 b.full = rfixed_div(b, c);
845 consumption_time.full = rfixed_div(pclk, b);
846 } else {
847 consumption_time.full = pclk.full;
848 }
849 a.full = rfixed_const(1);
850 wm->consumption_rate.full = rfixed_div(a, consumption_time);
851
852
853 /* Determine line time
854 * LineTime = total time for one line of displayhtotal
855 * LineTime = total number of horizontal pixels
856 * pclk = pixel clock period(ns)
857 */
858 a.full = rfixed_const(crtc->base.mode.crtc_htotal);
859 line_time.full = rfixed_mul(a, pclk);
860
861 /* Determine active time
862 * ActiveTime = time of active region of display within one line,
863 * hactive = total number of horizontal active pixels
864 * htotal = total number of horizontal pixels
865 */
866 a.full = rfixed_const(crtc->base.mode.crtc_htotal);
867 b.full = rfixed_const(crtc->base.mode.crtc_hdisplay);
868 wm->active_time.full = rfixed_mul(line_time, b);
869 wm->active_time.full = rfixed_div(wm->active_time, a);
870
871 /* Determine chunk time
872 * ChunkTime = the time it takes the DCP to send one chunk of data
873 * to the LB which consists of pipeline delay and inter chunk gap
874 * sclk = system clock(Mhz)
875 */
876 a.full = rfixed_const(600 * 1000);
877 chunk_time.full = rfixed_div(a, rdev->pm.sclk);
878 read_delay_latency.full = rfixed_const(1000);
879
880 /* Determine the worst case latency
881 * NumLinePair = Number of line pairs to request(1=2 lines, 2=4 lines)
882 * WorstCaseLatency = worst case time from urgent to when the MC starts
883 * to return data
884 * READ_DELAY_IDLE_MAX = constant of 1us
885 * ChunkTime = time it takes the DCP to send one chunk of data to the LB
886 * which consists of pipeline delay and inter chunk gap
887 */
888 if (rfixed_trunc(wm->num_line_pair) > 1) {
889 a.full = rfixed_const(3);
890 wm->worst_case_latency.full = rfixed_mul(a, chunk_time);
891 wm->worst_case_latency.full += read_delay_latency.full;
892 } else {
893 wm->worst_case_latency.full = chunk_time.full + read_delay_latency.full;
894 }
895
896 /* Determine the tolerable latency
897 * TolerableLatency = Any given request has only 1 line time
898 * for the data to be returned
899 * LBRequestFifoDepth = Number of chunk requests the LB can
900 * put into the request FIFO for a display
901 * LineTime = total time for one line of display
902 * ChunkTime = the time it takes the DCP to send one chunk
903 * of data to the LB which consists of
904 * pipeline delay and inter chunk gap
905 */
906 if ((2+wm->lb_request_fifo_depth) >= rfixed_trunc(request_fifo_depth)) {
907 tolerable_latency.full = line_time.full;
908 } else {
909 tolerable_latency.full = rfixed_const(wm->lb_request_fifo_depth - 2);
910 tolerable_latency.full = request_fifo_depth.full - tolerable_latency.full;
911 tolerable_latency.full = rfixed_mul(tolerable_latency, chunk_time);
912 tolerable_latency.full = line_time.full - tolerable_latency.full;
913 }
914 /* We assume worst case 32bits (4 bytes) */
915 wm->dbpp.full = rfixed_const(2 * 16);
916
917 /* Determine the maximum priority mark
918 * width = viewport width in pixels
919 */
920 a.full = rfixed_const(16);
921 wm->priority_mark_max.full = rfixed_const(crtc->base.mode.crtc_hdisplay);
922 wm->priority_mark_max.full = rfixed_div(wm->priority_mark_max, a);
923
924 /* Determine estimated width */
925 estimated_width.full = tolerable_latency.full - wm->worst_case_latency.full;
926 estimated_width.full = rfixed_div(estimated_width, consumption_time);
927 if (rfixed_trunc(estimated_width) > crtc->base.mode.crtc_hdisplay) {
928 wm->priority_mark.full = rfixed_const(10);
929 } else {
930 a.full = rfixed_const(16);
931 wm->priority_mark.full = rfixed_div(estimated_width, a);
932 wm->priority_mark.full = wm->priority_mark_max.full - wm->priority_mark.full;
933 }
934}
935
936void rv515_bandwidth_avivo_update(struct radeon_device *rdev)
937{
938 struct drm_display_mode *mode0 = NULL;
939 struct drm_display_mode *mode1 = NULL;
940 struct rv515_watermark wm0;
941 struct rv515_watermark wm1;
942 u32 tmp;
943 fixed20_12 priority_mark02, priority_mark12, fill_rate;
944 fixed20_12 a, b;
945
946 if (rdev->mode_info.crtcs[0]->base.enabled)
947 mode0 = &rdev->mode_info.crtcs[0]->base.mode;
948 if (rdev->mode_info.crtcs[1]->base.enabled)
949 mode1 = &rdev->mode_info.crtcs[1]->base.mode;
950 rs690_line_buffer_adjust(rdev, mode0, mode1);
951
952 rv515_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[0], &wm0);
953 rv515_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[1], &wm1);
954
955 tmp = wm0.lb_request_fifo_depth;
956 tmp |= wm1.lb_request_fifo_depth << 16;
957 WREG32(LB_MAX_REQ_OUTSTANDING, tmp);
958
959 if (mode0 && mode1) {
960 if (rfixed_trunc(wm0.dbpp) > 64)
961 a.full = rfixed_div(wm0.dbpp, wm0.num_line_pair);
962 else
963 a.full = wm0.num_line_pair.full;
964 if (rfixed_trunc(wm1.dbpp) > 64)
965 b.full = rfixed_div(wm1.dbpp, wm1.num_line_pair);
966 else
967 b.full = wm1.num_line_pair.full;
968 a.full += b.full;
969 fill_rate.full = rfixed_div(wm0.sclk, a);
970 if (wm0.consumption_rate.full > fill_rate.full) {
971 b.full = wm0.consumption_rate.full - fill_rate.full;
972 b.full = rfixed_mul(b, wm0.active_time);
973 a.full = rfixed_const(16);
974 b.full = rfixed_div(b, a);
975 a.full = rfixed_mul(wm0.worst_case_latency,
976 wm0.consumption_rate);
977 priority_mark02.full = a.full + b.full;
978 } else {
979 a.full = rfixed_mul(wm0.worst_case_latency,
980 wm0.consumption_rate);
981 b.full = rfixed_const(16 * 1000);
982 priority_mark02.full = rfixed_div(a, b);
983 }
984 if (wm1.consumption_rate.full > fill_rate.full) {
985 b.full = wm1.consumption_rate.full - fill_rate.full;
986 b.full = rfixed_mul(b, wm1.active_time);
987 a.full = rfixed_const(16);
988 b.full = rfixed_div(b, a);
989 a.full = rfixed_mul(wm1.worst_case_latency,
990 wm1.consumption_rate);
991 priority_mark12.full = a.full + b.full;
992 } else {
993 a.full = rfixed_mul(wm1.worst_case_latency,
994 wm1.consumption_rate);
995 b.full = rfixed_const(16 * 1000);
996 priority_mark12.full = rfixed_div(a, b);
997 }
998 if (wm0.priority_mark.full > priority_mark02.full)
999 priority_mark02.full = wm0.priority_mark.full;
1000 if (rfixed_trunc(priority_mark02) < 0)
1001 priority_mark02.full = 0;
1002 if (wm0.priority_mark_max.full > priority_mark02.full)
1003 priority_mark02.full = wm0.priority_mark_max.full;
1004 if (wm1.priority_mark.full > priority_mark12.full)
1005 priority_mark12.full = wm1.priority_mark.full;
1006 if (rfixed_trunc(priority_mark12) < 0)
1007 priority_mark12.full = 0;
1008 if (wm1.priority_mark_max.full > priority_mark12.full)
1009 priority_mark12.full = wm1.priority_mark_max.full;
1010 WREG32(D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02));
1011 WREG32(D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02));
1012 WREG32(D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12));
1013 WREG32(D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12));
1014 } else if (mode0) {
1015 if (rfixed_trunc(wm0.dbpp) > 64)
1016 a.full = rfixed_div(wm0.dbpp, wm0.num_line_pair);
1017 else
1018 a.full = wm0.num_line_pair.full;
1019 fill_rate.full = rfixed_div(wm0.sclk, a);
1020 if (wm0.consumption_rate.full > fill_rate.full) {
1021 b.full = wm0.consumption_rate.full - fill_rate.full;
1022 b.full = rfixed_mul(b, wm0.active_time);
1023 a.full = rfixed_const(16);
1024 b.full = rfixed_div(b, a);
1025 a.full = rfixed_mul(wm0.worst_case_latency,
1026 wm0.consumption_rate);
1027 priority_mark02.full = a.full + b.full;
1028 } else {
1029 a.full = rfixed_mul(wm0.worst_case_latency,
1030 wm0.consumption_rate);
1031 b.full = rfixed_const(16);
1032 priority_mark02.full = rfixed_div(a, b);
1033 }
1034 if (wm0.priority_mark.full > priority_mark02.full)
1035 priority_mark02.full = wm0.priority_mark.full;
1036 if (rfixed_trunc(priority_mark02) < 0)
1037 priority_mark02.full = 0;
1038 if (wm0.priority_mark_max.full > priority_mark02.full)
1039 priority_mark02.full = wm0.priority_mark_max.full;
1040 WREG32(D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02));
1041 WREG32(D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02));
1042 WREG32(D2MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF);
1043 WREG32(D2MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF);
1044 } else {
1045 if (rfixed_trunc(wm1.dbpp) > 64)
1046 a.full = rfixed_div(wm1.dbpp, wm1.num_line_pair);
1047 else
1048 a.full = wm1.num_line_pair.full;
1049 fill_rate.full = rfixed_div(wm1.sclk, a);
1050 if (wm1.consumption_rate.full > fill_rate.full) {
1051 b.full = wm1.consumption_rate.full - fill_rate.full;
1052 b.full = rfixed_mul(b, wm1.active_time);
1053 a.full = rfixed_const(16);
1054 b.full = rfixed_div(b, a);
1055 a.full = rfixed_mul(wm1.worst_case_latency,
1056 wm1.consumption_rate);
1057 priority_mark12.full = a.full + b.full;
1058 } else {
1059 a.full = rfixed_mul(wm1.worst_case_latency,
1060 wm1.consumption_rate);
1061 b.full = rfixed_const(16 * 1000);
1062 priority_mark12.full = rfixed_div(a, b);
1063 }
1064 if (wm1.priority_mark.full > priority_mark12.full)
1065 priority_mark12.full = wm1.priority_mark.full;
1066 if (rfixed_trunc(priority_mark12) < 0)
1067 priority_mark12.full = 0;
1068 if (wm1.priority_mark_max.full > priority_mark12.full)
1069 priority_mark12.full = wm1.priority_mark_max.full;
1070 WREG32(D1MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF);
1071 WREG32(D1MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF);
1072 WREG32(D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12));
1073 WREG32(D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12));
1074 }
1075}
1076
1077void rv515_bandwidth_update(struct radeon_device *rdev)
1078{
1079 uint32_t tmp;
1080 struct drm_display_mode *mode0 = NULL;
1081 struct drm_display_mode *mode1 = NULL;
1082
1083 if (rdev->mode_info.crtcs[0]->base.enabled)
1084 mode0 = &rdev->mode_info.crtcs[0]->base.mode;
1085 if (rdev->mode_info.crtcs[1]->base.enabled)
1086 mode1 = &rdev->mode_info.crtcs[1]->base.mode;
1087 /*
1088 * Set display0/1 priority up in the memory controller for
1089 * modes if the user specifies HIGH for displaypriority
1090 * option.
1091 */
1092 if (rdev->disp_priority == 2) {
1093 tmp = RREG32_MC(MC_MISC_LAT_TIMER);
1094 tmp &= ~MC_DISP1R_INIT_LAT_MASK;
1095 tmp &= ~MC_DISP0R_INIT_LAT_MASK;
1096 if (mode1)
1097 tmp |= (1 << MC_DISP1R_INIT_LAT_SHIFT);
1098 if (mode0)
1099 tmp |= (1 << MC_DISP0R_INIT_LAT_SHIFT);
1100 WREG32_MC(MC_MISC_LAT_TIMER, tmp);
1101 }
1102 rv515_bandwidth_avivo_update(rdev);
1103}
diff --git a/drivers/gpu/drm/radeon/rv515r.h b/drivers/gpu/drm/radeon/rv515r.h
new file mode 100644
index 000000000000..f3cf84039906
--- /dev/null
+++ b/drivers/gpu/drm/radeon/rv515r.h
@@ -0,0 +1,170 @@
/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef RV515R_H
#define RV515R_H

/* RV515 MMIO registers */
#define PCIE_INDEX			0x0030
#define PCIE_DATA			0x0034
#define MC_IND_INDEX			0x0070
#define		MC_IND_WR_EN			(1 << 24)
#define MC_IND_DATA			0x0074
#define RBBM_SOFT_RESET			0x00F0
#define CONFIG_MEMSIZE			0x00F8
#define HDP_FB_LOCATION			0x0134
#define CP_CSQ_CNTL			0x0740
#define CP_CSQ_MODE			0x0744
#define CP_CSQ_ADDR			0x07F0
#define CP_CSQ_DATA			0x07F4
#define CP_CSQ_STAT			0x07F8
#define CP_CSQ2_STAT			0x07FC
#define RBBM_STATUS			0x0E40
#define DST_PIPE_CONFIG			0x170C
#define WAIT_UNTIL			0x1720
#define		WAIT_2D_IDLE			(1 << 14)
#define		WAIT_3D_IDLE			(1 << 15)
#define		WAIT_2D_IDLECLEAN		(1 << 16)
#define		WAIT_3D_IDLECLEAN		(1 << 17)
#define ISYNC_CNTL			0x1724
#define		ISYNC_ANY2D_IDLE3D		(1 << 0)
#define		ISYNC_ANY3D_IDLE2D		(1 << 1)
#define		ISYNC_TRIG2D_IDLE3D		(1 << 2)
#define		ISYNC_TRIG3D_IDLE2D		(1 << 3)
#define		ISYNC_WAIT_IDLEGUI		(1 << 4)
#define		ISYNC_CPSCRATCH_IDLEGUI		(1 << 5)
#define VAP_INDEX_OFFSET		0x208C
#define VAP_PVS_STATE_FLUSH_REG		0x2284
#define GB_ENABLE			0x4008
#define GB_MSPOS0			0x4010
#define		MS_X0_SHIFT			0
#define		MS_Y0_SHIFT			4
#define		MS_X1_SHIFT			8
#define		MS_Y1_SHIFT			12
#define		MS_X2_SHIFT			16
#define		MS_Y2_SHIFT			20
#define		MSBD0_Y_SHIFT			24
#define		MSBD0_X_SHIFT			28
#define GB_MSPOS1			0x4014
#define		MS_X3_SHIFT			0
#define		MS_Y3_SHIFT			4
#define		MS_X4_SHIFT			8
#define		MS_Y4_SHIFT			12
#define		MS_X5_SHIFT			16
#define		MS_Y5_SHIFT			20
#define		MSBD1_SHIFT			24
#define GB_TILE_CONFIG			0x4018
#define		ENABLE_TILING			(1 << 0)
#define		PIPE_COUNT_MASK			0x0000000E
#define		PIPE_COUNT_SHIFT		1
#define		TILE_SIZE_8			(0 << 4)
#define		TILE_SIZE_16			(1 << 4)
#define		TILE_SIZE_32			(2 << 4)
#define		SUBPIXEL_1_12			(0 << 16)
#define		SUBPIXEL_1_16			(1 << 16)
#define GB_SELECT			0x401C
#define GB_AA_CONFIG			0x4020
#define GB_PIPE_SELECT			0x402C
#define GA_ENHANCE			0x4274
#define		GA_DEADLOCK_CNTL		(1 << 0)
#define		GA_FASTSYNC_CNTL		(1 << 1)
#define GA_POLY_MODE			0x4288
#define		FRONT_PTYPE_POINT		(0 << 4)
#define		FRONT_PTYPE_LINE		(1 << 4)
#define		FRONT_PTYPE_TRIANGE		(2 << 4)
#define		BACK_PTYPE_POINT		(0 << 7)
#define		BACK_PTYPE_LINE			(1 << 7)
#define		BACK_PTYPE_TRIANGE		(2 << 7)
#define GA_ROUND_MODE			0x428C
#define		GEOMETRY_ROUND_TRUNC		(0 << 0)
#define		GEOMETRY_ROUND_NEAREST		(1 << 0)
#define		COLOR_ROUND_TRUNC		(0 << 2)
#define		COLOR_ROUND_NEAREST		(1 << 2)
#define SU_REG_DEST			0x42C8
#define RB3D_DSTCACHE_CTLSTAT		0x4E4C
#define		RB3D_DC_FLUSH			(2 << 0)
#define		RB3D_DC_FREE			(2 << 2)
#define		RB3D_DC_FINISH			(1 << 4)
#define ZB_ZCACHE_CTLSTAT		0x4F18
#define		ZC_FLUSH			(1 << 0)
#define		ZC_FREE				(1 << 1)
#define DC_LB_MEMORY_SPLIT		0x6520
#define		DC_LB_MEMORY_SPLIT_MASK			0x00000003
#define		DC_LB_MEMORY_SPLIT_SHIFT		0
#define		DC_LB_MEMORY_SPLIT_D1HALF_D2HALF	0
#define		DC_LB_MEMORY_SPLIT_D1_3Q_D2_1Q		1
#define		DC_LB_MEMORY_SPLIT_D1_ONLY		2
#define		DC_LB_MEMORY_SPLIT_D1_1Q_D2_3Q		3
#define		DC_LB_MEMORY_SPLIT_SHIFT_MODE		(1 << 2)
#define		DC_LB_DISP1_END_ADR_SHIFT		4
#define		DC_LB_DISP1_END_ADR_MASK		0x00007FF0
#define D1MODE_PRIORITY_A_CNT		0x6548
#define		MODE_PRIORITY_MARK_MASK		0x00007FFF
#define		MODE_PRIORITY_OFF		(1 << 16)
#define		MODE_PRIORITY_ALWAYS_ON		(1 << 20)
#define		MODE_PRIORITY_FORCE_MASK	(1 << 24)
#define D1MODE_PRIORITY_B_CNT		0x654C
#define LB_MAX_REQ_OUTSTANDING		0x6D58
#define		LB_D1_MAX_REQ_OUTSTANDING_MASK	0x0000000F
#define		LB_D1_MAX_REQ_OUTSTANDING_SHIFT	0
#define		LB_D2_MAX_REQ_OUTSTANDING_MASK	0x000F0000
#define		LB_D2_MAX_REQ_OUTSTANDING_SHIFT	16
#define D2MODE_PRIORITY_A_CNT		0x6D48
#define D2MODE_PRIORITY_B_CNT		0x6D4C

/* Indirect ix[MC] registers (accessed via MC_IND_INDEX/MC_IND_DATA) */
#define MC_FB_LOCATION			0x01
#define		MC_FB_START_MASK		0x0000FFFF
#define		MC_FB_START_SHIFT		0
#define		MC_FB_TOP_MASK			0xFFFF0000
#define		MC_FB_TOP_SHIFT			16
#define MC_AGP_LOCATION			0x02
#define		MC_AGP_START_MASK		0x0000FFFF
#define		MC_AGP_START_SHIFT		0
#define		MC_AGP_TOP_MASK			0xFFFF0000
#define		MC_AGP_TOP_SHIFT		16
#define MC_AGP_BASE			0x03
#define MC_AGP_BASE_2			0x04
#define MC_CNTL				0x5
#define		MEM_NUM_CHANNELS_MASK		0x00000003
#define MC_STATUS			0x08
#define		MC_STATUS_IDLE			(1 << 4)
#define MC_MISC_LAT_TIMER		0x09
#define		MC_CPR_INIT_LAT_MASK		0x0000000F
#define		MC_VF_INIT_LAT_MASK		0x000000F0
#define		MC_DISP0R_INIT_LAT_MASK		0x00000F00
#define		MC_DISP0R_INIT_LAT_SHIFT	8
#define		MC_DISP1R_INIT_LAT_MASK		0x0000F000
#define		MC_DISP1R_INIT_LAT_SHIFT	12
#define		MC_FIXED_INIT_LAT_MASK		0x000F0000
#define		MC_E2R_INIT_LAT_MASK		0x00F00000
#define		SAME_PAGE_PRIO_MASK		0x0F000000
#define		MC_GLOBW_INIT_LAT_MASK		0xF0000000


#endif