path: root/drivers/gpu/drm/radeon/evergreen.c
author     Alex Deucher <alexdeucher@gmail.com>    2010-11-21 10:59:01 -0500
committer  Dave Airlie <airlied@redhat.com>        2010-11-21 20:51:08 -0500
commit     6f34be50bd1bdd2ff3c955940e033a80d05f248a (patch)
tree       7e9635a2e589cd3a49490a4656611c112e485059 /drivers/gpu/drm/radeon/evergreen.c
parent     f5a8020903932624cf020dc72455a10a3e005087 (diff)
drm/radeon/kms: add pageflip ioctl support (v3)
This adds support for dri2 pageflipping.

v2: precision updates from Mario Kleiner.

v3: Multihead fixes from Mario Kleiner; missing crtc offset;
    add note about update pending bit on pre-avivo chips.

Signed-off-by: Alex Deucher <alexdeucher@gmail.com>
Signed-off-by: Mario Kleiner <mario.kleiner@tuebingen.mpg.de>
Signed-off-by: Dave Airlie <airlied@redhat.com>
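As background, the hooks added in this patch are driven from userspace through the generic DRM page-flip ioctl. The snippet below is a minimal, illustrative sketch (not part of the patch) of how a KMS client queues a flip and waits for the completion event using libdrm's drmModePageFlip() and drmHandleEvent(); the fd, crtc_id and fb_id values are assumed to have already been obtained through the usual drmModeGetResources()/drmModeAddFB() setup, which is not shown.

#include <poll.h>
#include <stdint.h>
#include <stdio.h>
#include <xf86drm.h>
#include <xf86drmMode.h>

static void page_flip_done(int fd, unsigned int frame,
                           unsigned int sec, unsigned int usec, void *data)
{
	/* Called by drmHandleEvent() once the flip has completed on a vblank. */
	*(int *)data = 1;
}

/* Queue one flip of crtc_id to fb_id and block until it completes.
 * fd, crtc_id and fb_id are assumed to be valid (setup code not shown). */
static int flip_and_wait(int fd, uint32_t crtc_id, uint32_t fb_id)
{
	drmEventContext evctx = {
		.version = DRM_EVENT_CONTEXT_VERSION,
		.page_flip_handler = page_flip_done,
	};
	struct pollfd pfd = { .fd = fd, .events = POLLIN };
	int done = 0;
	int ret;

	/* Ask the kernel to latch the new framebuffer during vblank and to
	 * deliver a completion event when it has done so. */
	ret = drmModePageFlip(fd, crtc_id, fb_id, DRM_MODE_PAGE_FLIP_EVENT, &done);
	if (ret) {
		fprintf(stderr, "drmModePageFlip failed: %d\n", ret);
		return ret;
	}

	/* The completion event arrives on the DRM fd; drmHandleEvent()
	 * dispatches it to page_flip_done(). */
	while (!done) {
		if (poll(&pfd, 1, -1) > 0)
			drmHandleEvent(fd, &evctx);
	}
	return 0;
}

Passing DRM_MODE_PAGE_FLIP_EVENT is what makes the kernel send the vblank-synchronized completion event that the pageflip interrupt handling added below signals.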
Diffstat (limited to 'drivers/gpu/drm/radeon/evergreen.c')
-rw-r--r--  drivers/gpu/drm/radeon/evergreen.c  301
1 file changed, 200 insertions(+), 101 deletions(-)
diff --git a/drivers/gpu/drm/radeon/evergreen.c b/drivers/gpu/drm/radeon/evergreen.c
index 4dc5b4714c5a..df3f37243222 100644
--- a/drivers/gpu/drm/radeon/evergreen.c
+++ b/drivers/gpu/drm/radeon/evergreen.c
@@ -40,6 +40,61 @@
 static void evergreen_gpu_init(struct radeon_device *rdev);
 void evergreen_fini(struct radeon_device *rdev);
 
+void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
+{
+	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc];
+	u32 tmp;
+
+	/* make sure flip is at vb rather than hb */
+	tmp = RREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
+	tmp &= ~EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN;
+	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);
+
+	/* set pageflip to happen anywhere in vblank interval */
+	WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
+
+	/* enable the pflip int */
+	radeon_irq_kms_pflip_irq_get(rdev, crtc);
+}
+
+void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
+{
+	/* disable the pflip int */
+	radeon_irq_kms_pflip_irq_put(rdev, crtc);
+}
+
+u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
+{
+	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
+	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
+
+	/* Lock the graphics update lock */
+	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
+	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
+
+	/* update the scanout addresses */
+	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
+	       upper_32_bits(crtc_base));
+	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
+	       (u32)crtc_base);
+
+	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
+	       upper_32_bits(crtc_base));
+	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
+	       (u32)crtc_base);
+
+	/* Wait for update_pending to go high. */
+	while (!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING));
+	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
+
+	/* Unlock the lock, so double-buffering can take place inside vblank */
+	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
+	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
+
+	/* Return current update_pending status: */
+	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
+}
+
 /* get temperature in millidegrees */
 u32 evergreen_get_temp(struct radeon_device *rdev)
 {
@@ -2060,6 +2115,7 @@ int evergreen_irq_set(struct radeon_device *rdev)
 	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
 	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
 	u32 grbm_int_cntl = 0;
+	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
 
 	if (!rdev->irq.installed) {
 		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
@@ -2085,27 +2141,33 @@ int evergreen_irq_set(struct radeon_device *rdev)
 		cp_int_cntl |= RB_INT_ENABLE;
 		cp_int_cntl |= TIME_STAMP_INT_ENABLE;
 	}
-	if (rdev->irq.crtc_vblank_int[0]) {
+	if (rdev->irq.crtc_vblank_int[0] ||
+	    rdev->irq.pflip[0]) {
 		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
 		crtc1 |= VBLANK_INT_MASK;
 	}
-	if (rdev->irq.crtc_vblank_int[1]) {
+	if (rdev->irq.crtc_vblank_int[1] ||
+	    rdev->irq.pflip[1]) {
 		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
 		crtc2 |= VBLANK_INT_MASK;
 	}
-	if (rdev->irq.crtc_vblank_int[2]) {
+	if (rdev->irq.crtc_vblank_int[2] ||
+	    rdev->irq.pflip[2]) {
 		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
 		crtc3 |= VBLANK_INT_MASK;
 	}
-	if (rdev->irq.crtc_vblank_int[3]) {
+	if (rdev->irq.crtc_vblank_int[3] ||
+	    rdev->irq.pflip[3]) {
 		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
 		crtc4 |= VBLANK_INT_MASK;
 	}
-	if (rdev->irq.crtc_vblank_int[4]) {
+	if (rdev->irq.crtc_vblank_int[4] ||
+	    rdev->irq.pflip[4]) {
 		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
 		crtc5 |= VBLANK_INT_MASK;
 	}
-	if (rdev->irq.crtc_vblank_int[5]) {
+	if (rdev->irq.crtc_vblank_int[5] ||
+	    rdev->irq.pflip[5]) {
 		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
 		crtc6 |= VBLANK_INT_MASK;
 	}
@@ -2148,6 +2210,13 @@ int evergreen_irq_set(struct radeon_device *rdev)
 	WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
 	WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
 
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
+
 	WREG32(DC_HPD1_INT_CONTROL, hpd1);
 	WREG32(DC_HPD2_INT_CONTROL, hpd2);
 	WREG32(DC_HPD3_INT_CONTROL, hpd3);
@@ -2158,79 +2227,92 @@ int evergreen_irq_set(struct radeon_device *rdev)
 	return 0;
 }
 
-static inline void evergreen_irq_ack(struct radeon_device *rdev,
-				     u32 *disp_int,
-				     u32 *disp_int_cont,
-				     u32 *disp_int_cont2,
-				     u32 *disp_int_cont3,
-				     u32 *disp_int_cont4,
-				     u32 *disp_int_cont5)
+static inline void evergreen_irq_ack(struct radeon_device *rdev)
 {
 	u32 tmp;
 
-	*disp_int = RREG32(DISP_INTERRUPT_STATUS);
-	*disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
-	*disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
-	*disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
-	*disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
-	*disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
-
-	if (*disp_int & LB_D1_VBLANK_INTERRUPT)
+	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
+	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
+	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
+	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
+	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
+	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
+	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
+	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
+	rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
+	rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
+	rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
+	rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
+
+	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
+		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
+	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
+		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
+	if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
+		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
+	if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
+		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
+	if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
+		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
+	if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
+		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
+
+	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
-	if (*disp_int & LB_D1_VLINE_INTERRUPT)
+	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
 		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
 
-	if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
-	if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
 		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
 
-	if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
-	if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
 		WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
 
-	if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
-	if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
 		WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
 
-	if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
-	if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
 		WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
 
-	if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
-	if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
 		WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
 
-	if (*disp_int & DC_HPD1_INTERRUPT) {
+	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
 		tmp = RREG32(DC_HPD1_INT_CONTROL);
 		tmp |= DC_HPDx_INT_ACK;
 		WREG32(DC_HPD1_INT_CONTROL, tmp);
 	}
-	if (*disp_int_cont & DC_HPD2_INTERRUPT) {
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
 		tmp = RREG32(DC_HPD2_INT_CONTROL);
 		tmp |= DC_HPDx_INT_ACK;
 		WREG32(DC_HPD2_INT_CONTROL, tmp);
 	}
-	if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
 		tmp = RREG32(DC_HPD3_INT_CONTROL);
 		tmp |= DC_HPDx_INT_ACK;
 		WREG32(DC_HPD3_INT_CONTROL, tmp);
 	}
-	if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
 		tmp = RREG32(DC_HPD4_INT_CONTROL);
 		tmp |= DC_HPDx_INT_ACK;
 		WREG32(DC_HPD4_INT_CONTROL, tmp);
 	}
-	if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
 		tmp = RREG32(DC_HPD5_INT_CONTROL);
 		tmp |= DC_HPDx_INT_ACK;
 		WREG32(DC_HPD5_INT_CONTROL, tmp);
 	}
-	if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
 		tmp = RREG32(DC_HPD5_INT_CONTROL);
 		tmp |= DC_HPDx_INT_ACK;
 		WREG32(DC_HPD6_INT_CONTROL, tmp);
@@ -2239,14 +2321,10 @@ static inline void evergreen_irq_ack(struct radeon_device *rdev,
 
 void evergreen_irq_disable(struct radeon_device *rdev)
 {
-	u32 disp_int, disp_int_cont, disp_int_cont2;
-	u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
-
 	r600_disable_interrupts(rdev);
 	/* Wait and acknowledge irq */
 	mdelay(1);
-	evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
-			  &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
+	evergreen_irq_ack(rdev);
 	evergreen_disable_interrupt_state(rdev);
 }
 
@@ -2286,8 +2364,6 @@ int evergreen_irq_process(struct radeon_device *rdev)
 	u32 rptr = rdev->ih.rptr;
 	u32 src_id, src_data;
 	u32 ring_index;
-	u32 disp_int, disp_int_cont, disp_int_cont2;
-	u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
 	unsigned long flags;
 	bool queue_hotplug = false;
 
@@ -2308,8 +2384,7 @@ int evergreen_irq_process(struct radeon_device *rdev)
 
 restart_ih:
 	/* display interrupts */
-	evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
-			  &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
+	evergreen_irq_ack(rdev);
 
 	rdev->ih.wptr = wptr;
 	while (rptr != wptr) {
@@ -2322,17 +2397,21 @@ restart_ih:
 		case 1: /* D1 vblank/vline */
 			switch (src_data) {
 			case 0: /* D1 vblank */
-				if (disp_int & LB_D1_VBLANK_INTERRUPT) {
-					drm_handle_vblank(rdev->ddev, 0);
-					rdev->pm.vblank_sync = true;
-					wake_up(&rdev->irq.vblank_queue);
-					disp_int &= ~LB_D1_VBLANK_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
+					if (rdev->irq.pflip[0])
+						radeon_crtc_handle_flip(rdev, 0);
+					if (rdev->irq.crtc_vblank_int[0]) {
+						drm_handle_vblank(rdev->ddev, 0);
+						rdev->pm.vblank_sync = true;
+						wake_up(&rdev->irq.vblank_queue);
+					}
+					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
 					DRM_DEBUG("IH: D1 vblank\n");
 				}
 				break;
 			case 1: /* D1 vline */
-				if (disp_int & LB_D1_VLINE_INTERRUPT) {
-					disp_int &= ~LB_D1_VLINE_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
 					DRM_DEBUG("IH: D1 vline\n");
 				}
 				break;
@@ -2344,17 +2423,21 @@ restart_ih:
 		case 2: /* D2 vblank/vline */
 			switch (src_data) {
 			case 0: /* D2 vblank */
-				if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
-					drm_handle_vblank(rdev->ddev, 1);
-					rdev->pm.vblank_sync = true;
-					wake_up(&rdev->irq.vblank_queue);
-					disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
+					if (rdev->irq.pflip[1])
+						radeon_crtc_handle_flip(rdev, 1);
+					if (rdev->irq.crtc_vblank_int[1]) {
+						drm_handle_vblank(rdev->ddev, 1);
+						rdev->pm.vblank_sync = true;
+						wake_up(&rdev->irq.vblank_queue);
+					}
+					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
 					DRM_DEBUG("IH: D2 vblank\n");
 				}
 				break;
 			case 1: /* D2 vline */
-				if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
-					disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
 					DRM_DEBUG("IH: D2 vline\n");
 				}
 				break;
@@ -2366,17 +2449,21 @@ restart_ih:
 		case 3: /* D3 vblank/vline */
 			switch (src_data) {
 			case 0: /* D3 vblank */
-				if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
-					drm_handle_vblank(rdev->ddev, 2);
-					rdev->pm.vblank_sync = true;
-					wake_up(&rdev->irq.vblank_queue);
-					disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
+					if (rdev->irq.crtc_vblank_int[2]) {
+						drm_handle_vblank(rdev->ddev, 2);
+						rdev->pm.vblank_sync = true;
+						wake_up(&rdev->irq.vblank_queue);
+					}
+					if (rdev->irq.pflip[2])
+						radeon_crtc_handle_flip(rdev, 2);
+					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
 					DRM_DEBUG("IH: D3 vblank\n");
 				}
 				break;
 			case 1: /* D3 vline */
-				if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
-					disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
 					DRM_DEBUG("IH: D3 vline\n");
 				}
 				break;
@@ -2388,17 +2475,21 @@ restart_ih:
 		case 4: /* D4 vblank/vline */
 			switch (src_data) {
 			case 0: /* D4 vblank */
-				if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
-					drm_handle_vblank(rdev->ddev, 3);
-					rdev->pm.vblank_sync = true;
-					wake_up(&rdev->irq.vblank_queue);
-					disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
+					if (rdev->irq.crtc_vblank_int[3]) {
+						drm_handle_vblank(rdev->ddev, 3);
+						rdev->pm.vblank_sync = true;
+						wake_up(&rdev->irq.vblank_queue);
+					}
+					if (rdev->irq.pflip[3])
+						radeon_crtc_handle_flip(rdev, 3);
+					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
 					DRM_DEBUG("IH: D4 vblank\n");
 				}
 				break;
 			case 1: /* D4 vline */
-				if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
-					disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
 					DRM_DEBUG("IH: D4 vline\n");
 				}
 				break;
@@ -2410,17 +2501,21 @@ restart_ih:
 		case 5: /* D5 vblank/vline */
 			switch (src_data) {
 			case 0: /* D5 vblank */
-				if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
-					drm_handle_vblank(rdev->ddev, 4);
-					rdev->pm.vblank_sync = true;
-					wake_up(&rdev->irq.vblank_queue);
-					disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
+					if (rdev->irq.crtc_vblank_int[4]) {
+						drm_handle_vblank(rdev->ddev, 4);
+						rdev->pm.vblank_sync = true;
+						wake_up(&rdev->irq.vblank_queue);
+					}
+					if (rdev->irq.pflip[4])
+						radeon_crtc_handle_flip(rdev, 4);
+					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
 					DRM_DEBUG("IH: D5 vblank\n");
 				}
 				break;
 			case 1: /* D5 vline */
-				if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
-					disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
 					DRM_DEBUG("IH: D5 vline\n");
 				}
 				break;
@@ -2432,17 +2527,21 @@ restart_ih:
 		case 6: /* D6 vblank/vline */
 			switch (src_data) {
 			case 0: /* D6 vblank */
-				if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
-					drm_handle_vblank(rdev->ddev, 5);
-					rdev->pm.vblank_sync = true;
-					wake_up(&rdev->irq.vblank_queue);
-					disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
+					if (rdev->irq.crtc_vblank_int[5]) {
+						drm_handle_vblank(rdev->ddev, 5);
+						rdev->pm.vblank_sync = true;
+						wake_up(&rdev->irq.vblank_queue);
+					}
+					if (rdev->irq.pflip[5])
+						radeon_crtc_handle_flip(rdev, 5);
+					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
 					DRM_DEBUG("IH: D6 vblank\n");
 				}
 				break;
 			case 1: /* D6 vline */
-				if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
-					disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
 					DRM_DEBUG("IH: D6 vline\n");
 				}
 				break;
@@ -2454,43 +2553,43 @@ restart_ih:
 		case 42: /* HPD hotplug */
 			switch (src_data) {
 			case 0:
-				if (disp_int & DC_HPD1_INTERRUPT) {
-					disp_int &= ~DC_HPD1_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
 					queue_hotplug = true;
 					DRM_DEBUG("IH: HPD1\n");
 				}
 				break;
 			case 1:
-				if (disp_int_cont & DC_HPD2_INTERRUPT) {
-					disp_int_cont &= ~DC_HPD2_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
 					queue_hotplug = true;
 					DRM_DEBUG("IH: HPD2\n");
 				}
 				break;
 			case 2:
-				if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
-					disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
 					queue_hotplug = true;
 					DRM_DEBUG("IH: HPD3\n");
 				}
 				break;
 			case 3:
-				if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
-					disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
 					queue_hotplug = true;
 					DRM_DEBUG("IH: HPD4\n");
 				}
 				break;
 			case 4:
-				if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
-					disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
 					queue_hotplug = true;
 					DRM_DEBUG("IH: HPD5\n");
 				}
 				break;
 			case 5:
-				if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
-					disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
 					queue_hotplug = true;
 					DRM_DEBUG("IH: HPD6\n");
 				}