aboutsummaryrefslogtreecommitdiffstats
path: root/drivers/gpu/drm
diff options
context:
space:
mode:
Diffstat (limited to 'drivers/gpu/drm')
-rw-r--r--drivers/gpu/drm/radeon/evergreen.c301
-rw-r--r--drivers/gpu/drm/radeon/evergreen_reg.h6
-rw-r--r--drivers/gpu/drm/radeon/r100.c76
-rw-r--r--drivers/gpu/drm/radeon/r500_reg.h4
-rw-r--r--drivers/gpu/drm/radeon/r600.c126
-rw-r--r--drivers/gpu/drm/radeon/r600d.h9
-rw-r--r--drivers/gpu/drm/radeon/radeon.h57
-rw-r--r--drivers/gpu/drm/radeon/radeon_asic.c42
-rw-r--r--drivers/gpu/drm/radeon/radeon_asic.h10
-rw-r--r--drivers/gpu/drm/radeon/radeon_display.c300
-rw-r--r--drivers/gpu/drm/radeon/radeon_drv.c11
-rw-r--r--drivers/gpu/drm/radeon/radeon_irq_kms.c39
-rw-r--r--drivers/gpu/drm/radeon/radeon_kms.c21
-rw-r--r--drivers/gpu/drm/radeon/radeon_mode.h13
-rw-r--r--drivers/gpu/drm/radeon/radeon_pm.c6
-rw-r--r--drivers/gpu/drm/radeon/radeon_reg.h3
-rw-r--r--drivers/gpu/drm/radeon/rs600.c116
-rw-r--r--drivers/gpu/drm/radeon/rv770.c34
-rw-r--r--drivers/gpu/drm/radeon/rv770d.h7
19 files changed, 958 insertions, 223 deletions
diff --git a/drivers/gpu/drm/radeon/evergreen.c b/drivers/gpu/drm/radeon/evergreen.c
index 4dc5b4714c5a..25e84379e7c6 100644
--- a/drivers/gpu/drm/radeon/evergreen.c
+++ b/drivers/gpu/drm/radeon/evergreen.c
@@ -40,6 +40,61 @@
40static void evergreen_gpu_init(struct radeon_device *rdev); 40static void evergreen_gpu_init(struct radeon_device *rdev);
41void evergreen_fini(struct radeon_device *rdev); 41void evergreen_fini(struct radeon_device *rdev);
42 42
43void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
44{
45 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc];
46 u32 tmp;
47
48 /* make sure flip is at vb rather than hb */
49 tmp = RREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
50 tmp &= ~EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN;
51 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);
52
53 /* set pageflip to happen anywhere in vblank interval */
54 WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
55
56 /* enable the pflip int */
57 radeon_irq_kms_pflip_irq_get(rdev, crtc);
58}
59
60void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
61{
62 /* disable the pflip int */
63 radeon_irq_kms_pflip_irq_put(rdev, crtc);
64}
65
66u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
67{
68 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
69 u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
70
71 /* Lock the graphics update lock */
72 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
73 WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
74
75 /* update the scanout addresses */
76 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
77 upper_32_bits(crtc_base));
78 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
79 (u32)crtc_base);
80
81 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
82 upper_32_bits(crtc_base));
83 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
84 (u32)crtc_base);
85
86 /* Wait for update_pending to go high. */
87 while (!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING));
88 DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
89
90 /* Unlock the lock, so double-buffering can take place inside vblank */
91 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
92 WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
93
94 /* Return current update_pending status: */
95 return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
96}
97
43/* get temperature in millidegrees */ 98/* get temperature in millidegrees */
44u32 evergreen_get_temp(struct radeon_device *rdev) 99u32 evergreen_get_temp(struct radeon_device *rdev)
45{ 100{
@@ -2060,6 +2115,7 @@ int evergreen_irq_set(struct radeon_device *rdev)
2060 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0; 2115 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
2061 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6; 2116 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
2062 u32 grbm_int_cntl = 0; 2117 u32 grbm_int_cntl = 0;
2118 u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
2063 2119
2064 if (!rdev->irq.installed) { 2120 if (!rdev->irq.installed) {
2065 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n"); 2121 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
@@ -2085,27 +2141,33 @@ int evergreen_irq_set(struct radeon_device *rdev)
2085 cp_int_cntl |= RB_INT_ENABLE; 2141 cp_int_cntl |= RB_INT_ENABLE;
2086 cp_int_cntl |= TIME_STAMP_INT_ENABLE; 2142 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
2087 } 2143 }
2088 if (rdev->irq.crtc_vblank_int[0]) { 2144 if (rdev->irq.crtc_vblank_int[0] ||
2145 rdev->irq.pflip[0]) {
2089 DRM_DEBUG("evergreen_irq_set: vblank 0\n"); 2146 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
2090 crtc1 |= VBLANK_INT_MASK; 2147 crtc1 |= VBLANK_INT_MASK;
2091 } 2148 }
2092 if (rdev->irq.crtc_vblank_int[1]) { 2149 if (rdev->irq.crtc_vblank_int[1] ||
2150 rdev->irq.pflip[1]) {
2093 DRM_DEBUG("evergreen_irq_set: vblank 1\n"); 2151 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
2094 crtc2 |= VBLANK_INT_MASK; 2152 crtc2 |= VBLANK_INT_MASK;
2095 } 2153 }
2096 if (rdev->irq.crtc_vblank_int[2]) { 2154 if (rdev->irq.crtc_vblank_int[2] ||
2155 rdev->irq.pflip[2]) {
2097 DRM_DEBUG("evergreen_irq_set: vblank 2\n"); 2156 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
2098 crtc3 |= VBLANK_INT_MASK; 2157 crtc3 |= VBLANK_INT_MASK;
2099 } 2158 }
2100 if (rdev->irq.crtc_vblank_int[3]) { 2159 if (rdev->irq.crtc_vblank_int[3] ||
2160 rdev->irq.pflip[3]) {
2101 DRM_DEBUG("evergreen_irq_set: vblank 3\n"); 2161 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
2102 crtc4 |= VBLANK_INT_MASK; 2162 crtc4 |= VBLANK_INT_MASK;
2103 } 2163 }
2104 if (rdev->irq.crtc_vblank_int[4]) { 2164 if (rdev->irq.crtc_vblank_int[4] ||
2165 rdev->irq.pflip[4]) {
2105 DRM_DEBUG("evergreen_irq_set: vblank 4\n"); 2166 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
2106 crtc5 |= VBLANK_INT_MASK; 2167 crtc5 |= VBLANK_INT_MASK;
2107 } 2168 }
2108 if (rdev->irq.crtc_vblank_int[5]) { 2169 if (rdev->irq.crtc_vblank_int[5] ||
2170 rdev->irq.pflip[5]) {
2109 DRM_DEBUG("evergreen_irq_set: vblank 5\n"); 2171 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
2110 crtc6 |= VBLANK_INT_MASK; 2172 crtc6 |= VBLANK_INT_MASK;
2111 } 2173 }
@@ -2148,6 +2210,13 @@ int evergreen_irq_set(struct radeon_device *rdev)
2148 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5); 2210 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
2149 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6); 2211 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
2150 2212
2213 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
2214 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
2215 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
2216 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
2217 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
2218 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
2219
2151 WREG32(DC_HPD1_INT_CONTROL, hpd1); 2220 WREG32(DC_HPD1_INT_CONTROL, hpd1);
2152 WREG32(DC_HPD2_INT_CONTROL, hpd2); 2221 WREG32(DC_HPD2_INT_CONTROL, hpd2);
2153 WREG32(DC_HPD3_INT_CONTROL, hpd3); 2222 WREG32(DC_HPD3_INT_CONTROL, hpd3);
@@ -2158,79 +2227,92 @@ int evergreen_irq_set(struct radeon_device *rdev)
2158 return 0; 2227 return 0;
2159} 2228}
2160 2229
2161static inline void evergreen_irq_ack(struct radeon_device *rdev, 2230static inline void evergreen_irq_ack(struct radeon_device *rdev)
2162 u32 *disp_int,
2163 u32 *disp_int_cont,
2164 u32 *disp_int_cont2,
2165 u32 *disp_int_cont3,
2166 u32 *disp_int_cont4,
2167 u32 *disp_int_cont5)
2168{ 2231{
2169 u32 tmp; 2232 u32 tmp;
2170 2233
2171 *disp_int = RREG32(DISP_INTERRUPT_STATUS); 2234 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
2172 *disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE); 2235 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
2173 *disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2); 2236 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
2174 *disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3); 2237 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
2175 *disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4); 2238 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
2176 *disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5); 2239 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
2177 2240 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
2178 if (*disp_int & LB_D1_VBLANK_INTERRUPT) 2241 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
2242 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
2243 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
2244 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
2245 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
2246
2247 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
2248 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2249 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
2250 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2251 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
2252 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2253 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
2254 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2255 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
2256 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2257 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
2258 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2259
2260 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
2179 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK); 2261 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
2180 if (*disp_int & LB_D1_VLINE_INTERRUPT) 2262 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
2181 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK); 2263 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
2182 2264
2183 if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT) 2265 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
2184 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK); 2266 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
2185 if (*disp_int_cont & LB_D2_VLINE_INTERRUPT) 2267 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
2186 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK); 2268 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
2187 2269
2188 if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) 2270 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
2189 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK); 2271 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
2190 if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT) 2272 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
2191 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK); 2273 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
2192 2274
2193 if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) 2275 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
2194 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK); 2276 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
2195 if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT) 2277 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
2196 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK); 2278 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
2197 2279
2198 if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) 2280 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
2199 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK); 2281 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
2200 if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT) 2282 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
2201 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK); 2283 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
2202 2284
2203 if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) 2285 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
2204 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK); 2286 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
2205 if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT) 2287 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
2206 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK); 2288 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
2207 2289
2208 if (*disp_int & DC_HPD1_INTERRUPT) { 2290 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
2209 tmp = RREG32(DC_HPD1_INT_CONTROL); 2291 tmp = RREG32(DC_HPD1_INT_CONTROL);
2210 tmp |= DC_HPDx_INT_ACK; 2292 tmp |= DC_HPDx_INT_ACK;
2211 WREG32(DC_HPD1_INT_CONTROL, tmp); 2293 WREG32(DC_HPD1_INT_CONTROL, tmp);
2212 } 2294 }
2213 if (*disp_int_cont & DC_HPD2_INTERRUPT) { 2295 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
2214 tmp = RREG32(DC_HPD2_INT_CONTROL); 2296 tmp = RREG32(DC_HPD2_INT_CONTROL);
2215 tmp |= DC_HPDx_INT_ACK; 2297 tmp |= DC_HPDx_INT_ACK;
2216 WREG32(DC_HPD2_INT_CONTROL, tmp); 2298 WREG32(DC_HPD2_INT_CONTROL, tmp);
2217 } 2299 }
2218 if (*disp_int_cont2 & DC_HPD3_INTERRUPT) { 2300 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
2219 tmp = RREG32(DC_HPD3_INT_CONTROL); 2301 tmp = RREG32(DC_HPD3_INT_CONTROL);
2220 tmp |= DC_HPDx_INT_ACK; 2302 tmp |= DC_HPDx_INT_ACK;
2221 WREG32(DC_HPD3_INT_CONTROL, tmp); 2303 WREG32(DC_HPD3_INT_CONTROL, tmp);
2222 } 2304 }
2223 if (*disp_int_cont3 & DC_HPD4_INTERRUPT) { 2305 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
2224 tmp = RREG32(DC_HPD4_INT_CONTROL); 2306 tmp = RREG32(DC_HPD4_INT_CONTROL);
2225 tmp |= DC_HPDx_INT_ACK; 2307 tmp |= DC_HPDx_INT_ACK;
2226 WREG32(DC_HPD4_INT_CONTROL, tmp); 2308 WREG32(DC_HPD4_INT_CONTROL, tmp);
2227 } 2309 }
2228 if (*disp_int_cont4 & DC_HPD5_INTERRUPT) { 2310 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
2229 tmp = RREG32(DC_HPD5_INT_CONTROL); 2311 tmp = RREG32(DC_HPD5_INT_CONTROL);
2230 tmp |= DC_HPDx_INT_ACK; 2312 tmp |= DC_HPDx_INT_ACK;
2231 WREG32(DC_HPD5_INT_CONTROL, tmp); 2313 WREG32(DC_HPD5_INT_CONTROL, tmp);
2232 } 2314 }
2233 if (*disp_int_cont5 & DC_HPD6_INTERRUPT) { 2315 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
2234 tmp = RREG32(DC_HPD5_INT_CONTROL); 2316 tmp = RREG32(DC_HPD5_INT_CONTROL);
2235 tmp |= DC_HPDx_INT_ACK; 2317 tmp |= DC_HPDx_INT_ACK;
2236 WREG32(DC_HPD6_INT_CONTROL, tmp); 2318 WREG32(DC_HPD6_INT_CONTROL, tmp);
@@ -2239,14 +2321,10 @@ static inline void evergreen_irq_ack(struct radeon_device *rdev,
2239 2321
2240void evergreen_irq_disable(struct radeon_device *rdev) 2322void evergreen_irq_disable(struct radeon_device *rdev)
2241{ 2323{
2242 u32 disp_int, disp_int_cont, disp_int_cont2;
2243 u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
2244
2245 r600_disable_interrupts(rdev); 2324 r600_disable_interrupts(rdev);
2246 /* Wait and acknowledge irq */ 2325 /* Wait and acknowledge irq */
2247 mdelay(1); 2326 mdelay(1);
2248 evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2, 2327 evergreen_irq_ack(rdev);
2249 &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
2250 evergreen_disable_interrupt_state(rdev); 2328 evergreen_disable_interrupt_state(rdev);
2251} 2329}
2252 2330
@@ -2286,8 +2364,6 @@ int evergreen_irq_process(struct radeon_device *rdev)
2286 u32 rptr = rdev->ih.rptr; 2364 u32 rptr = rdev->ih.rptr;
2287 u32 src_id, src_data; 2365 u32 src_id, src_data;
2288 u32 ring_index; 2366 u32 ring_index;
2289 u32 disp_int, disp_int_cont, disp_int_cont2;
2290 u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
2291 unsigned long flags; 2367 unsigned long flags;
2292 bool queue_hotplug = false; 2368 bool queue_hotplug = false;
2293 2369
@@ -2308,8 +2384,7 @@ int evergreen_irq_process(struct radeon_device *rdev)
2308 2384
2309restart_ih: 2385restart_ih:
2310 /* display interrupts */ 2386 /* display interrupts */
2311 evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2, 2387 evergreen_irq_ack(rdev);
2312 &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
2313 2388
2314 rdev->ih.wptr = wptr; 2389 rdev->ih.wptr = wptr;
2315 while (rptr != wptr) { 2390 while (rptr != wptr) {
@@ -2322,17 +2397,21 @@ restart_ih:
2322 case 1: /* D1 vblank/vline */ 2397 case 1: /* D1 vblank/vline */
2323 switch (src_data) { 2398 switch (src_data) {
2324 case 0: /* D1 vblank */ 2399 case 0: /* D1 vblank */
2325 if (disp_int & LB_D1_VBLANK_INTERRUPT) { 2400 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
2326 drm_handle_vblank(rdev->ddev, 0); 2401 if (rdev->irq.crtc_vblank_int[0]) {
2327 rdev->pm.vblank_sync = true; 2402 drm_handle_vblank(rdev->ddev, 0);
2328 wake_up(&rdev->irq.vblank_queue); 2403 rdev->pm.vblank_sync = true;
2329 disp_int &= ~LB_D1_VBLANK_INTERRUPT; 2404 wake_up(&rdev->irq.vblank_queue);
2405 }
2406 if (rdev->irq.pflip[0])
2407 radeon_crtc_handle_flip(rdev, 0);
2408 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
2330 DRM_DEBUG("IH: D1 vblank\n"); 2409 DRM_DEBUG("IH: D1 vblank\n");
2331 } 2410 }
2332 break; 2411 break;
2333 case 1: /* D1 vline */ 2412 case 1: /* D1 vline */
2334 if (disp_int & LB_D1_VLINE_INTERRUPT) { 2413 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
2335 disp_int &= ~LB_D1_VLINE_INTERRUPT; 2414 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
2336 DRM_DEBUG("IH: D1 vline\n"); 2415 DRM_DEBUG("IH: D1 vline\n");
2337 } 2416 }
2338 break; 2417 break;
@@ -2344,17 +2423,21 @@ restart_ih:
2344 case 2: /* D2 vblank/vline */ 2423 case 2: /* D2 vblank/vline */
2345 switch (src_data) { 2424 switch (src_data) {
2346 case 0: /* D2 vblank */ 2425 case 0: /* D2 vblank */
2347 if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) { 2426 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
2348 drm_handle_vblank(rdev->ddev, 1); 2427 if (rdev->irq.crtc_vblank_int[1]) {
2349 rdev->pm.vblank_sync = true; 2428 drm_handle_vblank(rdev->ddev, 1);
2350 wake_up(&rdev->irq.vblank_queue); 2429 rdev->pm.vblank_sync = true;
2351 disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT; 2430 wake_up(&rdev->irq.vblank_queue);
2431 }
2432 if (rdev->irq.pflip[1])
2433 radeon_crtc_handle_flip(rdev, 1);
2434 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
2352 DRM_DEBUG("IH: D2 vblank\n"); 2435 DRM_DEBUG("IH: D2 vblank\n");
2353 } 2436 }
2354 break; 2437 break;
2355 case 1: /* D2 vline */ 2438 case 1: /* D2 vline */
2356 if (disp_int_cont & LB_D2_VLINE_INTERRUPT) { 2439 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
2357 disp_int_cont &= ~LB_D2_VLINE_INTERRUPT; 2440 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
2358 DRM_DEBUG("IH: D2 vline\n"); 2441 DRM_DEBUG("IH: D2 vline\n");
2359 } 2442 }
2360 break; 2443 break;
@@ -2366,17 +2449,21 @@ restart_ih:
2366 case 3: /* D3 vblank/vline */ 2449 case 3: /* D3 vblank/vline */
2367 switch (src_data) { 2450 switch (src_data) {
2368 case 0: /* D3 vblank */ 2451 case 0: /* D3 vblank */
2369 if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) { 2452 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
2370 drm_handle_vblank(rdev->ddev, 2); 2453 if (rdev->irq.crtc_vblank_int[2]) {
2371 rdev->pm.vblank_sync = true; 2454 drm_handle_vblank(rdev->ddev, 2);
2372 wake_up(&rdev->irq.vblank_queue); 2455 rdev->pm.vblank_sync = true;
2373 disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT; 2456 wake_up(&rdev->irq.vblank_queue);
2457 }
2458 if (rdev->irq.pflip[2])
2459 radeon_crtc_handle_flip(rdev, 2);
2460 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
2374 DRM_DEBUG("IH: D3 vblank\n"); 2461 DRM_DEBUG("IH: D3 vblank\n");
2375 } 2462 }
2376 break; 2463 break;
2377 case 1: /* D3 vline */ 2464 case 1: /* D3 vline */
2378 if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) { 2465 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
2379 disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT; 2466 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
2380 DRM_DEBUG("IH: D3 vline\n"); 2467 DRM_DEBUG("IH: D3 vline\n");
2381 } 2468 }
2382 break; 2469 break;
@@ -2388,17 +2475,21 @@ restart_ih:
2388 case 4: /* D4 vblank/vline */ 2475 case 4: /* D4 vblank/vline */
2389 switch (src_data) { 2476 switch (src_data) {
2390 case 0: /* D4 vblank */ 2477 case 0: /* D4 vblank */
2391 if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) { 2478 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
2392 drm_handle_vblank(rdev->ddev, 3); 2479 if (rdev->irq.crtc_vblank_int[3]) {
2393 rdev->pm.vblank_sync = true; 2480 drm_handle_vblank(rdev->ddev, 3);
2394 wake_up(&rdev->irq.vblank_queue); 2481 rdev->pm.vblank_sync = true;
2395 disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT; 2482 wake_up(&rdev->irq.vblank_queue);
2483 }
2484 if (rdev->irq.pflip[3])
2485 radeon_crtc_handle_flip(rdev, 3);
2486 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
2396 DRM_DEBUG("IH: D4 vblank\n"); 2487 DRM_DEBUG("IH: D4 vblank\n");
2397 } 2488 }
2398 break; 2489 break;
2399 case 1: /* D4 vline */ 2490 case 1: /* D4 vline */
2400 if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) { 2491 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
2401 disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT; 2492 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
2402 DRM_DEBUG("IH: D4 vline\n"); 2493 DRM_DEBUG("IH: D4 vline\n");
2403 } 2494 }
2404 break; 2495 break;
@@ -2410,17 +2501,21 @@ restart_ih:
2410 case 5: /* D5 vblank/vline */ 2501 case 5: /* D5 vblank/vline */
2411 switch (src_data) { 2502 switch (src_data) {
2412 case 0: /* D5 vblank */ 2503 case 0: /* D5 vblank */
2413 if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) { 2504 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
2414 drm_handle_vblank(rdev->ddev, 4); 2505 if (rdev->irq.crtc_vblank_int[4]) {
2415 rdev->pm.vblank_sync = true; 2506 drm_handle_vblank(rdev->ddev, 4);
2416 wake_up(&rdev->irq.vblank_queue); 2507 rdev->pm.vblank_sync = true;
2417 disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT; 2508 wake_up(&rdev->irq.vblank_queue);
2509 }
2510 if (rdev->irq.pflip[4])
2511 radeon_crtc_handle_flip(rdev, 4);
2512 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
2418 DRM_DEBUG("IH: D5 vblank\n"); 2513 DRM_DEBUG("IH: D5 vblank\n");
2419 } 2514 }
2420 break; 2515 break;
2421 case 1: /* D5 vline */ 2516 case 1: /* D5 vline */
2422 if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) { 2517 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
2423 disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT; 2518 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
2424 DRM_DEBUG("IH: D5 vline\n"); 2519 DRM_DEBUG("IH: D5 vline\n");
2425 } 2520 }
2426 break; 2521 break;
@@ -2432,17 +2527,21 @@ restart_ih:
2432 case 6: /* D6 vblank/vline */ 2527 case 6: /* D6 vblank/vline */
2433 switch (src_data) { 2528 switch (src_data) {
2434 case 0: /* D6 vblank */ 2529 case 0: /* D6 vblank */
2435 if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) { 2530 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
2436 drm_handle_vblank(rdev->ddev, 5); 2531 if (rdev->irq.crtc_vblank_int[5]) {
2437 rdev->pm.vblank_sync = true; 2532 drm_handle_vblank(rdev->ddev, 5);
2438 wake_up(&rdev->irq.vblank_queue); 2533 rdev->pm.vblank_sync = true;
2439 disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT; 2534 wake_up(&rdev->irq.vblank_queue);
2535 }
2536 if (rdev->irq.pflip[5])
2537 radeon_crtc_handle_flip(rdev, 5);
2538 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
2440 DRM_DEBUG("IH: D6 vblank\n"); 2539 DRM_DEBUG("IH: D6 vblank\n");
2441 } 2540 }
2442 break; 2541 break;
2443 case 1: /* D6 vline */ 2542 case 1: /* D6 vline */
2444 if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) { 2543 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
2445 disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT; 2544 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
2446 DRM_DEBUG("IH: D6 vline\n"); 2545 DRM_DEBUG("IH: D6 vline\n");
2447 } 2546 }
2448 break; 2547 break;
@@ -2454,43 +2553,43 @@ restart_ih:
2454 case 42: /* HPD hotplug */ 2553 case 42: /* HPD hotplug */
2455 switch (src_data) { 2554 switch (src_data) {
2456 case 0: 2555 case 0:
2457 if (disp_int & DC_HPD1_INTERRUPT) { 2556 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
2458 disp_int &= ~DC_HPD1_INTERRUPT; 2557 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
2459 queue_hotplug = true; 2558 queue_hotplug = true;
2460 DRM_DEBUG("IH: HPD1\n"); 2559 DRM_DEBUG("IH: HPD1\n");
2461 } 2560 }
2462 break; 2561 break;
2463 case 1: 2562 case 1:
2464 if (disp_int_cont & DC_HPD2_INTERRUPT) { 2563 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
2465 disp_int_cont &= ~DC_HPD2_INTERRUPT; 2564 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
2466 queue_hotplug = true; 2565 queue_hotplug = true;
2467 DRM_DEBUG("IH: HPD2\n"); 2566 DRM_DEBUG("IH: HPD2\n");
2468 } 2567 }
2469 break; 2568 break;
2470 case 2: 2569 case 2:
2471 if (disp_int_cont2 & DC_HPD3_INTERRUPT) { 2570 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
2472 disp_int_cont2 &= ~DC_HPD3_INTERRUPT; 2571 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
2473 queue_hotplug = true; 2572 queue_hotplug = true;
2474 DRM_DEBUG("IH: HPD3\n"); 2573 DRM_DEBUG("IH: HPD3\n");
2475 } 2574 }
2476 break; 2575 break;
2477 case 3: 2576 case 3:
2478 if (disp_int_cont3 & DC_HPD4_INTERRUPT) { 2577 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
2479 disp_int_cont3 &= ~DC_HPD4_INTERRUPT; 2578 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
2480 queue_hotplug = true; 2579 queue_hotplug = true;
2481 DRM_DEBUG("IH: HPD4\n"); 2580 DRM_DEBUG("IH: HPD4\n");
2482 } 2581 }
2483 break; 2582 break;
2484 case 4: 2583 case 4:
2485 if (disp_int_cont4 & DC_HPD5_INTERRUPT) { 2584 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
2486 disp_int_cont4 &= ~DC_HPD5_INTERRUPT; 2585 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
2487 queue_hotplug = true; 2586 queue_hotplug = true;
2488 DRM_DEBUG("IH: HPD5\n"); 2587 DRM_DEBUG("IH: HPD5\n");
2489 } 2588 }
2490 break; 2589 break;
2491 case 5: 2590 case 5:
2492 if (disp_int_cont5 & DC_HPD6_INTERRUPT) { 2591 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
2493 disp_int_cont5 &= ~DC_HPD6_INTERRUPT; 2592 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
2494 queue_hotplug = true; 2593 queue_hotplug = true;
2495 DRM_DEBUG("IH: HPD6\n"); 2594 DRM_DEBUG("IH: HPD6\n");
2496 } 2595 }
diff --git a/drivers/gpu/drm/radeon/evergreen_reg.h b/drivers/gpu/drm/radeon/evergreen_reg.h
index 2330f3a36fd5..c781c92c3451 100644
--- a/drivers/gpu/drm/radeon/evergreen_reg.h
+++ b/drivers/gpu/drm/radeon/evergreen_reg.h
@@ -105,6 +105,11 @@
105#define EVERGREEN_GRPH_Y_START 0x6830 105#define EVERGREEN_GRPH_Y_START 0x6830
106#define EVERGREEN_GRPH_X_END 0x6834 106#define EVERGREEN_GRPH_X_END 0x6834
107#define EVERGREEN_GRPH_Y_END 0x6838 107#define EVERGREEN_GRPH_Y_END 0x6838
108#define EVERGREEN_GRPH_UPDATE 0x6844
109# define EVERGREEN_GRPH_SURFACE_UPDATE_PENDING (1 << 2)
110# define EVERGREEN_GRPH_UPDATE_LOCK (1 << 16)
111#define EVERGREEN_GRPH_FLIP_CONTROL 0x6848
112# define EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN (1 << 0)
108 113
109/* CUR blocks at 0x6998, 0x7598, 0x10198, 0x10d98, 0x11998, 0x12598 */ 114/* CUR blocks at 0x6998, 0x7598, 0x10198, 0x10d98, 0x11998, 0x12598 */
110#define EVERGREEN_CUR_CONTROL 0x6998 115#define EVERGREEN_CUR_CONTROL 0x6998
@@ -178,6 +183,7 @@
178# define EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE (1 << 24) 183# define EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE (1 << 24)
179#define EVERGREEN_CRTC_STATUS 0x6e8c 184#define EVERGREEN_CRTC_STATUS 0x6e8c
180#define EVERGREEN_CRTC_STATUS_POSITION 0x6e90 185#define EVERGREEN_CRTC_STATUS_POSITION 0x6e90
186#define EVERGREEN_MASTER_UPDATE_MODE 0x6ef8
181#define EVERGREEN_CRTC_UPDATE_LOCK 0x6ed4 187#define EVERGREEN_CRTC_UPDATE_LOCK 0x6ed4
182 188
183#define EVERGREEN_DC_GPIO_HPD_MASK 0x64b0 189#define EVERGREEN_DC_GPIO_HPD_MASK 0x64b0
diff --git a/drivers/gpu/drm/radeon/r100.c b/drivers/gpu/drm/radeon/r100.c
index 8e10aa9f74b0..300b4a64d8fe 100644
--- a/drivers/gpu/drm/radeon/r100.c
+++ b/drivers/gpu/drm/radeon/r100.c
@@ -68,6 +68,56 @@ MODULE_FIRMWARE(FIRMWARE_R520);
68 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280 68 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
69 */ 69 */
70 70
71void r100_pre_page_flip(struct radeon_device *rdev, int crtc)
72{
73 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc];
74 u32 tmp;
75
76 /* make sure flip is at vb rather than hb */
77 tmp = RREG32(RADEON_CRTC_OFFSET_CNTL + radeon_crtc->crtc_offset);
78 tmp &= ~RADEON_CRTC_OFFSET_FLIP_CNTL;
79 /* make sure pending bit is asserted */
80 tmp |= RADEON_CRTC_GUI_TRIG_OFFSET_LEFT_EN;
81 WREG32(RADEON_CRTC_OFFSET_CNTL + radeon_crtc->crtc_offset, tmp);
82
83 /* set pageflip to happen as late as possible in the vblank interval.
84 * same field for crtc1/2
85 */
86 tmp = RREG32(RADEON_CRTC_GEN_CNTL);
87 tmp &= ~RADEON_CRTC_VSTAT_MODE_MASK;
88 WREG32(RADEON_CRTC_GEN_CNTL, tmp);
89
90 /* enable the pflip int */
91 radeon_irq_kms_pflip_irq_get(rdev, crtc);
92}
93
94void r100_post_page_flip(struct radeon_device *rdev, int crtc)
95{
96 /* disable the pflip int */
97 radeon_irq_kms_pflip_irq_put(rdev, crtc);
98}
99
100u32 r100_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
101{
102 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
103 u32 tmp = ((u32)crtc_base) | RADEON_CRTC_OFFSET__OFFSET_LOCK;
104
105 /* Lock the graphics update lock */
106 /* update the scanout addresses */
107 WREG32(RADEON_CRTC_OFFSET + radeon_crtc->crtc_offset, tmp);
108
109 /* Wait for update_pending to go high. */
110 while (!(RREG32(RADEON_CRTC_OFFSET + radeon_crtc->crtc_offset) & RADEON_CRTC_OFFSET__GUI_TRIG_OFFSET));
111 DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
112
113 /* Unlock the lock, so double-buffering can take place inside vblank */
114 tmp &= ~RADEON_CRTC_OFFSET__OFFSET_LOCK;
115 WREG32(RADEON_CRTC_OFFSET + radeon_crtc->crtc_offset, tmp);
116
117 /* Return current update_pending status: */
118 return RREG32(RADEON_CRTC_OFFSET + radeon_crtc->crtc_offset) & RADEON_CRTC_OFFSET__GUI_TRIG_OFFSET;
119}
120
71void r100_pm_get_dynpm_state(struct radeon_device *rdev) 121void r100_pm_get_dynpm_state(struct radeon_device *rdev)
72{ 122{
73 int i; 123 int i;
@@ -526,10 +576,12 @@ int r100_irq_set(struct radeon_device *rdev)
526 if (rdev->irq.gui_idle) { 576 if (rdev->irq.gui_idle) {
527 tmp |= RADEON_GUI_IDLE_MASK; 577 tmp |= RADEON_GUI_IDLE_MASK;
528 } 578 }
529 if (rdev->irq.crtc_vblank_int[0]) { 579 if (rdev->irq.crtc_vblank_int[0] ||
580 rdev->irq.pflip[0]) {
530 tmp |= RADEON_CRTC_VBLANK_MASK; 581 tmp |= RADEON_CRTC_VBLANK_MASK;
531 } 582 }
532 if (rdev->irq.crtc_vblank_int[1]) { 583 if (rdev->irq.crtc_vblank_int[1] ||
584 rdev->irq.pflip[1]) {
533 tmp |= RADEON_CRTC2_VBLANK_MASK; 585 tmp |= RADEON_CRTC2_VBLANK_MASK;
534 } 586 }
535 if (rdev->irq.hpd[0]) { 587 if (rdev->irq.hpd[0]) {
@@ -600,14 +652,22 @@ int r100_irq_process(struct radeon_device *rdev)
600 } 652 }
601 /* Vertical blank interrupts */ 653 /* Vertical blank interrupts */
602 if (status & RADEON_CRTC_VBLANK_STAT) { 654 if (status & RADEON_CRTC_VBLANK_STAT) {
603 drm_handle_vblank(rdev->ddev, 0); 655 if (rdev->irq.crtc_vblank_int[0]) {
604 rdev->pm.vblank_sync = true; 656 drm_handle_vblank(rdev->ddev, 0);
605 wake_up(&rdev->irq.vblank_queue); 657 rdev->pm.vblank_sync = true;
658 wake_up(&rdev->irq.vblank_queue);
659 }
660 if (rdev->irq.pflip[0])
661 radeon_crtc_handle_flip(rdev, 0);
606 } 662 }
607 if (status & RADEON_CRTC2_VBLANK_STAT) { 663 if (status & RADEON_CRTC2_VBLANK_STAT) {
608 drm_handle_vblank(rdev->ddev, 1); 664 if (rdev->irq.crtc_vblank_int[1]) {
609 rdev->pm.vblank_sync = true; 665 drm_handle_vblank(rdev->ddev, 1);
610 wake_up(&rdev->irq.vblank_queue); 666 rdev->pm.vblank_sync = true;
667 wake_up(&rdev->irq.vblank_queue);
668 }
669 if (rdev->irq.pflip[1])
670 radeon_crtc_handle_flip(rdev, 1);
611 } 671 }
612 if (status & RADEON_FP_DETECT_STAT) { 672 if (status & RADEON_FP_DETECT_STAT) {
613 queue_hotplug = true; 673 queue_hotplug = true;
diff --git a/drivers/gpu/drm/radeon/r500_reg.h b/drivers/gpu/drm/radeon/r500_reg.h
index 6ac1f604e29b..fc437059918f 100644
--- a/drivers/gpu/drm/radeon/r500_reg.h
+++ b/drivers/gpu/drm/radeon/r500_reg.h
@@ -355,6 +355,8 @@
355#define AVIVO_D1CRTC_FRAME_COUNT 0x60a4 355#define AVIVO_D1CRTC_FRAME_COUNT 0x60a4
356#define AVIVO_D1CRTC_STEREO_CONTROL 0x60c4 356#define AVIVO_D1CRTC_STEREO_CONTROL 0x60c4
357 357
358#define AVIVO_D1MODE_MASTER_UPDATE_MODE 0x60e4
359
358/* master controls */ 360/* master controls */
359#define AVIVO_DC_CRTC_MASTER_EN 0x60f8 361#define AVIVO_DC_CRTC_MASTER_EN 0x60f8
360#define AVIVO_DC_CRTC_TV_CONTROL 0x60fc 362#define AVIVO_DC_CRTC_TV_CONTROL 0x60fc
@@ -409,8 +411,10 @@
409#define AVIVO_D1GRPH_X_END 0x6134 411#define AVIVO_D1GRPH_X_END 0x6134
410#define AVIVO_D1GRPH_Y_END 0x6138 412#define AVIVO_D1GRPH_Y_END 0x6138
411#define AVIVO_D1GRPH_UPDATE 0x6144 413#define AVIVO_D1GRPH_UPDATE 0x6144
414# define AVIVO_D1GRPH_SURFACE_UPDATE_PENDING (1 << 2)
412# define AVIVO_D1GRPH_UPDATE_LOCK (1 << 16) 415# define AVIVO_D1GRPH_UPDATE_LOCK (1 << 16)
413#define AVIVO_D1GRPH_FLIP_CONTROL 0x6148 416#define AVIVO_D1GRPH_FLIP_CONTROL 0x6148
417# define AVIVO_D1GRPH_SURFACE_UPDATE_H_RETRACE_EN (1 << 0)
414 418
415#define AVIVO_D1CUR_CONTROL 0x6400 419#define AVIVO_D1CUR_CONTROL 0x6400
416# define AVIVO_D1CURSOR_EN (1 << 0) 420# define AVIVO_D1CURSOR_EN (1 << 0)
diff --git a/drivers/gpu/drm/radeon/r600.c b/drivers/gpu/drm/radeon/r600.c
index a3552594ccc4..7057b392e005 100644
--- a/drivers/gpu/drm/radeon/r600.c
+++ b/drivers/gpu/drm/radeon/r600.c
@@ -2863,6 +2863,8 @@ static void r600_disable_interrupt_state(struct radeon_device *rdev)
2863 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE); 2863 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
2864 WREG32(GRBM_INT_CNTL, 0); 2864 WREG32(GRBM_INT_CNTL, 0);
2865 WREG32(DxMODE_INT_MASK, 0); 2865 WREG32(DxMODE_INT_MASK, 0);
2866 WREG32(D1GRPH_INTERRUPT_CONTROL, 0);
2867 WREG32(D2GRPH_INTERRUPT_CONTROL, 0);
2866 if (ASIC_IS_DCE3(rdev)) { 2868 if (ASIC_IS_DCE3(rdev)) {
2867 WREG32(DCE3_DACA_AUTODETECT_INT_CONTROL, 0); 2869 WREG32(DCE3_DACA_AUTODETECT_INT_CONTROL, 0);
2868 WREG32(DCE3_DACB_AUTODETECT_INT_CONTROL, 0); 2870 WREG32(DCE3_DACB_AUTODETECT_INT_CONTROL, 0);
@@ -2987,6 +2989,7 @@ int r600_irq_set(struct radeon_device *rdev)
2987 u32 hpd1, hpd2, hpd3, hpd4 = 0, hpd5 = 0, hpd6 = 0; 2989 u32 hpd1, hpd2, hpd3, hpd4 = 0, hpd5 = 0, hpd6 = 0;
2988 u32 grbm_int_cntl = 0; 2990 u32 grbm_int_cntl = 0;
2989 u32 hdmi1, hdmi2; 2991 u32 hdmi1, hdmi2;
2992 u32 d1grph = 0, d2grph = 0;
2990 2993
2991 if (!rdev->irq.installed) { 2994 if (!rdev->irq.installed) {
2992 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n"); 2995 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
@@ -3023,11 +3026,13 @@ int r600_irq_set(struct radeon_device *rdev)
3023 cp_int_cntl |= RB_INT_ENABLE; 3026 cp_int_cntl |= RB_INT_ENABLE;
3024 cp_int_cntl |= TIME_STAMP_INT_ENABLE; 3027 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
3025 } 3028 }
3026 if (rdev->irq.crtc_vblank_int[0]) { 3029 if (rdev->irq.crtc_vblank_int[0] ||
3030 rdev->irq.pflip[0]) {
3027 DRM_DEBUG("r600_irq_set: vblank 0\n"); 3031 DRM_DEBUG("r600_irq_set: vblank 0\n");
3028 mode_int |= D1MODE_VBLANK_INT_MASK; 3032 mode_int |= D1MODE_VBLANK_INT_MASK;
3029 } 3033 }
3030 if (rdev->irq.crtc_vblank_int[1]) { 3034 if (rdev->irq.crtc_vblank_int[1] ||
3035 rdev->irq.pflip[1]) {
3031 DRM_DEBUG("r600_irq_set: vblank 1\n"); 3036 DRM_DEBUG("r600_irq_set: vblank 1\n");
3032 mode_int |= D2MODE_VBLANK_INT_MASK; 3037 mode_int |= D2MODE_VBLANK_INT_MASK;
3033 } 3038 }
@@ -3070,6 +3075,8 @@ int r600_irq_set(struct radeon_device *rdev)
3070 3075
3071 WREG32(CP_INT_CNTL, cp_int_cntl); 3076 WREG32(CP_INT_CNTL, cp_int_cntl);
3072 WREG32(DxMODE_INT_MASK, mode_int); 3077 WREG32(DxMODE_INT_MASK, mode_int);
3078 WREG32(D1GRPH_INTERRUPT_CONTROL, d1grph);
3079 WREG32(D2GRPH_INTERRUPT_CONTROL, d2grph);
3073 WREG32(GRBM_INT_CNTL, grbm_int_cntl); 3080 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
3074 WREG32(R600_HDMI_BLOCK1 + R600_HDMI_CNTL, hdmi1); 3081 WREG32(R600_HDMI_BLOCK1 + R600_HDMI_CNTL, hdmi1);
3075 if (ASIC_IS_DCE3(rdev)) { 3082 if (ASIC_IS_DCE3(rdev)) {
@@ -3092,32 +3099,35 @@ int r600_irq_set(struct radeon_device *rdev)
3092 return 0; 3099 return 0;
3093} 3100}
3094 3101
3095static inline void r600_irq_ack(struct radeon_device *rdev, 3102static inline void r600_irq_ack(struct radeon_device *rdev)
3096 u32 *disp_int,
3097 u32 *disp_int_cont,
3098 u32 *disp_int_cont2)
3099{ 3103{
3100 u32 tmp; 3104 u32 tmp;
3101 3105
3102 if (ASIC_IS_DCE3(rdev)) { 3106 if (ASIC_IS_DCE3(rdev)) {
3103 *disp_int = RREG32(DCE3_DISP_INTERRUPT_STATUS); 3107 rdev->irq.stat_regs.r600.disp_int = RREG32(DCE3_DISP_INTERRUPT_STATUS);
3104 *disp_int_cont = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE); 3108 rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE);
3105 *disp_int_cont2 = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE2); 3109 rdev->irq.stat_regs.r600.disp_int_cont2 = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE2);
3106 } else { 3110 } else {
3107 *disp_int = RREG32(DISP_INTERRUPT_STATUS); 3111 rdev->irq.stat_regs.r600.disp_int = RREG32(DISP_INTERRUPT_STATUS);
3108 *disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE); 3112 rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
3109 *disp_int_cont2 = 0; 3113 rdev->irq.stat_regs.r600.disp_int_cont2 = 0;
3110 } 3114 }
3111 3115 rdev->irq.stat_regs.r600.d1grph_int = RREG32(D1GRPH_INTERRUPT_STATUS);
3112 if (*disp_int & LB_D1_VBLANK_INTERRUPT) 3116 rdev->irq.stat_regs.r600.d2grph_int = RREG32(D2GRPH_INTERRUPT_STATUS);
3117
3118 if (rdev->irq.stat_regs.r600.d1grph_int & DxGRPH_PFLIP_INT_OCCURRED)
3119 WREG32(D1GRPH_INTERRUPT_STATUS, DxGRPH_PFLIP_INT_CLEAR);
3120 if (rdev->irq.stat_regs.r600.d2grph_int & DxGRPH_PFLIP_INT_OCCURRED)
3121 WREG32(D2GRPH_INTERRUPT_STATUS, DxGRPH_PFLIP_INT_CLEAR);
3122 if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT)
3113 WREG32(D1MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK); 3123 WREG32(D1MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
3114 if (*disp_int & LB_D1_VLINE_INTERRUPT) 3124 if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT)
3115 WREG32(D1MODE_VLINE_STATUS, DxMODE_VLINE_ACK); 3125 WREG32(D1MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
3116 if (*disp_int & LB_D2_VBLANK_INTERRUPT) 3126 if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT)
3117 WREG32(D2MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK); 3127 WREG32(D2MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
3118 if (*disp_int & LB_D2_VLINE_INTERRUPT) 3128 if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT)
3119 WREG32(D2MODE_VLINE_STATUS, DxMODE_VLINE_ACK); 3129 WREG32(D2MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
3120 if (*disp_int & DC_HPD1_INTERRUPT) { 3130 if (rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT) {
3121 if (ASIC_IS_DCE3(rdev)) { 3131 if (ASIC_IS_DCE3(rdev)) {
3122 tmp = RREG32(DC_HPD1_INT_CONTROL); 3132 tmp = RREG32(DC_HPD1_INT_CONTROL);
3123 tmp |= DC_HPDx_INT_ACK; 3133 tmp |= DC_HPDx_INT_ACK;
@@ -3128,7 +3138,7 @@ static inline void r600_irq_ack(struct radeon_device *rdev,
3128 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp); 3138 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
3129 } 3139 }
3130 } 3140 }
3131 if (*disp_int & DC_HPD2_INTERRUPT) { 3141 if (rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT) {
3132 if (ASIC_IS_DCE3(rdev)) { 3142 if (ASIC_IS_DCE3(rdev)) {
3133 tmp = RREG32(DC_HPD2_INT_CONTROL); 3143 tmp = RREG32(DC_HPD2_INT_CONTROL);
3134 tmp |= DC_HPDx_INT_ACK; 3144 tmp |= DC_HPDx_INT_ACK;
@@ -3139,7 +3149,7 @@ static inline void r600_irq_ack(struct radeon_device *rdev,
3139 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp); 3149 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
3140 } 3150 }
3141 } 3151 }
3142 if (*disp_int_cont & DC_HPD3_INTERRUPT) { 3152 if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT) {
3143 if (ASIC_IS_DCE3(rdev)) { 3153 if (ASIC_IS_DCE3(rdev)) {
3144 tmp = RREG32(DC_HPD3_INT_CONTROL); 3154 tmp = RREG32(DC_HPD3_INT_CONTROL);
3145 tmp |= DC_HPDx_INT_ACK; 3155 tmp |= DC_HPDx_INT_ACK;
@@ -3150,18 +3160,18 @@ static inline void r600_irq_ack(struct radeon_device *rdev,
3150 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp); 3160 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
3151 } 3161 }
3152 } 3162 }
3153 if (*disp_int_cont & DC_HPD4_INTERRUPT) { 3163 if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT) {
3154 tmp = RREG32(DC_HPD4_INT_CONTROL); 3164 tmp = RREG32(DC_HPD4_INT_CONTROL);
3155 tmp |= DC_HPDx_INT_ACK; 3165 tmp |= DC_HPDx_INT_ACK;
3156 WREG32(DC_HPD4_INT_CONTROL, tmp); 3166 WREG32(DC_HPD4_INT_CONTROL, tmp);
3157 } 3167 }
3158 if (ASIC_IS_DCE32(rdev)) { 3168 if (ASIC_IS_DCE32(rdev)) {
3159 if (*disp_int_cont2 & DC_HPD5_INTERRUPT) { 3169 if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT) {
3160 tmp = RREG32(DC_HPD5_INT_CONTROL); 3170 tmp = RREG32(DC_HPD5_INT_CONTROL);
3161 tmp |= DC_HPDx_INT_ACK; 3171 tmp |= DC_HPDx_INT_ACK;
3162 WREG32(DC_HPD5_INT_CONTROL, tmp); 3172 WREG32(DC_HPD5_INT_CONTROL, tmp);
3163 } 3173 }
3164 if (*disp_int_cont2 & DC_HPD6_INTERRUPT) { 3174 if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT) {
3165 tmp = RREG32(DC_HPD5_INT_CONTROL); 3175 tmp = RREG32(DC_HPD5_INT_CONTROL);
3166 tmp |= DC_HPDx_INT_ACK; 3176 tmp |= DC_HPDx_INT_ACK;
3167 WREG32(DC_HPD6_INT_CONTROL, tmp); 3177 WREG32(DC_HPD6_INT_CONTROL, tmp);
@@ -3183,12 +3193,10 @@ static inline void r600_irq_ack(struct radeon_device *rdev,
3183 3193
3184void r600_irq_disable(struct radeon_device *rdev) 3194void r600_irq_disable(struct radeon_device *rdev)
3185{ 3195{
3186 u32 disp_int, disp_int_cont, disp_int_cont2;
3187
3188 r600_disable_interrupts(rdev); 3196 r600_disable_interrupts(rdev);
3189 /* Wait and acknowledge irq */ 3197 /* Wait and acknowledge irq */
3190 mdelay(1); 3198 mdelay(1);
3191 r600_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2); 3199 r600_irq_ack(rdev);
3192 r600_disable_interrupt_state(rdev); 3200 r600_disable_interrupt_state(rdev);
3193} 3201}
3194 3202
@@ -3251,7 +3259,7 @@ int r600_irq_process(struct radeon_device *rdev)
3251 u32 wptr = r600_get_ih_wptr(rdev); 3259 u32 wptr = r600_get_ih_wptr(rdev);
3252 u32 rptr = rdev->ih.rptr; 3260 u32 rptr = rdev->ih.rptr;
3253 u32 src_id, src_data; 3261 u32 src_id, src_data;
3254 u32 ring_index, disp_int, disp_int_cont, disp_int_cont2; 3262 u32 ring_index;
3255 unsigned long flags; 3263 unsigned long flags;
3256 bool queue_hotplug = false; 3264 bool queue_hotplug = false;
3257 3265
@@ -3272,7 +3280,7 @@ int r600_irq_process(struct radeon_device *rdev)
3272 3280
3273restart_ih: 3281restart_ih:
3274 /* display interrupts */ 3282 /* display interrupts */
3275 r600_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2); 3283 r600_irq_ack(rdev);
3276 3284
3277 rdev->ih.wptr = wptr; 3285 rdev->ih.wptr = wptr;
3278 while (rptr != wptr) { 3286 while (rptr != wptr) {
@@ -3285,17 +3293,21 @@ restart_ih:
3285 case 1: /* D1 vblank/vline */ 3293 case 1: /* D1 vblank/vline */
3286 switch (src_data) { 3294 switch (src_data) {
3287 case 0: /* D1 vblank */ 3295 case 0: /* D1 vblank */
3288 if (disp_int & LB_D1_VBLANK_INTERRUPT) { 3296 if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT) {
3289 drm_handle_vblank(rdev->ddev, 0); 3297 if (rdev->irq.crtc_vblank_int[0]) {
3290 rdev->pm.vblank_sync = true; 3298 drm_handle_vblank(rdev->ddev, 0);
3291 wake_up(&rdev->irq.vblank_queue); 3299 rdev->pm.vblank_sync = true;
3292 disp_int &= ~LB_D1_VBLANK_INTERRUPT; 3300 wake_up(&rdev->irq.vblank_queue);
3301 }
3302 if (rdev->irq.pflip[0])
3303 radeon_crtc_handle_flip(rdev, 0);
3304 rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
3293 DRM_DEBUG("IH: D1 vblank\n"); 3305 DRM_DEBUG("IH: D1 vblank\n");
3294 } 3306 }
3295 break; 3307 break;
3296 case 1: /* D1 vline */ 3308 case 1: /* D1 vline */
3297 if (disp_int & LB_D1_VLINE_INTERRUPT) { 3309 if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT) {
3298 disp_int &= ~LB_D1_VLINE_INTERRUPT; 3310 rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VLINE_INTERRUPT;
3299 DRM_DEBUG("IH: D1 vline\n"); 3311 DRM_DEBUG("IH: D1 vline\n");
3300 } 3312 }
3301 break; 3313 break;
@@ -3307,17 +3319,21 @@ restart_ih:
3307 case 5: /* D2 vblank/vline */ 3319 case 5: /* D2 vblank/vline */
3308 switch (src_data) { 3320 switch (src_data) {
3309 case 0: /* D2 vblank */ 3321 case 0: /* D2 vblank */
3310 if (disp_int & LB_D2_VBLANK_INTERRUPT) { 3322 if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT) {
3311 drm_handle_vblank(rdev->ddev, 1); 3323 if (rdev->irq.crtc_vblank_int[1]) {
3312 rdev->pm.vblank_sync = true; 3324 drm_handle_vblank(rdev->ddev, 1);
3313 wake_up(&rdev->irq.vblank_queue); 3325 rdev->pm.vblank_sync = true;
3314 disp_int &= ~LB_D2_VBLANK_INTERRUPT; 3326 wake_up(&rdev->irq.vblank_queue);
3327 }
3328 if (rdev->irq.pflip[1])
3329 radeon_crtc_handle_flip(rdev, 1);
3330 rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VBLANK_INTERRUPT;
3315 DRM_DEBUG("IH: D2 vblank\n"); 3331 DRM_DEBUG("IH: D2 vblank\n");
3316 } 3332 }
3317 break; 3333 break;
3318 case 1: /* D1 vline */ 3334 case 1: /* D1 vline */
3319 if (disp_int & LB_D2_VLINE_INTERRUPT) { 3335 if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT) {
3320 disp_int &= ~LB_D2_VLINE_INTERRUPT; 3336 rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VLINE_INTERRUPT;
3321 DRM_DEBUG("IH: D2 vline\n"); 3337 DRM_DEBUG("IH: D2 vline\n");
3322 } 3338 }
3323 break; 3339 break;
@@ -3329,43 +3345,43 @@ restart_ih:
3329 case 19: /* HPD/DAC hotplug */ 3345 case 19: /* HPD/DAC hotplug */
3330 switch (src_data) { 3346 switch (src_data) {
3331 case 0: 3347 case 0:
3332 if (disp_int & DC_HPD1_INTERRUPT) { 3348 if (rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT) {
3333 disp_int &= ~DC_HPD1_INTERRUPT; 3349 rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD1_INTERRUPT;
3334 queue_hotplug = true; 3350 queue_hotplug = true;
3335 DRM_DEBUG("IH: HPD1\n"); 3351 DRM_DEBUG("IH: HPD1\n");
3336 } 3352 }
3337 break; 3353 break;
3338 case 1: 3354 case 1:
3339 if (disp_int & DC_HPD2_INTERRUPT) { 3355 if (rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT) {
3340 disp_int &= ~DC_HPD2_INTERRUPT; 3356 rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD2_INTERRUPT;
3341 queue_hotplug = true; 3357 queue_hotplug = true;
3342 DRM_DEBUG("IH: HPD2\n"); 3358 DRM_DEBUG("IH: HPD2\n");
3343 } 3359 }
3344 break; 3360 break;
3345 case 4: 3361 case 4:
3346 if (disp_int_cont & DC_HPD3_INTERRUPT) { 3362 if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT) {
3347 disp_int_cont &= ~DC_HPD3_INTERRUPT; 3363 rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD3_INTERRUPT;
3348 queue_hotplug = true; 3364 queue_hotplug = true;
3349 DRM_DEBUG("IH: HPD3\n"); 3365 DRM_DEBUG("IH: HPD3\n");
3350 } 3366 }
3351 break; 3367 break;
3352 case 5: 3368 case 5:
3353 if (disp_int_cont & DC_HPD4_INTERRUPT) { 3369 if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT) {
3354 disp_int_cont &= ~DC_HPD4_INTERRUPT; 3370 rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD4_INTERRUPT;
3355 queue_hotplug = true; 3371 queue_hotplug = true;
3356 DRM_DEBUG("IH: HPD4\n"); 3372 DRM_DEBUG("IH: HPD4\n");
3357 } 3373 }
3358 break; 3374 break;
3359 case 10: 3375 case 10:
3360 if (disp_int_cont2 & DC_HPD5_INTERRUPT) { 3376 if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT) {
3361 disp_int_cont2 &= ~DC_HPD5_INTERRUPT; 3377 rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD5_INTERRUPT;
3362 queue_hotplug = true; 3378 queue_hotplug = true;
3363 DRM_DEBUG("IH: HPD5\n"); 3379 DRM_DEBUG("IH: HPD5\n");
3364 } 3380 }
3365 break; 3381 break;
3366 case 12: 3382 case 12:
3367 if (disp_int_cont2 & DC_HPD6_INTERRUPT) { 3383 if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT) {
3368 disp_int_cont2 &= ~DC_HPD6_INTERRUPT; 3384 rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD6_INTERRUPT;
3369 queue_hotplug = true; 3385 queue_hotplug = true;
3370 DRM_DEBUG("IH: HPD6\n"); 3386 DRM_DEBUG("IH: HPD6\n");
3371 } 3387 }
diff --git a/drivers/gpu/drm/radeon/r600d.h b/drivers/gpu/drm/radeon/r600d.h
index bff4dc4f410f..c89cfa8e0c05 100644
--- a/drivers/gpu/drm/radeon/r600d.h
+++ b/drivers/gpu/drm/radeon/r600d.h
@@ -728,6 +728,15 @@
728/* DCE 3.2 */ 728/* DCE 3.2 */
729# define DC_HPDx_EN (1 << 28) 729# define DC_HPDx_EN (1 << 28)
730 730
731#define D1GRPH_INTERRUPT_STATUS 0x6158
732#define D2GRPH_INTERRUPT_STATUS 0x6958
733# define DxGRPH_PFLIP_INT_OCCURRED (1 << 0)
734# define DxGRPH_PFLIP_INT_CLEAR (1 << 8)
735#define D1GRPH_INTERRUPT_CONTROL 0x615c
736#define D2GRPH_INTERRUPT_CONTROL 0x695c
737# define DxGRPH_PFLIP_INT_MASK (1 << 0)
738# define DxGRPH_PFLIP_INT_TYPE (1 << 8)
739
731/* 740/*
732 * PM4 741 * PM4
733 */ 742 */
diff --git a/drivers/gpu/drm/radeon/radeon.h b/drivers/gpu/drm/radeon/radeon.h
index b1e073b7381f..5827a71e4094 100644
--- a/drivers/gpu/drm/radeon/radeon.h
+++ b/drivers/gpu/drm/radeon/radeon.h
@@ -377,11 +377,56 @@ void radeon_scratch_free(struct radeon_device *rdev, uint32_t reg);
377/* 377/*
378 * IRQS. 378 * IRQS.
379 */ 379 */
380
381struct radeon_unpin_work {
382 struct work_struct work;
383 struct radeon_device *rdev;
384 int crtc_id;
385 struct radeon_fence *fence;
386 struct drm_pending_vblank_event *event;
387 struct radeon_bo *old_rbo;
388 u64 new_crtc_base;
389};
390
391struct r500_irq_stat_regs {
392 u32 disp_int;
393};
394
395struct r600_irq_stat_regs {
396 u32 disp_int;
397 u32 disp_int_cont;
398 u32 disp_int_cont2;
399 u32 d1grph_int;
400 u32 d2grph_int;
401};
402
403struct evergreen_irq_stat_regs {
404 u32 disp_int;
405 u32 disp_int_cont;
406 u32 disp_int_cont2;
407 u32 disp_int_cont3;
408 u32 disp_int_cont4;
409 u32 disp_int_cont5;
410 u32 d1grph_int;
411 u32 d2grph_int;
412 u32 d3grph_int;
413 u32 d4grph_int;
414 u32 d5grph_int;
415 u32 d6grph_int;
416};
417
418union radeon_irq_stat_regs {
419 struct r500_irq_stat_regs r500;
420 struct r600_irq_stat_regs r600;
421 struct evergreen_irq_stat_regs evergreen;
422};
423
380struct radeon_irq { 424struct radeon_irq {
381 bool installed; 425 bool installed;
382 bool sw_int; 426 bool sw_int;
383 /* FIXME: use a define max crtc rather than hardcode it */ 427 /* FIXME: use a define max crtc rather than hardcode it */
384 bool crtc_vblank_int[6]; 428 bool crtc_vblank_int[6];
429 bool pflip[6];
385 wait_queue_head_t vblank_queue; 430 wait_queue_head_t vblank_queue;
386 /* FIXME: use defines for max hpd/dacs */ 431 /* FIXME: use defines for max hpd/dacs */
387 bool hpd[6]; 432 bool hpd[6];
@@ -392,12 +437,17 @@ struct radeon_irq {
392 bool hdmi[2]; 437 bool hdmi[2];
393 spinlock_t sw_lock; 438 spinlock_t sw_lock;
394 int sw_refcount; 439 int sw_refcount;
440 union radeon_irq_stat_regs stat_regs;
441 spinlock_t pflip_lock[6];
442 int pflip_refcount[6];
395}; 443};
396 444
397int radeon_irq_kms_init(struct radeon_device *rdev); 445int radeon_irq_kms_init(struct radeon_device *rdev);
398void radeon_irq_kms_fini(struct radeon_device *rdev); 446void radeon_irq_kms_fini(struct radeon_device *rdev);
399void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev); 447void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev);
400void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev); 448void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev);
449void radeon_irq_kms_pflip_irq_get(struct radeon_device *rdev, int crtc);
450void radeon_irq_kms_pflip_irq_put(struct radeon_device *rdev, int crtc);
401 451
402/* 452/*
403 * CP & ring. 453 * CP & ring.
@@ -881,6 +931,10 @@ struct radeon_asic {
881 void (*pm_finish)(struct radeon_device *rdev); 931 void (*pm_finish)(struct radeon_device *rdev);
882 void (*pm_init_profile)(struct radeon_device *rdev); 932 void (*pm_init_profile)(struct radeon_device *rdev);
883 void (*pm_get_dynpm_state)(struct radeon_device *rdev); 933 void (*pm_get_dynpm_state)(struct radeon_device *rdev);
934 /* pageflipping */
935 void (*pre_page_flip)(struct radeon_device *rdev, int crtc);
936 u32 (*page_flip)(struct radeon_device *rdev, int crtc, u64 crtc_base);
937 void (*post_page_flip)(struct radeon_device *rdev, int crtc);
884}; 938};
885 939
886/* 940/*
@@ -1344,6 +1398,9 @@ static inline void radeon_ring_write(struct radeon_device *rdev, uint32_t v)
1344#define radeon_pm_finish(rdev) (rdev)->asic->pm_finish((rdev)) 1398#define radeon_pm_finish(rdev) (rdev)->asic->pm_finish((rdev))
1345#define radeon_pm_init_profile(rdev) (rdev)->asic->pm_init_profile((rdev)) 1399#define radeon_pm_init_profile(rdev) (rdev)->asic->pm_init_profile((rdev))
1346#define radeon_pm_get_dynpm_state(rdev) (rdev)->asic->pm_get_dynpm_state((rdev)) 1400#define radeon_pm_get_dynpm_state(rdev) (rdev)->asic->pm_get_dynpm_state((rdev))
1401#define radeon_pre_page_flip(rdev, crtc) rdev->asic->pre_page_flip((rdev), (crtc))
1402#define radeon_page_flip(rdev, crtc, base) rdev->asic->page_flip((rdev), (crtc), (base))
1403#define radeon_post_page_flip(rdev, crtc) rdev->asic->post_page_flip((rdev), (crtc))
1347 1404
1348/* Common functions */ 1405/* Common functions */
1349/* AGP */ 1406/* AGP */
diff --git a/drivers/gpu/drm/radeon/radeon_asic.c b/drivers/gpu/drm/radeon/radeon_asic.c
index 64fb89ecbf74..6b126b3f5fa9 100644
--- a/drivers/gpu/drm/radeon/radeon_asic.c
+++ b/drivers/gpu/drm/radeon/radeon_asic.c
@@ -171,6 +171,9 @@ static struct radeon_asic r100_asic = {
171 .pm_finish = &r100_pm_finish, 171 .pm_finish = &r100_pm_finish,
172 .pm_init_profile = &r100_pm_init_profile, 172 .pm_init_profile = &r100_pm_init_profile,
173 .pm_get_dynpm_state = &r100_pm_get_dynpm_state, 173 .pm_get_dynpm_state = &r100_pm_get_dynpm_state,
174 .pre_page_flip = &r100_pre_page_flip,
175 .page_flip = &r100_page_flip,
176 .post_page_flip = &r100_post_page_flip,
174}; 177};
175 178
176static struct radeon_asic r200_asic = { 179static struct radeon_asic r200_asic = {
@@ -215,6 +218,9 @@ static struct radeon_asic r200_asic = {
215 .pm_finish = &r100_pm_finish, 218 .pm_finish = &r100_pm_finish,
216 .pm_init_profile = &r100_pm_init_profile, 219 .pm_init_profile = &r100_pm_init_profile,
217 .pm_get_dynpm_state = &r100_pm_get_dynpm_state, 220 .pm_get_dynpm_state = &r100_pm_get_dynpm_state,
221 .pre_page_flip = &r100_pre_page_flip,
222 .page_flip = &r100_page_flip,
223 .post_page_flip = &r100_post_page_flip,
218}; 224};
219 225
220static struct radeon_asic r300_asic = { 226static struct radeon_asic r300_asic = {
@@ -260,6 +266,9 @@ static struct radeon_asic r300_asic = {
260 .pm_finish = &r100_pm_finish, 266 .pm_finish = &r100_pm_finish,
261 .pm_init_profile = &r100_pm_init_profile, 267 .pm_init_profile = &r100_pm_init_profile,
262 .pm_get_dynpm_state = &r100_pm_get_dynpm_state, 268 .pm_get_dynpm_state = &r100_pm_get_dynpm_state,
269 .pre_page_flip = &r100_pre_page_flip,
270 .page_flip = &r100_page_flip,
271 .post_page_flip = &r100_post_page_flip,
263}; 272};
264 273
265static struct radeon_asic r300_asic_pcie = { 274static struct radeon_asic r300_asic_pcie = {
@@ -304,6 +313,9 @@ static struct radeon_asic r300_asic_pcie = {
304 .pm_finish = &r100_pm_finish, 313 .pm_finish = &r100_pm_finish,
305 .pm_init_profile = &r100_pm_init_profile, 314 .pm_init_profile = &r100_pm_init_profile,
306 .pm_get_dynpm_state = &r100_pm_get_dynpm_state, 315 .pm_get_dynpm_state = &r100_pm_get_dynpm_state,
316 .pre_page_flip = &r100_pre_page_flip,
317 .page_flip = &r100_page_flip,
318 .post_page_flip = &r100_post_page_flip,
307}; 319};
308 320
309static struct radeon_asic r420_asic = { 321static struct radeon_asic r420_asic = {
@@ -349,6 +361,9 @@ static struct radeon_asic r420_asic = {
349 .pm_finish = &r100_pm_finish, 361 .pm_finish = &r100_pm_finish,
350 .pm_init_profile = &r420_pm_init_profile, 362 .pm_init_profile = &r420_pm_init_profile,
351 .pm_get_dynpm_state = &r100_pm_get_dynpm_state, 363 .pm_get_dynpm_state = &r100_pm_get_dynpm_state,
364 .pre_page_flip = &r100_pre_page_flip,
365 .page_flip = &r100_page_flip,
366 .post_page_flip = &r100_post_page_flip,
352}; 367};
353 368
354static struct radeon_asic rs400_asic = { 369static struct radeon_asic rs400_asic = {
@@ -394,6 +409,9 @@ static struct radeon_asic rs400_asic = {
394 .pm_finish = &r100_pm_finish, 409 .pm_finish = &r100_pm_finish,
395 .pm_init_profile = &r100_pm_init_profile, 410 .pm_init_profile = &r100_pm_init_profile,
396 .pm_get_dynpm_state = &r100_pm_get_dynpm_state, 411 .pm_get_dynpm_state = &r100_pm_get_dynpm_state,
412 .pre_page_flip = &r100_pre_page_flip,
413 .page_flip = &r100_page_flip,
414 .post_page_flip = &r100_post_page_flip,
397}; 415};
398 416
399static struct radeon_asic rs600_asic = { 417static struct radeon_asic rs600_asic = {
@@ -439,6 +457,9 @@ static struct radeon_asic rs600_asic = {
439 .pm_finish = &rs600_pm_finish, 457 .pm_finish = &rs600_pm_finish,
440 .pm_init_profile = &r420_pm_init_profile, 458 .pm_init_profile = &r420_pm_init_profile,
441 .pm_get_dynpm_state = &r100_pm_get_dynpm_state, 459 .pm_get_dynpm_state = &r100_pm_get_dynpm_state,
460 .pre_page_flip = &rs600_pre_page_flip,
461 .page_flip = &rs600_page_flip,
462 .post_page_flip = &rs600_post_page_flip,
442}; 463};
443 464
444static struct radeon_asic rs690_asic = { 465static struct radeon_asic rs690_asic = {
@@ -484,6 +505,9 @@ static struct radeon_asic rs690_asic = {
484 .pm_finish = &rs600_pm_finish, 505 .pm_finish = &rs600_pm_finish,
485 .pm_init_profile = &r420_pm_init_profile, 506 .pm_init_profile = &r420_pm_init_profile,
486 .pm_get_dynpm_state = &r100_pm_get_dynpm_state, 507 .pm_get_dynpm_state = &r100_pm_get_dynpm_state,
508 .pre_page_flip = &rs600_pre_page_flip,
509 .page_flip = &rs600_page_flip,
510 .post_page_flip = &rs600_post_page_flip,
487}; 511};
488 512
489static struct radeon_asic rv515_asic = { 513static struct radeon_asic rv515_asic = {
@@ -529,6 +553,9 @@ static struct radeon_asic rv515_asic = {
529 .pm_finish = &rs600_pm_finish, 553 .pm_finish = &rs600_pm_finish,
530 .pm_init_profile = &r420_pm_init_profile, 554 .pm_init_profile = &r420_pm_init_profile,
531 .pm_get_dynpm_state = &r100_pm_get_dynpm_state, 555 .pm_get_dynpm_state = &r100_pm_get_dynpm_state,
556 .pre_page_flip = &rs600_pre_page_flip,
557 .page_flip = &rs600_page_flip,
558 .post_page_flip = &rs600_post_page_flip,
532}; 559};
533 560
534static struct radeon_asic r520_asic = { 561static struct radeon_asic r520_asic = {
@@ -574,6 +601,9 @@ static struct radeon_asic r520_asic = {
574 .pm_finish = &rs600_pm_finish, 601 .pm_finish = &rs600_pm_finish,
575 .pm_init_profile = &r420_pm_init_profile, 602 .pm_init_profile = &r420_pm_init_profile,
576 .pm_get_dynpm_state = &r100_pm_get_dynpm_state, 603 .pm_get_dynpm_state = &r100_pm_get_dynpm_state,
604 .pre_page_flip = &rs600_pre_page_flip,
605 .page_flip = &rs600_page_flip,
606 .post_page_flip = &rs600_post_page_flip,
577}; 607};
578 608
579static struct radeon_asic r600_asic = { 609static struct radeon_asic r600_asic = {
@@ -618,6 +648,9 @@ static struct radeon_asic r600_asic = {
618 .pm_finish = &rs600_pm_finish, 648 .pm_finish = &rs600_pm_finish,
619 .pm_init_profile = &r600_pm_init_profile, 649 .pm_init_profile = &r600_pm_init_profile,
620 .pm_get_dynpm_state = &r600_pm_get_dynpm_state, 650 .pm_get_dynpm_state = &r600_pm_get_dynpm_state,
651 .pre_page_flip = &rs600_pre_page_flip,
652 .page_flip = &rs600_page_flip,
653 .post_page_flip = &rs600_post_page_flip,
621}; 654};
622 655
623static struct radeon_asic rs780_asic = { 656static struct radeon_asic rs780_asic = {
@@ -662,6 +695,9 @@ static struct radeon_asic rs780_asic = {
662 .pm_finish = &rs600_pm_finish, 695 .pm_finish = &rs600_pm_finish,
663 .pm_init_profile = &rs780_pm_init_profile, 696 .pm_init_profile = &rs780_pm_init_profile,
664 .pm_get_dynpm_state = &r600_pm_get_dynpm_state, 697 .pm_get_dynpm_state = &r600_pm_get_dynpm_state,
698 .pre_page_flip = &rs600_pre_page_flip,
699 .page_flip = &rs600_page_flip,
700 .post_page_flip = &rs600_post_page_flip,
665}; 701};
666 702
667static struct radeon_asic rv770_asic = { 703static struct radeon_asic rv770_asic = {
@@ -706,6 +742,9 @@ static struct radeon_asic rv770_asic = {
706 .pm_finish = &rs600_pm_finish, 742 .pm_finish = &rs600_pm_finish,
707 .pm_init_profile = &r600_pm_init_profile, 743 .pm_init_profile = &r600_pm_init_profile,
708 .pm_get_dynpm_state = &r600_pm_get_dynpm_state, 744 .pm_get_dynpm_state = &r600_pm_get_dynpm_state,
745 .pre_page_flip = &rs600_pre_page_flip,
746 .page_flip = &rv770_page_flip,
747 .post_page_flip = &rs600_post_page_flip,
709}; 748};
710 749
711static struct radeon_asic evergreen_asic = { 750static struct radeon_asic evergreen_asic = {
@@ -749,6 +788,9 @@ static struct radeon_asic evergreen_asic = {
749 .pm_finish = &evergreen_pm_finish, 788 .pm_finish = &evergreen_pm_finish,
750 .pm_init_profile = &r600_pm_init_profile, 789 .pm_init_profile = &r600_pm_init_profile,
751 .pm_get_dynpm_state = &r600_pm_get_dynpm_state, 790 .pm_get_dynpm_state = &r600_pm_get_dynpm_state,
791 .pre_page_flip = &evergreen_pre_page_flip,
792 .page_flip = &evergreen_page_flip,
793 .post_page_flip = &evergreen_post_page_flip,
752}; 794};
753 795
754int radeon_asic_init(struct radeon_device *rdev) 796int radeon_asic_init(struct radeon_device *rdev)
diff --git a/drivers/gpu/drm/radeon/radeon_asic.h b/drivers/gpu/drm/radeon/radeon_asic.h
index 740988244143..4970eda1bd41 100644
--- a/drivers/gpu/drm/radeon/radeon_asic.h
+++ b/drivers/gpu/drm/radeon/radeon_asic.h
@@ -130,6 +130,9 @@ extern void r100_pm_prepare(struct radeon_device *rdev);
130extern void r100_pm_finish(struct radeon_device *rdev); 130extern void r100_pm_finish(struct radeon_device *rdev);
131extern void r100_pm_init_profile(struct radeon_device *rdev); 131extern void r100_pm_init_profile(struct radeon_device *rdev);
132extern void r100_pm_get_dynpm_state(struct radeon_device *rdev); 132extern void r100_pm_get_dynpm_state(struct radeon_device *rdev);
133extern void r100_pre_page_flip(struct radeon_device *rdev, int crtc);
134extern u32 r100_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
135extern void r100_post_page_flip(struct radeon_device *rdev, int crtc);
133 136
134/* 137/*
135 * r200,rv250,rs300,rv280 138 * r200,rv250,rs300,rv280
@@ -205,6 +208,9 @@ void rs600_hpd_set_polarity(struct radeon_device *rdev,
205extern void rs600_pm_misc(struct radeon_device *rdev); 208extern void rs600_pm_misc(struct radeon_device *rdev);
206extern void rs600_pm_prepare(struct radeon_device *rdev); 209extern void rs600_pm_prepare(struct radeon_device *rdev);
207extern void rs600_pm_finish(struct radeon_device *rdev); 210extern void rs600_pm_finish(struct radeon_device *rdev);
211extern void rs600_pre_page_flip(struct radeon_device *rdev, int crtc);
212extern u32 rs600_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
213extern void rs600_post_page_flip(struct radeon_device *rdev, int crtc);
208 214
209/* 215/*
210 * rs690,rs740 216 * rs690,rs740
@@ -287,6 +293,7 @@ void rv770_fini(struct radeon_device *rdev);
287int rv770_suspend(struct radeon_device *rdev); 293int rv770_suspend(struct radeon_device *rdev);
288int rv770_resume(struct radeon_device *rdev); 294int rv770_resume(struct radeon_device *rdev);
289extern void rv770_pm_misc(struct radeon_device *rdev); 295extern void rv770_pm_misc(struct radeon_device *rdev);
296extern u32 rv770_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
290 297
291/* 298/*
292 * evergreen 299 * evergreen
@@ -314,5 +321,8 @@ extern int evergreen_cs_parse(struct radeon_cs_parser *p);
314extern void evergreen_pm_misc(struct radeon_device *rdev); 321extern void evergreen_pm_misc(struct radeon_device *rdev);
315extern void evergreen_pm_prepare(struct radeon_device *rdev); 322extern void evergreen_pm_prepare(struct radeon_device *rdev);
316extern void evergreen_pm_finish(struct radeon_device *rdev); 323extern void evergreen_pm_finish(struct radeon_device *rdev);
324extern void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc);
325extern u32 evergreen_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
326extern void evergreen_post_page_flip(struct radeon_device *rdev, int crtc);
317 327
318#endif 328#endif
diff --git a/drivers/gpu/drm/radeon/radeon_display.c b/drivers/gpu/drm/radeon/radeon_display.c
index 1df4dc6c063c..15f24f2ee04d 100644
--- a/drivers/gpu/drm/radeon/radeon_display.c
+++ b/drivers/gpu/drm/radeon/radeon_display.c
@@ -183,12 +183,272 @@ static void radeon_crtc_destroy(struct drm_crtc *crtc)
183 kfree(radeon_crtc); 183 kfree(radeon_crtc);
184} 184}
185 185
186/*
187 * Handle unpin events outside the interrupt handler proper.
188 */
189static void radeon_unpin_work_func(struct work_struct *__work)
190{
191 struct radeon_unpin_work *work =
192 container_of(__work, struct radeon_unpin_work, work);
193 int r;
194
195 /* unpin of the old buffer */
196 r = radeon_bo_reserve(work->old_rbo, false);
197 if (likely(r == 0)) {
198 r = radeon_bo_unpin(work->old_rbo);
199 if (unlikely(r != 0)) {
200 DRM_ERROR("failed to unpin buffer after flip\n");
201 }
202 radeon_bo_unreserve(work->old_rbo);
203 } else
204 DRM_ERROR("failed to reserve buffer after flip\n");
205 kfree(work);
206}
207
208void radeon_crtc_handle_flip(struct radeon_device *rdev, int crtc_id)
209{
210 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
211 struct radeon_unpin_work *work;
212 struct drm_pending_vblank_event *e;
213 struct timeval now;
214 unsigned long flags;
215 u32 update_pending;
216 int vpos, hpos;
217
218 spin_lock_irqsave(&rdev->ddev->event_lock, flags);
219 work = radeon_crtc->unpin_work;
220 if (work == NULL ||
221 !radeon_fence_signaled(work->fence)) {
222 spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
223 return;
224 }
225 /* New pageflip, or just completion of a previous one? */
226 if (!radeon_crtc->deferred_flip_completion) {
227 /* do the flip (mmio) */
228 update_pending = radeon_page_flip(rdev, crtc_id, work->new_crtc_base);
229 } else {
230 /* This is just a completion of a flip queued in crtc
231 * at last invocation. Make sure we go directly to
232 * completion routine.
233 */
234 update_pending = 0;
235 radeon_crtc->deferred_flip_completion = 0;
236 }
237
238 /* Has the pageflip already completed in crtc, or is it certain
239 * to complete in this vblank?
240 */
241 if (update_pending &&
242 (DRM_SCANOUTPOS_VALID & radeon_get_crtc_scanoutpos(rdev->ddev, crtc_id,
243 &vpos, &hpos)) &&
244 (vpos >=0) &&
245 (vpos < (99 * rdev->mode_info.crtcs[crtc_id]->base.hwmode.crtc_vdisplay)/100)) {
246 /* crtc didn't flip in this target vblank interval,
247 * but flip is pending in crtc. It will complete it
248 * in next vblank interval, so complete the flip at
249 * next vblank irq.
250 */
251 radeon_crtc->deferred_flip_completion = 1;
252 spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
253 return;
254 }
255
256 /* Pageflip (will be) certainly completed in this vblank. Clean up. */
257 radeon_crtc->unpin_work = NULL;
258
259 /* wakeup userspace */
260 if (work->event) {
261 e = work->event;
262 e->event.sequence = drm_vblank_count_and_time(rdev->ddev, crtc_id, &now);
263 e->event.tv_sec = now.tv_sec;
264 e->event.tv_usec = now.tv_usec;
265 list_add_tail(&e->base.link, &e->base.file_priv->event_list);
266 wake_up_interruptible(&e->base.file_priv->event_wait);
267 }
268 spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
269
270 drm_vblank_put(rdev->ddev, radeon_crtc->crtc_id);
271 radeon_fence_unref(&work->fence);
272 radeon_post_page_flip(work->rdev, work->crtc_id);
273 schedule_work(&work->work);
274}
275
276static int radeon_crtc_page_flip(struct drm_crtc *crtc,
277 struct drm_framebuffer *fb,
278 struct drm_pending_vblank_event *event)
279{
280 struct drm_device *dev = crtc->dev;
281 struct radeon_device *rdev = dev->dev_private;
282 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
283 struct radeon_framebuffer *old_radeon_fb;
284 struct radeon_framebuffer *new_radeon_fb;
285 struct drm_gem_object *obj;
286 struct radeon_bo *rbo;
287 struct radeon_fence *fence;
288 struct radeon_unpin_work *work;
289 unsigned long flags;
290 u32 tiling_flags, pitch_pixels;
291 u64 base;
292 int r;
293
294 work = kzalloc(sizeof *work, GFP_KERNEL);
295 if (work == NULL)
296 return -ENOMEM;
297
298 r = radeon_fence_create(rdev, &fence);
299 if (unlikely(r != 0)) {
300 kfree(work);
301 DRM_ERROR("flip queue: failed to create fence.\n");
302 return -ENOMEM;
303 }
304 work->event = event;
305 work->rdev = rdev;
306 work->crtc_id = radeon_crtc->crtc_id;
307 work->fence = radeon_fence_ref(fence);
308 old_radeon_fb = to_radeon_framebuffer(crtc->fb);
309 new_radeon_fb = to_radeon_framebuffer(fb);
310 /* schedule unpin of the old buffer */
311 obj = old_radeon_fb->obj;
312 rbo = obj->driver_private;
313 work->old_rbo = rbo;
314 INIT_WORK(&work->work, radeon_unpin_work_func);
315
316 /* We borrow the event spin lock for protecting unpin_work */
317 spin_lock_irqsave(&dev->event_lock, flags);
318 if (radeon_crtc->unpin_work) {
319 spin_unlock_irqrestore(&dev->event_lock, flags);
320 kfree(work);
321 radeon_fence_unref(&fence);
322
323 DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
324 return -EBUSY;
325 }
326 radeon_crtc->unpin_work = work;
327 radeon_crtc->deferred_flip_completion = 0;
328 spin_unlock_irqrestore(&dev->event_lock, flags);
329
330 /* pin the new buffer */
331 obj = new_radeon_fb->obj;
332 rbo = obj->driver_private;
333
334 DRM_DEBUG_DRIVER("flip-ioctl() cur_fbo = %p, cur_bbo = %p\n",
335 work->old_rbo, rbo);
336
337 r = radeon_bo_reserve(rbo, false);
338 if (unlikely(r != 0)) {
339 DRM_ERROR("failed to reserve new rbo buffer before flip\n");
340 goto pflip_cleanup;
341 }
342 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &base);
343 if (unlikely(r != 0)) {
344 radeon_bo_unreserve(rbo);
345 r = -EINVAL;
346 DRM_ERROR("failed to pin new rbo buffer before flip\n");
347 goto pflip_cleanup;
348 }
349 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
350 radeon_bo_unreserve(rbo);
351
352 if (!ASIC_IS_AVIVO(rdev)) {
353 /* crtc offset is from display base addr not FB location */
354 base -= radeon_crtc->legacy_display_base_addr;
355 pitch_pixels = fb->pitch / (fb->bits_per_pixel / 8);
356
357 if (tiling_flags & RADEON_TILING_MACRO) {
358 if (ASIC_IS_R300(rdev)) {
359 base &= ~0x7ff;
360 } else {
361 int byteshift = fb->bits_per_pixel >> 4;
362 int tile_addr = (((crtc->y >> 3) * pitch_pixels + crtc->x) >> (8 - byteshift)) << 11;
363 base += tile_addr + ((crtc->x << byteshift) % 256) + ((crtc->y % 8) << 8);
364 }
365 } else {
366 int offset = crtc->y * pitch_pixels + crtc->x;
367 switch (fb->bits_per_pixel) {
368 case 8:
369 default:
370 offset *= 1;
371 break;
372 case 15:
373 case 16:
374 offset *= 2;
375 break;
376 case 24:
377 offset *= 3;
378 break;
379 case 32:
380 offset *= 4;
381 break;
382 }
383 base += offset;
384 }
385 base &= ~7;
386 }
387
388 spin_lock_irqsave(&dev->event_lock, flags);
389 work->new_crtc_base = base;
390 spin_unlock_irqrestore(&dev->event_lock, flags);
391
392 /* update crtc fb */
393 crtc->fb = fb;
394
395 r = drm_vblank_get(dev, radeon_crtc->crtc_id);
396 if (r) {
397 DRM_ERROR("failed to get vblank before flip\n");
398 goto pflip_cleanup1;
399 }
400
401 /* 32 ought to cover us */
402 r = radeon_ring_lock(rdev, 32);
403 if (r) {
404 DRM_ERROR("failed to lock the ring before flip\n");
405 goto pflip_cleanup2;
406 }
407
408 /* emit the fence */
409 radeon_fence_emit(rdev, fence);
410 /* set the proper interrupt */
411 radeon_pre_page_flip(rdev, radeon_crtc->crtc_id);
412 /* fire the ring */
413 radeon_ring_unlock_commit(rdev);
414
415 return 0;
416
417pflip_cleanup2:
418 drm_vblank_put(dev, radeon_crtc->crtc_id);
419
420pflip_cleanup1:
421 r = radeon_bo_reserve(rbo, false);
422 if (unlikely(r != 0)) {
423 DRM_ERROR("failed to reserve new rbo in error path\n");
424 goto pflip_cleanup;
425 }
426 r = radeon_bo_unpin(rbo);
427 if (unlikely(r != 0)) {
428 radeon_bo_unreserve(rbo);
429 r = -EINVAL;
430 DRM_ERROR("failed to unpin new rbo in error path\n");
431 goto pflip_cleanup;
432 }
433 radeon_bo_unreserve(rbo);
434
435pflip_cleanup:
436 spin_lock_irqsave(&dev->event_lock, flags);
437 radeon_crtc->unpin_work = NULL;
438 spin_unlock_irqrestore(&dev->event_lock, flags);
439 radeon_fence_unref(&fence);
440 kfree(work);
441
442 return r;
443}
444
186static const struct drm_crtc_funcs radeon_crtc_funcs = { 445static const struct drm_crtc_funcs radeon_crtc_funcs = {
187 .cursor_set = radeon_crtc_cursor_set, 446 .cursor_set = radeon_crtc_cursor_set,
188 .cursor_move = radeon_crtc_cursor_move, 447 .cursor_move = radeon_crtc_cursor_move,
189 .gamma_set = radeon_crtc_gamma_set, 448 .gamma_set = radeon_crtc_gamma_set,
190 .set_config = drm_crtc_helper_set_config, 449 .set_config = drm_crtc_helper_set_config,
191 .destroy = radeon_crtc_destroy, 450 .destroy = radeon_crtc_destroy,
451 .page_flip = radeon_crtc_page_flip,
192}; 452};
193 453
194static void radeon_crtc_init(struct drm_device *dev, int index) 454static void radeon_crtc_init(struct drm_device *dev, int index)
@@ -1019,7 +1279,7 @@ bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
1019/* 1279/*
1020 * Retrieve current video scanout position of crtc on a given gpu. 1280 * Retrieve current video scanout position of crtc on a given gpu.
1021 * 1281 *
1022 * \param rdev Device to query. 1282 * \param dev Device to query.
1023 * \param crtc Crtc to query. 1283 * \param crtc Crtc to query.
1024 * \param *vpos Location where vertical scanout position should be stored. 1284 * \param *vpos Location where vertical scanout position should be stored.
1025 * \param *hpos Location where horizontal scanout position should go. 1285 * \param *hpos Location where horizontal scanout position should go.
@@ -1031,72 +1291,74 @@ bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
1031 * 1291 *
1032 * \return Flags, or'ed together as follows: 1292 * \return Flags, or'ed together as follows:
1033 * 1293 *
1034 * RADEON_SCANOUTPOS_VALID = Query successfull. 1294 * DRM_SCANOUTPOS_VALID = Query successfull.
1035 * RADEON_SCANOUTPOS_INVBL = Inside vblank. 1295 * DRM_SCANOUTPOS_INVBL = Inside vblank.
1036 * RADEON_SCANOUTPOS_ACCURATE = Returned position is accurate. A lack of 1296 * DRM_SCANOUTPOS_ACCURATE = Returned position is accurate. A lack of
1037 * this flag means that returned position may be offset by a constant but 1297 * this flag means that returned position may be offset by a constant but
1038 * unknown small number of scanlines wrt. real scanout position. 1298 * unknown small number of scanlines wrt. real scanout position.
1039 * 1299 *
1040 */ 1300 */
1041int radeon_get_crtc_scanoutpos(struct radeon_device *rdev, int crtc, int *vpos, int *hpos) 1301int radeon_get_crtc_scanoutpos(struct drm_device *dev, int crtc, int *vpos, int *hpos)
1042{ 1302{
1043 u32 stat_crtc = 0, vbl = 0, position = 0; 1303 u32 stat_crtc = 0, vbl = 0, position = 0;
1044 int vbl_start, vbl_end, vtotal, ret = 0; 1304 int vbl_start, vbl_end, vtotal, ret = 0;
1045 bool in_vbl = true; 1305 bool in_vbl = true;
1046 1306
1307 struct radeon_device *rdev = dev->dev_private;
1308
1047 if (ASIC_IS_DCE4(rdev)) { 1309 if (ASIC_IS_DCE4(rdev)) {
1048 if (crtc == 0) { 1310 if (crtc == 0) {
1049 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END + 1311 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1050 EVERGREEN_CRTC0_REGISTER_OFFSET); 1312 EVERGREEN_CRTC0_REGISTER_OFFSET);
1051 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION + 1313 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1052 EVERGREEN_CRTC0_REGISTER_OFFSET); 1314 EVERGREEN_CRTC0_REGISTER_OFFSET);
1053 ret |= RADEON_SCANOUTPOS_VALID; 1315 ret |= DRM_SCANOUTPOS_VALID;
1054 } 1316 }
1055 if (crtc == 1) { 1317 if (crtc == 1) {
1056 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END + 1318 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1057 EVERGREEN_CRTC1_REGISTER_OFFSET); 1319 EVERGREEN_CRTC1_REGISTER_OFFSET);
1058 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION + 1320 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1059 EVERGREEN_CRTC1_REGISTER_OFFSET); 1321 EVERGREEN_CRTC1_REGISTER_OFFSET);
1060 ret |= RADEON_SCANOUTPOS_VALID; 1322 ret |= DRM_SCANOUTPOS_VALID;
1061 } 1323 }
1062 if (crtc == 2) { 1324 if (crtc == 2) {
1063 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END + 1325 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1064 EVERGREEN_CRTC2_REGISTER_OFFSET); 1326 EVERGREEN_CRTC2_REGISTER_OFFSET);
1065 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION + 1327 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1066 EVERGREEN_CRTC2_REGISTER_OFFSET); 1328 EVERGREEN_CRTC2_REGISTER_OFFSET);
1067 ret |= RADEON_SCANOUTPOS_VALID; 1329 ret |= DRM_SCANOUTPOS_VALID;
1068 } 1330 }
1069 if (crtc == 3) { 1331 if (crtc == 3) {
1070 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END + 1332 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1071 EVERGREEN_CRTC3_REGISTER_OFFSET); 1333 EVERGREEN_CRTC3_REGISTER_OFFSET);
1072 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION + 1334 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1073 EVERGREEN_CRTC3_REGISTER_OFFSET); 1335 EVERGREEN_CRTC3_REGISTER_OFFSET);
1074 ret |= RADEON_SCANOUTPOS_VALID; 1336 ret |= DRM_SCANOUTPOS_VALID;
1075 } 1337 }
1076 if (crtc == 4) { 1338 if (crtc == 4) {
1077 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END + 1339 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1078 EVERGREEN_CRTC4_REGISTER_OFFSET); 1340 EVERGREEN_CRTC4_REGISTER_OFFSET);
1079 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION + 1341 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1080 EVERGREEN_CRTC4_REGISTER_OFFSET); 1342 EVERGREEN_CRTC4_REGISTER_OFFSET);
1081 ret |= RADEON_SCANOUTPOS_VALID; 1343 ret |= DRM_SCANOUTPOS_VALID;
1082 } 1344 }
1083 if (crtc == 5) { 1345 if (crtc == 5) {
1084 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END + 1346 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1085 EVERGREEN_CRTC5_REGISTER_OFFSET); 1347 EVERGREEN_CRTC5_REGISTER_OFFSET);
1086 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION + 1348 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1087 EVERGREEN_CRTC5_REGISTER_OFFSET); 1349 EVERGREEN_CRTC5_REGISTER_OFFSET);
1088 ret |= RADEON_SCANOUTPOS_VALID; 1350 ret |= DRM_SCANOUTPOS_VALID;
1089 } 1351 }
1090 } else if (ASIC_IS_AVIVO(rdev)) { 1352 } else if (ASIC_IS_AVIVO(rdev)) {
1091 if (crtc == 0) { 1353 if (crtc == 0) {
1092 vbl = RREG32(AVIVO_D1CRTC_V_BLANK_START_END); 1354 vbl = RREG32(AVIVO_D1CRTC_V_BLANK_START_END);
1093 position = RREG32(AVIVO_D1CRTC_STATUS_POSITION); 1355 position = RREG32(AVIVO_D1CRTC_STATUS_POSITION);
1094 ret |= RADEON_SCANOUTPOS_VALID; 1356 ret |= DRM_SCANOUTPOS_VALID;
1095 } 1357 }
1096 if (crtc == 1) { 1358 if (crtc == 1) {
1097 vbl = RREG32(AVIVO_D2CRTC_V_BLANK_START_END); 1359 vbl = RREG32(AVIVO_D2CRTC_V_BLANK_START_END);
1098 position = RREG32(AVIVO_D2CRTC_STATUS_POSITION); 1360 position = RREG32(AVIVO_D2CRTC_STATUS_POSITION);
1099 ret |= RADEON_SCANOUTPOS_VALID; 1361 ret |= DRM_SCANOUTPOS_VALID;
1100 } 1362 }
1101 } else { 1363 } else {
1102 /* Pre-AVIVO: Different encoding of scanout pos and vblank interval. */ 1364 /* Pre-AVIVO: Different encoding of scanout pos and vblank interval. */
@@ -1112,7 +1374,7 @@ int radeon_get_crtc_scanoutpos(struct radeon_device *rdev, int crtc, int *vpos,
1112 if (!(stat_crtc & 1)) 1374 if (!(stat_crtc & 1))
1113 in_vbl = false; 1375 in_vbl = false;
1114 1376
1115 ret |= RADEON_SCANOUTPOS_VALID; 1377 ret |= DRM_SCANOUTPOS_VALID;
1116 } 1378 }
1117 if (crtc == 1) { 1379 if (crtc == 1) {
1118 vbl = (RREG32(RADEON_CRTC2_V_TOTAL_DISP) & 1380 vbl = (RREG32(RADEON_CRTC2_V_TOTAL_DISP) &
@@ -1122,7 +1384,7 @@ int radeon_get_crtc_scanoutpos(struct radeon_device *rdev, int crtc, int *vpos,
1122 if (!(stat_crtc & 1)) 1384 if (!(stat_crtc & 1))
1123 in_vbl = false; 1385 in_vbl = false;
1124 1386
1125 ret |= RADEON_SCANOUTPOS_VALID; 1387 ret |= DRM_SCANOUTPOS_VALID;
1126 } 1388 }
1127 } 1389 }
1128 1390
@@ -1133,13 +1395,13 @@ int radeon_get_crtc_scanoutpos(struct radeon_device *rdev, int crtc, int *vpos,
1133 /* Valid vblank area boundaries from gpu retrieved? */ 1395 /* Valid vblank area boundaries from gpu retrieved? */
1134 if (vbl > 0) { 1396 if (vbl > 0) {
1135 /* Yes: Decode. */ 1397 /* Yes: Decode. */
1136 ret |= RADEON_SCANOUTPOS_ACCURATE; 1398 ret |= DRM_SCANOUTPOS_ACCURATE;
1137 vbl_start = vbl & 0x1fff; 1399 vbl_start = vbl & 0x1fff;
1138 vbl_end = (vbl >> 16) & 0x1fff; 1400 vbl_end = (vbl >> 16) & 0x1fff;
1139 } 1401 }
1140 else { 1402 else {
1141 /* No: Fake something reasonable which gives at least ok results. */ 1403 /* No: Fake something reasonable which gives at least ok results. */
1142 vbl_start = rdev->mode_info.crtcs[crtc]->base.mode.crtc_vdisplay; 1404 vbl_start = rdev->mode_info.crtcs[crtc]->base.hwmode.crtc_vdisplay;
1143 vbl_end = 0; 1405 vbl_end = 0;
1144 } 1406 }
1145 1407
@@ -1155,7 +1417,7 @@ int radeon_get_crtc_scanoutpos(struct radeon_device *rdev, int crtc, int *vpos,
1155 1417
1156 /* Inside "upper part" of vblank area? Apply corrective offset if so: */ 1418 /* Inside "upper part" of vblank area? Apply corrective offset if so: */
1157 if (in_vbl && (*vpos >= vbl_start)) { 1419 if (in_vbl && (*vpos >= vbl_start)) {
1158 vtotal = rdev->mode_info.crtcs[crtc]->base.mode.crtc_vtotal; 1420 vtotal = rdev->mode_info.crtcs[crtc]->base.hwmode.crtc_vtotal;
1159 *vpos = *vpos - vtotal; 1421 *vpos = *vpos - vtotal;
1160 } 1422 }
1161 1423
@@ -1164,7 +1426,7 @@ int radeon_get_crtc_scanoutpos(struct radeon_device *rdev, int crtc, int *vpos,
1164 1426
1165 /* In vblank? */ 1427 /* In vblank? */
1166 if (in_vbl) 1428 if (in_vbl)
1167 ret |= RADEON_SCANOUTPOS_INVBL; 1429 ret |= DRM_SCANOUTPOS_INVBL;
1168 1430
1169 return ret; 1431 return ret;
1170} 1432}
diff --git a/drivers/gpu/drm/radeon/radeon_drv.c b/drivers/gpu/drm/radeon/radeon_drv.c
index 88e4ea925900..a92d2a5cea90 100644
--- a/drivers/gpu/drm/radeon/radeon_drv.c
+++ b/drivers/gpu/drm/radeon/radeon_drv.c
@@ -48,9 +48,10 @@
48 * - 2.5.0 - add get accel 2 to work around ddx breakage for evergreen 48 * - 2.5.0 - add get accel 2 to work around ddx breakage for evergreen
49 * - 2.6.0 - add tiling config query (r6xx+), add initial HiZ support (r300->r500) 49 * - 2.6.0 - add tiling config query (r6xx+), add initial HiZ support (r300->r500)
50 * 2.7.0 - fixups for r600 2D tiling support. (no external ABI change), add eg dyn gpr regs 50 * 2.7.0 - fixups for r600 2D tiling support. (no external ABI change), add eg dyn gpr regs
51 * 2.8.0 - pageflip support
51 */ 52 */
52#define KMS_DRIVER_MAJOR 2 53#define KMS_DRIVER_MAJOR 2
53#define KMS_DRIVER_MINOR 7 54#define KMS_DRIVER_MINOR 8
54#define KMS_DRIVER_PATCHLEVEL 0 55#define KMS_DRIVER_PATCHLEVEL 0
55int radeon_driver_load_kms(struct drm_device *dev, unsigned long flags); 56int radeon_driver_load_kms(struct drm_device *dev, unsigned long flags);
56int radeon_driver_unload_kms(struct drm_device *dev); 57int radeon_driver_unload_kms(struct drm_device *dev);
@@ -66,6 +67,10 @@ int radeon_resume_kms(struct drm_device *dev);
66u32 radeon_get_vblank_counter_kms(struct drm_device *dev, int crtc); 67u32 radeon_get_vblank_counter_kms(struct drm_device *dev, int crtc);
67int radeon_enable_vblank_kms(struct drm_device *dev, int crtc); 68int radeon_enable_vblank_kms(struct drm_device *dev, int crtc);
68void radeon_disable_vblank_kms(struct drm_device *dev, int crtc); 69void radeon_disable_vblank_kms(struct drm_device *dev, int crtc);
70int radeon_get_vblank_timestamp_kms(struct drm_device *dev, int crtc,
71 int *max_error,
72 struct timeval *vblank_time,
73 unsigned flags);
69void radeon_driver_irq_preinstall_kms(struct drm_device *dev); 74void radeon_driver_irq_preinstall_kms(struct drm_device *dev);
70int radeon_driver_irq_postinstall_kms(struct drm_device *dev); 75int radeon_driver_irq_postinstall_kms(struct drm_device *dev);
71void radeon_driver_irq_uninstall_kms(struct drm_device *dev); 76void radeon_driver_irq_uninstall_kms(struct drm_device *dev);
@@ -74,6 +79,8 @@ int radeon_dma_ioctl_kms(struct drm_device *dev, void *data,
74 struct drm_file *file_priv); 79 struct drm_file *file_priv);
75int radeon_gem_object_init(struct drm_gem_object *obj); 80int radeon_gem_object_init(struct drm_gem_object *obj);
76void radeon_gem_object_free(struct drm_gem_object *obj); 81void radeon_gem_object_free(struct drm_gem_object *obj);
82extern int radeon_get_crtc_scanoutpos(struct drm_device *dev, int crtc,
83 int *vpos, int *hpos);
77extern struct drm_ioctl_desc radeon_ioctls_kms[]; 84extern struct drm_ioctl_desc radeon_ioctls_kms[];
78extern int radeon_max_kms_ioctl; 85extern int radeon_max_kms_ioctl;
79int radeon_mmap(struct file *filp, struct vm_area_struct *vma); 86int radeon_mmap(struct file *filp, struct vm_area_struct *vma);
@@ -277,6 +284,8 @@ static struct drm_driver kms_driver = {
277 .get_vblank_counter = radeon_get_vblank_counter_kms, 284 .get_vblank_counter = radeon_get_vblank_counter_kms,
278 .enable_vblank = radeon_enable_vblank_kms, 285 .enable_vblank = radeon_enable_vblank_kms,
279 .disable_vblank = radeon_disable_vblank_kms, 286 .disable_vblank = radeon_disable_vblank_kms,
287 .get_vblank_timestamp = radeon_get_vblank_timestamp_kms,
288 .get_scanout_position = radeon_get_crtc_scanoutpos,
280#if defined(CONFIG_DEBUG_FS) 289#if defined(CONFIG_DEBUG_FS)
281 .debugfs_init = radeon_debugfs_init, 290 .debugfs_init = radeon_debugfs_init,
282 .debugfs_cleanup = radeon_debugfs_cleanup, 291 .debugfs_cleanup = radeon_debugfs_cleanup,
diff --git a/drivers/gpu/drm/radeon/radeon_irq_kms.c b/drivers/gpu/drm/radeon/radeon_irq_kms.c
index a108c7ed14f5..e0d1c6d1b9c7 100644
--- a/drivers/gpu/drm/radeon/radeon_irq_kms.c
+++ b/drivers/gpu/drm/radeon/radeon_irq_kms.c
@@ -71,8 +71,10 @@ void radeon_driver_irq_preinstall_kms(struct drm_device *dev)
71 rdev->irq.gui_idle = false; 71 rdev->irq.gui_idle = false;
72 for (i = 0; i < rdev->num_crtc; i++) 72 for (i = 0; i < rdev->num_crtc; i++)
73 rdev->irq.crtc_vblank_int[i] = false; 73 rdev->irq.crtc_vblank_int[i] = false;
74 for (i = 0; i < 6; i++) 74 for (i = 0; i < 6; i++) {
75 rdev->irq.hpd[i] = false; 75 rdev->irq.hpd[i] = false;
76 rdev->irq.pflip[i] = false;
77 }
76 radeon_irq_set(rdev); 78 radeon_irq_set(rdev);
77 /* Clear bits */ 79 /* Clear bits */
78 radeon_irq_process(rdev); 80 radeon_irq_process(rdev);
@@ -101,8 +103,10 @@ void radeon_driver_irq_uninstall_kms(struct drm_device *dev)
101 rdev->irq.gui_idle = false; 103 rdev->irq.gui_idle = false;
102 for (i = 0; i < rdev->num_crtc; i++) 104 for (i = 0; i < rdev->num_crtc; i++)
103 rdev->irq.crtc_vblank_int[i] = false; 105 rdev->irq.crtc_vblank_int[i] = false;
104 for (i = 0; i < 6; i++) 106 for (i = 0; i < 6; i++) {
105 rdev->irq.hpd[i] = false; 107 rdev->irq.hpd[i] = false;
108 rdev->irq.pflip[i] = false;
109 }
106 radeon_irq_set(rdev); 110 radeon_irq_set(rdev);
107} 111}
108 112
@@ -175,3 +179,34 @@ void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev)
175 spin_unlock_irqrestore(&rdev->irq.sw_lock, irqflags); 179 spin_unlock_irqrestore(&rdev->irq.sw_lock, irqflags);
176} 180}
177 181
182void radeon_irq_kms_pflip_irq_get(struct radeon_device *rdev, int crtc)
183{
184 unsigned long irqflags;
185
186 if (crtc < 0 || crtc >= rdev->num_crtc)
187 return;
188
189 spin_lock_irqsave(&rdev->irq.pflip_lock[crtc], irqflags);
190 if (rdev->ddev->irq_enabled && (++rdev->irq.pflip_refcount[crtc] == 1)) {
191 rdev->irq.pflip[crtc] = true;
192 radeon_irq_set(rdev);
193 }
194 spin_unlock_irqrestore(&rdev->irq.pflip_lock[crtc], irqflags);
195}
196
197void radeon_irq_kms_pflip_irq_put(struct radeon_device *rdev, int crtc)
198{
199 unsigned long irqflags;
200
201 if (crtc < 0 || crtc >= rdev->num_crtc)
202 return;
203
204 spin_lock_irqsave(&rdev->irq.pflip_lock[crtc], irqflags);
205 BUG_ON(rdev->ddev->irq_enabled && rdev->irq.pflip_refcount[crtc] <= 0);
206 if (rdev->ddev->irq_enabled && (--rdev->irq.pflip_refcount[crtc] == 0)) {
207 rdev->irq.pflip[crtc] = false;
208 radeon_irq_set(rdev);
209 }
210 spin_unlock_irqrestore(&rdev->irq.pflip_lock[crtc], irqflags);
211}
212
diff --git a/drivers/gpu/drm/radeon/radeon_kms.c b/drivers/gpu/drm/radeon/radeon_kms.c
index 8fbbe1c6ebbd..4bf423ca4c12 100644
--- a/drivers/gpu/drm/radeon/radeon_kms.c
+++ b/drivers/gpu/drm/radeon/radeon_kms.c
@@ -277,6 +277,27 @@ void radeon_disable_vblank_kms(struct drm_device *dev, int crtc)
277 radeon_irq_set(rdev); 277 radeon_irq_set(rdev);
278} 278}
279 279
280int radeon_get_vblank_timestamp_kms(struct drm_device *dev, int crtc,
281 int *max_error,
282 struct timeval *vblank_time,
283 unsigned flags)
284{
285 struct drm_crtc *drmcrtc;
286 struct radeon_device *rdev = dev->dev_private;
287
288 if (crtc < 0 || crtc >= dev->num_crtcs) {
289 DRM_ERROR("Invalid crtc %d\n", crtc);
290 return -EINVAL;
291 }
292
293 /* Get associated drm_crtc: */
294 drmcrtc = &rdev->mode_info.crtcs[crtc]->base;
295
296 /* Helper routine in DRM core does all the work: */
297 return drm_calc_vbltimestamp_from_scanoutpos(dev, crtc, max_error,
298 vblank_time, flags,
299 drmcrtc);
300}
280 301
281/* 302/*
282 * IOCTL. 303 * IOCTL.
diff --git a/drivers/gpu/drm/radeon/radeon_mode.h b/drivers/gpu/drm/radeon/radeon_mode.h
index e301c6f9e059..f406f02bf14e 100644
--- a/drivers/gpu/drm/radeon/radeon_mode.h
+++ b/drivers/gpu/drm/radeon/radeon_mode.h
@@ -277,6 +277,9 @@ struct radeon_crtc {
277 fixed20_12 hsc; 277 fixed20_12 hsc;
278 struct drm_display_mode native_mode; 278 struct drm_display_mode native_mode;
279 int pll_id; 279 int pll_id;
280 /* page flipping */
281 struct radeon_unpin_work *unpin_work;
282 int deferred_flip_completion;
280}; 283};
281 284
282struct radeon_encoder_primary_dac { 285struct radeon_encoder_primary_dac {
@@ -442,10 +445,6 @@ struct radeon_framebuffer {
442 struct drm_gem_object *obj; 445 struct drm_gem_object *obj;
443}; 446};
444 447
445/* radeon_get_crtc_scanoutpos() return flags */
446#define RADEON_SCANOUTPOS_VALID (1 << 0)
447#define RADEON_SCANOUTPOS_INVBL (1 << 1)
448#define RADEON_SCANOUTPOS_ACCURATE (1 << 2)
449 448
450extern enum radeon_tv_std 449extern enum radeon_tv_std
451radeon_combios_get_tv_info(struct radeon_device *rdev); 450radeon_combios_get_tv_info(struct radeon_device *rdev);
@@ -562,7 +561,8 @@ extern int radeon_crtc_cursor_set(struct drm_crtc *crtc,
562extern int radeon_crtc_cursor_move(struct drm_crtc *crtc, 561extern int radeon_crtc_cursor_move(struct drm_crtc *crtc,
563 int x, int y); 562 int x, int y);
564 563
565extern int radeon_get_crtc_scanoutpos(struct radeon_device *rdev, int crtc, int *vpos, int *hpos); 564extern int radeon_get_crtc_scanoutpos(struct drm_device *dev, int crtc,
565 int *vpos, int *hpos);
566 566
567extern bool radeon_combios_check_hardcoded_edid(struct radeon_device *rdev); 567extern bool radeon_combios_check_hardcoded_edid(struct radeon_device *rdev);
568extern struct edid * 568extern struct edid *
@@ -662,4 +662,7 @@ int radeon_fbdev_total_size(struct radeon_device *rdev);
662bool radeon_fbdev_robj_is_fb(struct radeon_device *rdev, struct radeon_bo *robj); 662bool radeon_fbdev_robj_is_fb(struct radeon_device *rdev, struct radeon_bo *robj);
663 663
664void radeon_fb_output_poll_changed(struct radeon_device *rdev); 664void radeon_fb_output_poll_changed(struct radeon_device *rdev);
665
666void radeon_crtc_handle_flip(struct radeon_device *rdev, int crtc_id);
667
665#endif 668#endif
diff --git a/drivers/gpu/drm/radeon/radeon_pm.c b/drivers/gpu/drm/radeon/radeon_pm.c
index 8c9b2ef32c68..5eda5e471980 100644
--- a/drivers/gpu/drm/radeon/radeon_pm.c
+++ b/drivers/gpu/drm/radeon/radeon_pm.c
@@ -720,9 +720,9 @@ static bool radeon_pm_in_vbl(struct radeon_device *rdev)
720 */ 720 */
721 for (crtc = 0; (crtc < rdev->num_crtc) && in_vbl; crtc++) { 721 for (crtc = 0; (crtc < rdev->num_crtc) && in_vbl; crtc++) {
722 if (rdev->pm.active_crtcs & (1 << crtc)) { 722 if (rdev->pm.active_crtcs & (1 << crtc)) {
723 vbl_status = radeon_get_crtc_scanoutpos(rdev, crtc, &vpos, &hpos); 723 vbl_status = radeon_get_crtc_scanoutpos(rdev->ddev, crtc, &vpos, &hpos);
724 if ((vbl_status & RADEON_SCANOUTPOS_VALID) && 724 if ((vbl_status & DRM_SCANOUTPOS_VALID) &&
725 !(vbl_status & RADEON_SCANOUTPOS_INVBL)) 725 !(vbl_status & DRM_SCANOUTPOS_INVBL))
726 in_vbl = false; 726 in_vbl = false;
727 } 727 }
728 } 728 }
diff --git a/drivers/gpu/drm/radeon/radeon_reg.h b/drivers/gpu/drm/radeon/radeon_reg.h
index 64928814de53..0a310b7f71c3 100644
--- a/drivers/gpu/drm/radeon/radeon_reg.h
+++ b/drivers/gpu/drm/radeon/radeon_reg.h
@@ -422,6 +422,7 @@
422# define RADEON_CRTC_CSYNC_EN (1 << 4) 422# define RADEON_CRTC_CSYNC_EN (1 << 4)
423# define RADEON_CRTC_ICON_EN (1 << 15) 423# define RADEON_CRTC_ICON_EN (1 << 15)
424# define RADEON_CRTC_CUR_EN (1 << 16) 424# define RADEON_CRTC_CUR_EN (1 << 16)
425# define RADEON_CRTC_VSTAT_MODE_MASK (3 << 17)
425# define RADEON_CRTC_CUR_MODE_MASK (7 << 20) 426# define RADEON_CRTC_CUR_MODE_MASK (7 << 20)
426# define RADEON_CRTC_CUR_MODE_SHIFT 20 427# define RADEON_CRTC_CUR_MODE_SHIFT 20
427# define RADEON_CRTC_CUR_MODE_MONO 0 428# define RADEON_CRTC_CUR_MODE_MONO 0
@@ -509,6 +510,8 @@
509# define RADEON_CRTC_TILE_EN (1 << 15) 510# define RADEON_CRTC_TILE_EN (1 << 15)
510# define RADEON_CRTC_OFFSET_FLIP_CNTL (1 << 16) 511# define RADEON_CRTC_OFFSET_FLIP_CNTL (1 << 16)
511# define RADEON_CRTC_STEREO_OFFSET_EN (1 << 17) 512# define RADEON_CRTC_STEREO_OFFSET_EN (1 << 17)
513# define RADEON_CRTC_GUI_TRIG_OFFSET_LEFT_EN (1 << 28)
514# define RADEON_CRTC_GUI_TRIG_OFFSET_RIGHT_EN (1 << 29)
512 515
513#define R300_CRTC_TILE_X0_Y0 0x0350 516#define R300_CRTC_TILE_X0_Y0 0x0350
514#define R300_CRTC2_TILE_X0_Y0 0x0358 517#define R300_CRTC2_TILE_X0_Y0 0x0358
diff --git a/drivers/gpu/drm/radeon/rs600.c b/drivers/gpu/drm/radeon/rs600.c
index f1c6e02c2e6b..9a85b1614c86 100644
--- a/drivers/gpu/drm/radeon/rs600.c
+++ b/drivers/gpu/drm/radeon/rs600.c
@@ -46,6 +46,56 @@
46void rs600_gpu_init(struct radeon_device *rdev); 46void rs600_gpu_init(struct radeon_device *rdev);
47int rs600_mc_wait_for_idle(struct radeon_device *rdev); 47int rs600_mc_wait_for_idle(struct radeon_device *rdev);
48 48
/**
 * rs600_pre_page_flip - prepare CRTC @crtc for a page flip
 *
 * Programs the AVIVO display controller so that a subsequent flip is
 * latched during vertical blank, then enables the pageflip interrupt
 * for this CRTC.  Paired with rs600_post_page_flip().
 */
void rs600_pre_page_flip(struct radeon_device *rdev, int crtc)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc];
	u32 tmp;

	/* make sure flip is at vb rather than hb:
	 * clear the horizontal-retrace update enable so the surface
	 * address is only latched during vertical blank */
	tmp = RREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
	tmp &= ~AVIVO_D1GRPH_SURFACE_UPDATE_H_RETRACE_EN;
	WREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);

	/* set pageflip to happen anywhere in vblank interval
	 * (mode 0), rather than at a specific scanline */
	WREG32(AVIVO_D1MODE_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);

	/* enable the pflip int so the flip-completion handler runs */
	radeon_irq_kms_pflip_irq_get(rdev, crtc);
}
65
/**
 * rs600_post_page_flip - clean up after a page flip on CRTC @crtc
 *
 * Drops the pageflip interrupt reference taken by
 * rs600_pre_page_flip().
 */
void rs600_post_page_flip(struct radeon_device *rdev, int crtc)
{
	/* disable the pflip int */
	radeon_irq_kms_pflip_irq_put(rdev, crtc);
}
71
72u32 rs600_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
73{
74 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
75 u32 tmp = RREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset);
76
77 /* Lock the graphics update lock */
78 tmp |= AVIVO_D1GRPH_UPDATE_LOCK;
79 WREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
80
81 /* update the scanout addresses */
82 WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
83 (u32)crtc_base);
84 WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
85 (u32)crtc_base);
86
87 /* Wait for update_pending to go high. */
88 while (!(RREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset) & AVIVO_D1GRPH_SURFACE_UPDATE_PENDING));
89 DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
90
91 /* Unlock the lock, so double-buffering can take place inside vblank */
92 tmp &= ~AVIVO_D1GRPH_UPDATE_LOCK;
93 WREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
94
95 /* Return current update_pending status: */
96 return RREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset) & AVIVO_D1GRPH_SURFACE_UPDATE_PENDING;
97}
98
49void rs600_pm_misc(struct radeon_device *rdev) 99void rs600_pm_misc(struct radeon_device *rdev)
50{ 100{
51 int requested_index = rdev->pm.requested_power_state_index; 101 int requested_index = rdev->pm.requested_power_state_index;
@@ -515,10 +565,12 @@ int rs600_irq_set(struct radeon_device *rdev)
515 if (rdev->irq.gui_idle) { 565 if (rdev->irq.gui_idle) {
516 tmp |= S_000040_GUI_IDLE(1); 566 tmp |= S_000040_GUI_IDLE(1);
517 } 567 }
518 if (rdev->irq.crtc_vblank_int[0]) { 568 if (rdev->irq.crtc_vblank_int[0] ||
569 rdev->irq.pflip[0]) {
519 mode_int |= S_006540_D1MODE_VBLANK_INT_MASK(1); 570 mode_int |= S_006540_D1MODE_VBLANK_INT_MASK(1);
520 } 571 }
521 if (rdev->irq.crtc_vblank_int[1]) { 572 if (rdev->irq.crtc_vblank_int[1] ||
573 rdev->irq.pflip[1]) {
522 mode_int |= S_006540_D2MODE_VBLANK_INT_MASK(1); 574 mode_int |= S_006540_D2MODE_VBLANK_INT_MASK(1);
523 } 575 }
524 if (rdev->irq.hpd[0]) { 576 if (rdev->irq.hpd[0]) {
@@ -534,7 +586,7 @@ int rs600_irq_set(struct radeon_device *rdev)
534 return 0; 586 return 0;
535} 587}
536 588
537static inline uint32_t rs600_irq_ack(struct radeon_device *rdev, u32 *r500_disp_int) 589static inline u32 rs600_irq_ack(struct radeon_device *rdev)
538{ 590{
539 uint32_t irqs = RREG32(R_000044_GEN_INT_STATUS); 591 uint32_t irqs = RREG32(R_000044_GEN_INT_STATUS);
540 uint32_t irq_mask = S_000044_SW_INT(1); 592 uint32_t irq_mask = S_000044_SW_INT(1);
@@ -547,27 +599,27 @@ static inline uint32_t rs600_irq_ack(struct radeon_device *rdev, u32 *r500_disp_
547 } 599 }
548 600
549 if (G_000044_DISPLAY_INT_STAT(irqs)) { 601 if (G_000044_DISPLAY_INT_STAT(irqs)) {
550 *r500_disp_int = RREG32(R_007EDC_DISP_INTERRUPT_STATUS); 602 rdev->irq.stat_regs.r500.disp_int = RREG32(R_007EDC_DISP_INTERRUPT_STATUS);
551 if (G_007EDC_LB_D1_VBLANK_INTERRUPT(*r500_disp_int)) { 603 if (G_007EDC_LB_D1_VBLANK_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) {
552 WREG32(R_006534_D1MODE_VBLANK_STATUS, 604 WREG32(R_006534_D1MODE_VBLANK_STATUS,
553 S_006534_D1MODE_VBLANK_ACK(1)); 605 S_006534_D1MODE_VBLANK_ACK(1));
554 } 606 }
555 if (G_007EDC_LB_D2_VBLANK_INTERRUPT(*r500_disp_int)) { 607 if (G_007EDC_LB_D2_VBLANK_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) {
556 WREG32(R_006D34_D2MODE_VBLANK_STATUS, 608 WREG32(R_006D34_D2MODE_VBLANK_STATUS,
557 S_006D34_D2MODE_VBLANK_ACK(1)); 609 S_006D34_D2MODE_VBLANK_ACK(1));
558 } 610 }
559 if (G_007EDC_DC_HOT_PLUG_DETECT1_INTERRUPT(*r500_disp_int)) { 611 if (G_007EDC_DC_HOT_PLUG_DETECT1_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) {
560 tmp = RREG32(R_007D08_DC_HOT_PLUG_DETECT1_INT_CONTROL); 612 tmp = RREG32(R_007D08_DC_HOT_PLUG_DETECT1_INT_CONTROL);
561 tmp |= S_007D08_DC_HOT_PLUG_DETECT1_INT_ACK(1); 613 tmp |= S_007D08_DC_HOT_PLUG_DETECT1_INT_ACK(1);
562 WREG32(R_007D08_DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp); 614 WREG32(R_007D08_DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
563 } 615 }
564 if (G_007EDC_DC_HOT_PLUG_DETECT2_INTERRUPT(*r500_disp_int)) { 616 if (G_007EDC_DC_HOT_PLUG_DETECT2_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) {
565 tmp = RREG32(R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL); 617 tmp = RREG32(R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL);
566 tmp |= S_007D18_DC_HOT_PLUG_DETECT2_INT_ACK(1); 618 tmp |= S_007D18_DC_HOT_PLUG_DETECT2_INT_ACK(1);
567 WREG32(R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp); 619 WREG32(R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
568 } 620 }
569 } else { 621 } else {
570 *r500_disp_int = 0; 622 rdev->irq.stat_regs.r500.disp_int = 0;
571 } 623 }
572 624
573 if (irqs) { 625 if (irqs) {
@@ -578,32 +630,30 @@ static inline uint32_t rs600_irq_ack(struct radeon_device *rdev, u32 *r500_disp_
578 630
579void rs600_irq_disable(struct radeon_device *rdev) 631void rs600_irq_disable(struct radeon_device *rdev)
580{ 632{
581 u32 tmp;
582
583 WREG32(R_000040_GEN_INT_CNTL, 0); 633 WREG32(R_000040_GEN_INT_CNTL, 0);
584 WREG32(R_006540_DxMODE_INT_MASK, 0); 634 WREG32(R_006540_DxMODE_INT_MASK, 0);
585 /* Wait and acknowledge irq */ 635 /* Wait and acknowledge irq */
586 mdelay(1); 636 mdelay(1);
587 rs600_irq_ack(rdev, &tmp); 637 rs600_irq_ack(rdev);
588} 638}
589 639
590int rs600_irq_process(struct radeon_device *rdev) 640int rs600_irq_process(struct radeon_device *rdev)
591{ 641{
592 uint32_t status, msi_rearm; 642 u32 status, msi_rearm;
593 uint32_t r500_disp_int;
594 bool queue_hotplug = false; 643 bool queue_hotplug = false;
595 644
596 /* reset gui idle ack. the status bit is broken */ 645 /* reset gui idle ack. the status bit is broken */
597 rdev->irq.gui_idle_acked = false; 646 rdev->irq.gui_idle_acked = false;
598 647
599 status = rs600_irq_ack(rdev, &r500_disp_int); 648 status = rs600_irq_ack(rdev);
600 if (!status && !r500_disp_int) { 649 if (!status && !rdev->irq.stat_regs.r500.disp_int) {
601 return IRQ_NONE; 650 return IRQ_NONE;
602 } 651 }
603 while (status || r500_disp_int) { 652 while (status || rdev->irq.stat_regs.r500.disp_int) {
604 /* SW interrupt */ 653 /* SW interrupt */
605 if (G_000044_SW_INT(status)) 654 if (G_000044_SW_INT(status)) {
606 radeon_fence_process(rdev); 655 radeon_fence_process(rdev);
656 }
607 /* GUI idle */ 657 /* GUI idle */
608 if (G_000040_GUI_IDLE(status)) { 658 if (G_000040_GUI_IDLE(status)) {
609 rdev->irq.gui_idle_acked = true; 659 rdev->irq.gui_idle_acked = true;
@@ -611,25 +661,33 @@ int rs600_irq_process(struct radeon_device *rdev)
611 wake_up(&rdev->irq.idle_queue); 661 wake_up(&rdev->irq.idle_queue);
612 } 662 }
613 /* Vertical blank interrupts */ 663 /* Vertical blank interrupts */
614 if (G_007EDC_LB_D1_VBLANK_INTERRUPT(r500_disp_int)) { 664 if (G_007EDC_LB_D1_VBLANK_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) {
615 drm_handle_vblank(rdev->ddev, 0); 665 if (rdev->irq.crtc_vblank_int[0]) {
616 rdev->pm.vblank_sync = true; 666 drm_handle_vblank(rdev->ddev, 0);
617 wake_up(&rdev->irq.vblank_queue); 667 rdev->pm.vblank_sync = true;
668 wake_up(&rdev->irq.vblank_queue);
669 }
670 if (rdev->irq.pflip[0])
671 radeon_crtc_handle_flip(rdev, 0);
618 } 672 }
619 if (G_007EDC_LB_D2_VBLANK_INTERRUPT(r500_disp_int)) { 673 if (G_007EDC_LB_D2_VBLANK_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) {
620 drm_handle_vblank(rdev->ddev, 1); 674 if (rdev->irq.crtc_vblank_int[1]) {
621 rdev->pm.vblank_sync = true; 675 drm_handle_vblank(rdev->ddev, 1);
622 wake_up(&rdev->irq.vblank_queue); 676 rdev->pm.vblank_sync = true;
677 wake_up(&rdev->irq.vblank_queue);
678 }
679 if (rdev->irq.pflip[1])
680 radeon_crtc_handle_flip(rdev, 1);
623 } 681 }
624 if (G_007EDC_DC_HOT_PLUG_DETECT1_INTERRUPT(r500_disp_int)) { 682 if (G_007EDC_DC_HOT_PLUG_DETECT1_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) {
625 queue_hotplug = true; 683 queue_hotplug = true;
626 DRM_DEBUG("HPD1\n"); 684 DRM_DEBUG("HPD1\n");
627 } 685 }
628 if (G_007EDC_DC_HOT_PLUG_DETECT2_INTERRUPT(r500_disp_int)) { 686 if (G_007EDC_DC_HOT_PLUG_DETECT2_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) {
629 queue_hotplug = true; 687 queue_hotplug = true;
630 DRM_DEBUG("HPD2\n"); 688 DRM_DEBUG("HPD2\n");
631 } 689 }
632 status = rs600_irq_ack(rdev, &r500_disp_int); 690 status = rs600_irq_ack(rdev);
633 } 691 }
634 /* reset gui idle ack. the status bit is broken */ 692 /* reset gui idle ack. the status bit is broken */
635 rdev->irq.gui_idle_acked = false; 693 rdev->irq.gui_idle_acked = false;
diff --git a/drivers/gpu/drm/radeon/rv770.c b/drivers/gpu/drm/radeon/rv770.c
index 4dfead8cee33..42ff07893f3a 100644
--- a/drivers/gpu/drm/radeon/rv770.c
+++ b/drivers/gpu/drm/radeon/rv770.c
@@ -42,6 +42,40 @@
42static void rv770_gpu_init(struct radeon_device *rdev); 42static void rv770_gpu_init(struct radeon_device *rdev);
43void rv770_fini(struct radeon_device *rdev); 43void rv770_fini(struct radeon_device *rdev);
44 44
45u32 rv770_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
46{
47 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
48 u32 tmp = RREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset);
49
50 /* Lock the graphics update lock */
51 tmp |= AVIVO_D1GRPH_UPDATE_LOCK;
52 WREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
53
54 /* update the scanout addresses */
55 if (radeon_crtc->crtc_id) {
56 WREG32(D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(crtc_base));
57 WREG32(D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(crtc_base));
58 } else {
59 WREG32(D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(crtc_base));
60 WREG32(D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(crtc_base));
61 }
62 WREG32(D1GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
63 (u32)crtc_base);
64 WREG32(D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
65 (u32)crtc_base);
66
67 /* Wait for update_pending to go high. */
68 while (!(RREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset) & AVIVO_D1GRPH_SURFACE_UPDATE_PENDING));
69 DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
70
71 /* Unlock the lock, so double-buffering can take place inside vblank */
72 tmp &= ~AVIVO_D1GRPH_UPDATE_LOCK;
73 WREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
74
75 /* Return current update_pending status: */
76 return RREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset) & AVIVO_D1GRPH_SURFACE_UPDATE_PENDING;
77}
78
45/* get temperature in millidegrees */ 79/* get temperature in millidegrees */
46u32 rv770_get_temp(struct radeon_device *rdev) 80u32 rv770_get_temp(struct radeon_device *rdev)
47{ 81{
diff --git a/drivers/gpu/drm/radeon/rv770d.h b/drivers/gpu/drm/radeon/rv770d.h
index b7a5a20e81dc..11955c685ad1 100644
--- a/drivers/gpu/drm/radeon/rv770d.h
+++ b/drivers/gpu/drm/radeon/rv770d.h
@@ -351,4 +351,11 @@
351 351
352#define SRBM_STATUS 0x0E50 352#define SRBM_STATUS 0x0E50
353 353
354#define D1GRPH_PRIMARY_SURFACE_ADDRESS 0x6110
355#define D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH 0x6914
356#define D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH 0x6114
357#define D1GRPH_SECONDARY_SURFACE_ADDRESS 0x6118
358#define D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH 0x691c
359#define D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH 0x611c
360
354#endif 361#endif