author		Alex Deucher <alexander.deucher@amd.com>	2012-03-20 17:18:22 -0400
committer	Dave Airlie <airlied@redhat.com>	2012-03-21 02:55:54 -0400
commit		25a857fbe973bdcc7df0df2e0c8f9c6e1ab0e475 (patch)
tree		48299c76543e234ef10ad746dc9d4e0904f472ad /drivers/gpu/drm/radeon/si.c
parent		347e7592beb0abd56a11ec16ca8aba9f60681f13 (diff)
drm/radeon/kms: add support for interrupts on SI
This is mostly identical to evergreen/ni; however, there are some
additional fields in the IV vector for RINGID and VMID.

Signed-off-by: Alex Deucher <alexander.deucher@amd.com>
Signed-off-by: Dave Airlie <airlied@redhat.com>
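For readers unfamiliar with the new entry format, the sketch below decodes one
128-bit IV ring entry into its fields. It is illustrative only and not part of
the patch: the struct and helper names are hypothetical, but the bit layout and
masks are the ones used by si_irq_process() in the diff below.

/* Hypothetical helper (not in the patch): unpack one 128-bit SI IV entry.
 * Field positions follow the "SI IV Ring" comment in the diff; RINGID and
 * VMID are the fields that are new relative to evergreen/ni.
 */
struct si_iv_entry {
	u32 src_id;   /* dw0, bits [7:0]  - interrupt source id   */
	u32 src_data; /* dw1, bits [27:0] - interrupt source data */
	u32 ring_id;  /* dw2, bits [7:0]  - RINGID (new on SI)    */
	u32 vm_id;    /* dw2, bits [15:8] - VMID (new on SI)      */
};

static void si_decode_iv_entry(const u32 *ring, u32 ring_index,
			       struct si_iv_entry *e)
{
	e->src_id   = le32_to_cpu(ring[ring_index + 0]) & 0xff;
	e->src_data = le32_to_cpu(ring[ring_index + 1]) & 0xfffffff;
	e->ring_id  = le32_to_cpu(ring[ring_index + 2]) & 0xff;
	e->vm_id    = (le32_to_cpu(ring[ring_index + 2]) >> 8) & 0xff;
}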
Diffstat (limited to 'drivers/gpu/drm/radeon/si.c')
-rw-r--r--	drivers/gpu/drm/radeon/si.c	721
1 file changed, 721 insertions(+), 0 deletions(-)
diff --git a/drivers/gpu/drm/radeon/si.c b/drivers/gpu/drm/radeon/si.c
index 4252cd0ab64..6aecbf54cd5 100644
--- a/drivers/gpu/drm/radeon/si.c
+++ b/drivers/gpu/drm/radeon/si.c
@@ -55,6 +55,8 @@ MODULE_FIRMWARE("radeon/VERDE_ce.bin");
 MODULE_FIRMWARE("radeon/VERDE_mc.bin");
 MODULE_FIRMWARE("radeon/VERDE_rlc.bin");
 
+extern int r600_ih_ring_alloc(struct radeon_device *rdev);
+extern void r600_ih_ring_fini(struct radeon_device *rdev);
 extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
@@ -3072,3 +3074,722 @@ static int si_rlc_resume(struct radeon_device *rdev)
 	return 0;
 }
 
+static void si_enable_interrupts(struct radeon_device *rdev)
+{
+	u32 ih_cntl = RREG32(IH_CNTL);
+	u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
+
+	ih_cntl |= ENABLE_INTR;
+	ih_rb_cntl |= IH_RB_ENABLE;
+	WREG32(IH_CNTL, ih_cntl);
+	WREG32(IH_RB_CNTL, ih_rb_cntl);
+	rdev->ih.enabled = true;
+}
+
+static void si_disable_interrupts(struct radeon_device *rdev)
+{
+	u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
+	u32 ih_cntl = RREG32(IH_CNTL);
+
+	ih_rb_cntl &= ~IH_RB_ENABLE;
+	ih_cntl &= ~ENABLE_INTR;
+	WREG32(IH_RB_CNTL, ih_rb_cntl);
+	WREG32(IH_CNTL, ih_cntl);
+	/* set rptr, wptr to 0 */
+	WREG32(IH_RB_RPTR, 0);
+	WREG32(IH_RB_WPTR, 0);
+	rdev->ih.enabled = false;
+	rdev->ih.wptr = 0;
+	rdev->ih.rptr = 0;
+}
+
+static void si_disable_interrupt_state(struct radeon_device *rdev)
+{
+	u32 tmp;
+
+	WREG32(CP_INT_CNTL_RING0, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
+	WREG32(CP_INT_CNTL_RING1, 0);
+	WREG32(CP_INT_CNTL_RING2, 0);
+	WREG32(GRBM_INT_CNTL, 0);
+	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
+	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
+	if (rdev->num_crtc >= 4) {
+		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
+		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
+	}
+	if (rdev->num_crtc >= 6) {
+		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
+		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
+	}
+
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
+	if (rdev->num_crtc >= 4) {
+		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
+		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
+	}
+	if (rdev->num_crtc >= 6) {
+		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
+		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
+	}
+
+	WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
+
+	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
+	WREG32(DC_HPD1_INT_CONTROL, tmp);
+	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
+	WREG32(DC_HPD2_INT_CONTROL, tmp);
+	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
+	WREG32(DC_HPD3_INT_CONTROL, tmp);
+	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
+	WREG32(DC_HPD4_INT_CONTROL, tmp);
+	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
+	WREG32(DC_HPD5_INT_CONTROL, tmp);
+	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
+	WREG32(DC_HPD6_INT_CONTROL, tmp);
+
+}
+
+static int si_irq_init(struct radeon_device *rdev)
+{
+	int ret = 0;
+	int rb_bufsz;
+	u32 interrupt_cntl, ih_cntl, ih_rb_cntl;
+
+	/* allocate ring */
+	ret = r600_ih_ring_alloc(rdev);
+	if (ret)
+		return ret;
+
+	/* disable irqs */
+	si_disable_interrupts(rdev);
+
+	/* init rlc */
+	ret = si_rlc_resume(rdev);
+	if (ret) {
+		r600_ih_ring_fini(rdev);
+		return ret;
+	}
+
+	/* setup interrupt control */
+	/* set dummy read address to ring address */
+	WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8);
+	interrupt_cntl = RREG32(INTERRUPT_CNTL);
+	/* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi
+	 * IH_DUMMY_RD_OVERRIDE=1 - dummy read controlled by IH_DUMMY_RD_EN
+	 */
+	interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;
+	/* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */
+	interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;
+	WREG32(INTERRUPT_CNTL, interrupt_cntl);
+
+	WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
+	rb_bufsz = drm_order(rdev->ih.ring_size / 4);
+
+	ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
+		      IH_WPTR_OVERFLOW_CLEAR |
+		      (rb_bufsz << 1));
+
+	if (rdev->wb.enabled)
+		ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;
+
+	/* set the writeback address whether it's enabled or not */
+	WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
+	WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);
+
+	WREG32(IH_RB_CNTL, ih_rb_cntl);
+
+	/* set rptr, wptr to 0 */
+	WREG32(IH_RB_RPTR, 0);
+	WREG32(IH_RB_WPTR, 0);
+
+	/* Default settings for IH_CNTL (disabled at first) */
+	ih_cntl = MC_WRREQ_CREDIT(0x10) | MC_WR_CLEAN_CNT(0x10) | MC_VMID(0);
+	/* RPTR_REARM only works if msi's are enabled */
+	if (rdev->msi_enabled)
+		ih_cntl |= RPTR_REARM;
+	WREG32(IH_CNTL, ih_cntl);
+
+	/* force the active interrupt state to all disabled */
+	si_disable_interrupt_state(rdev);
+
+	/* enable irqs */
+	si_enable_interrupts(rdev);
+
+	return ret;
+}
+
+int si_irq_set(struct radeon_device *rdev)
+{
+	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
+	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
+	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
+	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
+	u32 grbm_int_cntl = 0;
+	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
+
+	if (!rdev->irq.installed) {
+		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
+		return -EINVAL;
+	}
+	/* don't enable anything if the ih is disabled */
+	if (!rdev->ih.enabled) {
+		si_disable_interrupts(rdev);
+		/* force the active interrupt state to all disabled */
+		si_disable_interrupt_state(rdev);
+		return 0;
+	}
+
+	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
+	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
+	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
+	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
+	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
+	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
+
+	/* enable CP interrupts on all rings */
+	if (rdev->irq.sw_int[RADEON_RING_TYPE_GFX_INDEX]) {
+		DRM_DEBUG("si_irq_set: sw int gfx\n");
+		cp_int_cntl |= TIME_STAMP_INT_ENABLE;
+	}
+	if (rdev->irq.sw_int[CAYMAN_RING_TYPE_CP1_INDEX]) {
+		DRM_DEBUG("si_irq_set: sw int cp1\n");
+		cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
+	}
+	if (rdev->irq.sw_int[CAYMAN_RING_TYPE_CP2_INDEX]) {
+		DRM_DEBUG("si_irq_set: sw int cp2\n");
+		cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
+	}
+	if (rdev->irq.crtc_vblank_int[0] ||
+	    rdev->irq.pflip[0]) {
+		DRM_DEBUG("si_irq_set: vblank 0\n");
+		crtc1 |= VBLANK_INT_MASK;
+	}
+	if (rdev->irq.crtc_vblank_int[1] ||
+	    rdev->irq.pflip[1]) {
+		DRM_DEBUG("si_irq_set: vblank 1\n");
+		crtc2 |= VBLANK_INT_MASK;
+	}
+	if (rdev->irq.crtc_vblank_int[2] ||
+	    rdev->irq.pflip[2]) {
+		DRM_DEBUG("si_irq_set: vblank 2\n");
+		crtc3 |= VBLANK_INT_MASK;
+	}
+	if (rdev->irq.crtc_vblank_int[3] ||
+	    rdev->irq.pflip[3]) {
+		DRM_DEBUG("si_irq_set: vblank 3\n");
+		crtc4 |= VBLANK_INT_MASK;
+	}
+	if (rdev->irq.crtc_vblank_int[4] ||
+	    rdev->irq.pflip[4]) {
+		DRM_DEBUG("si_irq_set: vblank 4\n");
+		crtc5 |= VBLANK_INT_MASK;
+	}
+	if (rdev->irq.crtc_vblank_int[5] ||
+	    rdev->irq.pflip[5]) {
+		DRM_DEBUG("si_irq_set: vblank 5\n");
+		crtc6 |= VBLANK_INT_MASK;
+	}
+	if (rdev->irq.hpd[0]) {
+		DRM_DEBUG("si_irq_set: hpd 1\n");
+		hpd1 |= DC_HPDx_INT_EN;
+	}
+	if (rdev->irq.hpd[1]) {
+		DRM_DEBUG("si_irq_set: hpd 2\n");
+		hpd2 |= DC_HPDx_INT_EN;
+	}
+	if (rdev->irq.hpd[2]) {
+		DRM_DEBUG("si_irq_set: hpd 3\n");
+		hpd3 |= DC_HPDx_INT_EN;
+	}
+	if (rdev->irq.hpd[3]) {
+		DRM_DEBUG("si_irq_set: hpd 4\n");
+		hpd4 |= DC_HPDx_INT_EN;
+	}
+	if (rdev->irq.hpd[4]) {
+		DRM_DEBUG("si_irq_set: hpd 5\n");
+		hpd5 |= DC_HPDx_INT_EN;
+	}
+	if (rdev->irq.hpd[5]) {
+		DRM_DEBUG("si_irq_set: hpd 6\n");
+		hpd6 |= DC_HPDx_INT_EN;
+	}
+	if (rdev->irq.gui_idle) {
+		DRM_DEBUG("gui idle\n");
+		grbm_int_cntl |= GUI_IDLE_INT_ENABLE;
+	}
+
+	WREG32(CP_INT_CNTL_RING0, cp_int_cntl);
+	WREG32(CP_INT_CNTL_RING1, cp_int_cntl1);
+	WREG32(CP_INT_CNTL_RING2, cp_int_cntl2);
+
+	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
+
+	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
+	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
+	if (rdev->num_crtc >= 4) {
+		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
+		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
+	}
+	if (rdev->num_crtc >= 6) {
+		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
+		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
+	}
+
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
+	if (rdev->num_crtc >= 4) {
+		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
+		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
+	}
+	if (rdev->num_crtc >= 6) {
+		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
+		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
+	}
+
+	WREG32(DC_HPD1_INT_CONTROL, hpd1);
+	WREG32(DC_HPD2_INT_CONTROL, hpd2);
+	WREG32(DC_HPD3_INT_CONTROL, hpd3);
+	WREG32(DC_HPD4_INT_CONTROL, hpd4);
+	WREG32(DC_HPD5_INT_CONTROL, hpd5);
+	WREG32(DC_HPD6_INT_CONTROL, hpd6);
+
+	return 0;
+}
+
+static inline void si_irq_ack(struct radeon_device *rdev)
+{
+	u32 tmp;
+
+	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
+	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
+	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
+	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
+	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
+	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
+	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
+	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
+	if (rdev->num_crtc >= 4) {
+		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
+		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
+	}
+	if (rdev->num_crtc >= 6) {
+		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
+		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
+	}
+
+	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
+		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
+	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
+		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
+	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
+		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
+	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
+		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
+		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
+		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
+
+	if (rdev->num_crtc >= 4) {
+		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
+			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
+		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
+			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
+		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
+			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
+		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
+			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
+		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
+			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
+		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
+			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
+	}
+
+	if (rdev->num_crtc >= 6) {
+		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
+			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
+		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
+			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
+		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
+			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
+		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
+			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
+		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
+			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
+		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
+			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
+	}
+
+	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
+		tmp = RREG32(DC_HPD1_INT_CONTROL);
+		tmp |= DC_HPDx_INT_ACK;
+		WREG32(DC_HPD1_INT_CONTROL, tmp);
+	}
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
+		tmp = RREG32(DC_HPD2_INT_CONTROL);
+		tmp |= DC_HPDx_INT_ACK;
+		WREG32(DC_HPD2_INT_CONTROL, tmp);
+	}
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
+		tmp = RREG32(DC_HPD3_INT_CONTROL);
+		tmp |= DC_HPDx_INT_ACK;
+		WREG32(DC_HPD3_INT_CONTROL, tmp);
+	}
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
+		tmp = RREG32(DC_HPD4_INT_CONTROL);
+		tmp |= DC_HPDx_INT_ACK;
+		WREG32(DC_HPD4_INT_CONTROL, tmp);
+	}
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
+		tmp = RREG32(DC_HPD5_INT_CONTROL);
+		tmp |= DC_HPDx_INT_ACK;
+		WREG32(DC_HPD5_INT_CONTROL, tmp);
+	}
+	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
+		tmp = RREG32(DC_HPD6_INT_CONTROL);
+		tmp |= DC_HPDx_INT_ACK;
+		WREG32(DC_HPD6_INT_CONTROL, tmp);
+	}
+}
+
+static void si_irq_disable(struct radeon_device *rdev)
+{
+	si_disable_interrupts(rdev);
+	/* Wait and acknowledge irq */
+	mdelay(1);
+	si_irq_ack(rdev);
+	si_disable_interrupt_state(rdev);
+}
+
+static void si_irq_suspend(struct radeon_device *rdev)
+{
+	si_irq_disable(rdev);
+	si_rlc_stop(rdev);
+}
+
+static inline u32 si_get_ih_wptr(struct radeon_device *rdev)
+{
+	u32 wptr, tmp;
+
+	if (rdev->wb.enabled)
+		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
+	else
+		wptr = RREG32(IH_RB_WPTR);
+
+	if (wptr & RB_OVERFLOW) {
+		/* When a ring buffer overflow happens, start parsing interrupts
+		 * from the last not overwritten vector (wptr + 16). Hopefully
+		 * this should allow us to catch up.
+		 */
+		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
+			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
+		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
+		tmp = RREG32(IH_RB_CNTL);
+		tmp |= IH_WPTR_OVERFLOW_CLEAR;
+		WREG32(IH_RB_CNTL, tmp);
+	}
+	return (wptr & rdev->ih.ptr_mask);
+}
+
+/* SI IV Ring
+ * Each IV ring entry is 128 bits:
+ * [7:0]    - interrupt source id
+ * [31:8]   - reserved
+ * [59:32]  - interrupt source data
+ * [63:60]  - reserved
+ * [71:64]  - RINGID
+ * [79:72]  - VMID
+ * [127:80] - reserved
+ */
+int si_irq_process(struct radeon_device *rdev)
+{
+	u32 wptr;
+	u32 rptr;
+	u32 src_id, src_data, ring_id;
+	u32 ring_index;
+	unsigned long flags;
+	bool queue_hotplug = false;
+
+	if (!rdev->ih.enabled || rdev->shutdown)
+		return IRQ_NONE;
+
+	wptr = si_get_ih_wptr(rdev);
+	rptr = rdev->ih.rptr;
+	DRM_DEBUG("si_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
+
+	spin_lock_irqsave(&rdev->ih.lock, flags);
+	if (rptr == wptr) {
+		spin_unlock_irqrestore(&rdev->ih.lock, flags);
+		return IRQ_NONE;
+	}
+restart_ih:
+	/* Order reading of wptr vs. reading of IH ring data */
+	rmb();
+
+	/* display interrupts */
+	si_irq_ack(rdev);
+
+	rdev->ih.wptr = wptr;
+	while (rptr != wptr) {
+		/* wptr/rptr are in bytes! */
+		ring_index = rptr / 4;
+		src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
+		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
+		ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
+
+		switch (src_id) {
+		case 1: /* D1 vblank/vline */
+			switch (src_data) {
+			case 0: /* D1 vblank */
+				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
+					if (rdev->irq.crtc_vblank_int[0]) {
+						drm_handle_vblank(rdev->ddev, 0);
+						rdev->pm.vblank_sync = true;
+						wake_up(&rdev->irq.vblank_queue);
+					}
+					if (rdev->irq.pflip[0])
+						radeon_crtc_handle_flip(rdev, 0);
+					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
+					DRM_DEBUG("IH: D1 vblank\n");
+				}
+				break;
+			case 1: /* D1 vline */
+				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
+					DRM_DEBUG("IH: D1 vline\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 2: /* D2 vblank/vline */
+			switch (src_data) {
+			case 0: /* D2 vblank */
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
+					if (rdev->irq.crtc_vblank_int[1]) {
+						drm_handle_vblank(rdev->ddev, 1);
+						rdev->pm.vblank_sync = true;
+						wake_up(&rdev->irq.vblank_queue);
+					}
+					if (rdev->irq.pflip[1])
+						radeon_crtc_handle_flip(rdev, 1);
+					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
+					DRM_DEBUG("IH: D2 vblank\n");
+				}
+				break;
+			case 1: /* D2 vline */
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
+					DRM_DEBUG("IH: D2 vline\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 3: /* D3 vblank/vline */
+			switch (src_data) {
+			case 0: /* D3 vblank */
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
+					if (rdev->irq.crtc_vblank_int[2]) {
+						drm_handle_vblank(rdev->ddev, 2);
+						rdev->pm.vblank_sync = true;
+						wake_up(&rdev->irq.vblank_queue);
+					}
+					if (rdev->irq.pflip[2])
+						radeon_crtc_handle_flip(rdev, 2);
+					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
+					DRM_DEBUG("IH: D3 vblank\n");
+				}
+				break;
+			case 1: /* D3 vline */
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
+					DRM_DEBUG("IH: D3 vline\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 4: /* D4 vblank/vline */
+			switch (src_data) {
+			case 0: /* D4 vblank */
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
+					if (rdev->irq.crtc_vblank_int[3]) {
+						drm_handle_vblank(rdev->ddev, 3);
+						rdev->pm.vblank_sync = true;
+						wake_up(&rdev->irq.vblank_queue);
+					}
+					if (rdev->irq.pflip[3])
+						radeon_crtc_handle_flip(rdev, 3);
+					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
+					DRM_DEBUG("IH: D4 vblank\n");
+				}
+				break;
+			case 1: /* D4 vline */
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
+					DRM_DEBUG("IH: D4 vline\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 5: /* D5 vblank/vline */
+			switch (src_data) {
+			case 0: /* D5 vblank */
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
+					if (rdev->irq.crtc_vblank_int[4]) {
+						drm_handle_vblank(rdev->ddev, 4);
+						rdev->pm.vblank_sync = true;
+						wake_up(&rdev->irq.vblank_queue);
+					}
+					if (rdev->irq.pflip[4])
+						radeon_crtc_handle_flip(rdev, 4);
+					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
+					DRM_DEBUG("IH: D5 vblank\n");
+				}
+				break;
+			case 1: /* D5 vline */
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
+					DRM_DEBUG("IH: D5 vline\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 6: /* D6 vblank/vline */
+			switch (src_data) {
+			case 0: /* D6 vblank */
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
+					if (rdev->irq.crtc_vblank_int[5]) {
+						drm_handle_vblank(rdev->ddev, 5);
+						rdev->pm.vblank_sync = true;
+						wake_up(&rdev->irq.vblank_queue);
+					}
+					if (rdev->irq.pflip[5])
+						radeon_crtc_handle_flip(rdev, 5);
+					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
+					DRM_DEBUG("IH: D6 vblank\n");
+				}
+				break;
+			case 1: /* D6 vline */
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
+					DRM_DEBUG("IH: D6 vline\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 42: /* HPD hotplug */
+			switch (src_data) {
+			case 0:
+				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
+					queue_hotplug = true;
+					DRM_DEBUG("IH: HPD1\n");
+				}
+				break;
+			case 1:
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
+					queue_hotplug = true;
+					DRM_DEBUG("IH: HPD2\n");
+				}
+				break;
+			case 2:
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
+					queue_hotplug = true;
+					DRM_DEBUG("IH: HPD3\n");
+				}
+				break;
+			case 3:
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
+					queue_hotplug = true;
+					DRM_DEBUG("IH: HPD4\n");
+				}
+				break;
+			case 4:
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
+					queue_hotplug = true;
+					DRM_DEBUG("IH: HPD5\n");
+				}
+				break;
+			case 5:
+				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
+					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
+					queue_hotplug = true;
+					DRM_DEBUG("IH: HPD6\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 176: /* RINGID0 CP_INT */
+			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
+			break;
+		case 177: /* RINGID1 CP_INT */
+			radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
+			break;
+		case 178: /* RINGID2 CP_INT */
+			radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
+			break;
+		case 181: /* CP EOP event */
+			DRM_DEBUG("IH: CP EOP\n");
+			switch (ring_id) {
+			case 0:
+				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
+				break;
+			case 1:
+				radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
+				break;
+			case 2:
+				radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
+				break;
+			}
+			break;
+		case 233: /* GUI IDLE */
+			DRM_DEBUG("IH: GUI idle\n");
+			rdev->pm.gui_idle = true;
+			wake_up(&rdev->irq.idle_queue);
+			break;
+		default:
+			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+			break;
+		}
+
+		/* wptr/rptr are in bytes! */
+		rptr += 16;
+		rptr &= rdev->ih.ptr_mask;
+	}
+	/* make sure wptr hasn't changed while processing */
+	wptr = si_get_ih_wptr(rdev);
+	if (wptr != rdev->ih.wptr)
+		goto restart_ih;
+	if (queue_hotplug)
+		schedule_work(&rdev->hotplug_work);
+	rdev->ih.rptr = rptr;
+	WREG32(IH_RB_RPTR, rdev->ih.rptr);
+	spin_unlock_irqrestore(&rdev->ih.lock, flags);
+	return IRQ_HANDLED;
+}
+