author		Alex Deucher <alexdeucher@gmail.com>	2010-03-24 13:55:51 -0400
committer	Dave Airlie <airlied@redhat.com>	2010-04-08 20:16:09 -0400
commit		45f9a39bedc3afab3fc85567792efc0103f34a55 (patch)
tree		57a16fa09b3c31ee56d9c4803de00a477d7396ff /drivers/gpu/drm/radeon/evergreen.c
parent		fe251e2fffa1ebc17c8e6e895b0374ae4e732fa5 (diff)
drm/radeon/kms/evergreen: implement irq support
Signed-off-by: Alex Deucher <alexdeucher@gmail.com>
Signed-off-by: Dave Airlie <airlied@redhat.com>
Diffstat (limited to 'drivers/gpu/drm/radeon/evergreen.c')
-rw-r--r--	drivers/gpu/drm/radeon/evergreen.c	549
1 file changed, 532 insertions(+), 17 deletions(-)
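Note: the new entry points below are intended to be called through the radeon ASIC dispatch table rather than directly. A rough sketch of that wiring (the radeon_asic.c side is not part of this diff, and the field names are assumed from the ASIC table of this era):

	static struct radeon_asic evergreen_asic = {
		/* ... existing callbacks ... */
		.get_vblank_counter = &evergreen_get_vblank_counter,
		.irq_set = &evergreen_irq_set,
		.irq_process = &evergreen_irq_process,
		/* ... */
	};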
diff --git a/drivers/gpu/drm/radeon/evergreen.c b/drivers/gpu/drm/radeon/evergreen.c
index 57fe569682df..5c34349058c1 100644
--- a/drivers/gpu/drm/radeon/evergreen.c
+++ b/drivers/gpu/drm/radeon/evergreen.c
@@ -1205,12 +1205,532 @@ int evergreen_asic_reset(struct radeon_device *rdev)
 	return evergreen_gpu_soft_reset(rdev);
 }
 
+/* Interrupts */
+
+u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
+{
+	switch (crtc) {
+	case 0:
+		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
+	case 1:
+		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
+	case 2:
+		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
+	case 3:
+		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
+	case 4:
+		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
+	case 5:
+		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
+	default:
+		return 0;
+	}
+}
+
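+/*
+ * Mask every interrupt source (CP, GRBM, per-CRTC, per-plane GRPH, DAC
+ * auto-detect) and clear the hot-plug enables while preserving the HPD
+ * polarity bits.
+ */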
+void evergreen_disable_interrupt_state(struct radeon_device *rdev)
+{
+	u32 tmp;
+
+	WREG32(CP_INT_CNTL, 0);
+	WREG32(GRBM_INT_CNTL, 0);
+	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
+	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
+	WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
+	WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
+	WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
+	WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
+
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
+	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
+
+	WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
+	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
+
+	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
+	WREG32(DC_HPD1_INT_CONTROL, tmp);
+	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
+	WREG32(DC_HPD2_INT_CONTROL, tmp);
+	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
+	WREG32(DC_HPD3_INT_CONTROL, tmp);
+	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
+	WREG32(DC_HPD4_INT_CONTROL, tmp);
+	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
+	WREG32(DC_HPD5_INT_CONTROL, tmp);
+	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
+	WREG32(DC_HPD6_INT_CONTROL, tmp);
+
+}
+
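+/*
+ * Build the CP, per-CRTC vblank and per-connector hot-plug enable masks
+ * from the state tracked in rdev->irq and write them to the hardware.
+ */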
+int evergreen_irq_set(struct radeon_device *rdev)
+{
+	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
+	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
+	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
+
+	if (!rdev->irq.installed) {
+		WARN(1, "Can't enable IRQ/MSI because no handler is installed.\n");
+		return -EINVAL;
+	}
+	/* don't enable anything if the ih is disabled */
+	if (!rdev->ih.enabled) {
+		r600_disable_interrupts(rdev);
+		/* force the active interrupt state to all disabled */
+		evergreen_disable_interrupt_state(rdev);
+		return 0;
+	}
+
+	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
+	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
+	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
+	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
+	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
+	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
+
+	if (rdev->irq.sw_int) {
+		DRM_DEBUG("evergreen_irq_set: sw int\n");
+		cp_int_cntl |= RB_INT_ENABLE;
+	}
+	if (rdev->irq.crtc_vblank_int[0]) {
+		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
+		crtc1 |= VBLANK_INT_MASK;
+	}
+	if (rdev->irq.crtc_vblank_int[1]) {
+		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
+		crtc2 |= VBLANK_INT_MASK;
+	}
+	if (rdev->irq.crtc_vblank_int[2]) {
+		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
+		crtc3 |= VBLANK_INT_MASK;
+	}
+	if (rdev->irq.crtc_vblank_int[3]) {
+		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
+		crtc4 |= VBLANK_INT_MASK;
+	}
+	if (rdev->irq.crtc_vblank_int[4]) {
+		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
+		crtc5 |= VBLANK_INT_MASK;
+	}
+	if (rdev->irq.crtc_vblank_int[5]) {
+		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
+		crtc6 |= VBLANK_INT_MASK;
+	}
+	if (rdev->irq.hpd[0]) {
+		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
+		hpd1 |= DC_HPDx_INT_EN;
+	}
+	if (rdev->irq.hpd[1]) {
+		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
+		hpd2 |= DC_HPDx_INT_EN;
+	}
+	if (rdev->irq.hpd[2]) {
+		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
+		hpd3 |= DC_HPDx_INT_EN;
+	}
+	if (rdev->irq.hpd[3]) {
+		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
+		hpd4 |= DC_HPDx_INT_EN;
+	}
+	if (rdev->irq.hpd[4]) {
+		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
+		hpd5 |= DC_HPDx_INT_EN;
+	}
+	if (rdev->irq.hpd[5]) {
+		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
+		hpd6 |= DC_HPDx_INT_EN;
+	}
+
+	WREG32(CP_INT_CNTL, cp_int_cntl);
+
+	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
+	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
+	WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
+	WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
+	WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
+	WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
+
+	WREG32(DC_HPD1_INT_CONTROL, hpd1);
+	WREG32(DC_HPD2_INT_CONTROL, hpd2);
+	WREG32(DC_HPD3_INT_CONTROL, hpd3);
+	WREG32(DC_HPD4_INT_CONTROL, hpd4);
+	WREG32(DC_HPD5_INT_CONTROL, hpd5);
+	WREG32(DC_HPD6_INT_CONTROL, hpd6);
+
+	return 0;
+}
+
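+/*
+ * Snapshot the DISP_INTERRUPT_STATUS* registers for the caller and
+ * acknowledge any pending vblank, vline and hot-plug interrupts.
+ */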
+static inline void evergreen_irq_ack(struct radeon_device *rdev,
+				     u32 *disp_int,
+				     u32 *disp_int_cont,
+				     u32 *disp_int_cont2,
+				     u32 *disp_int_cont3,
+				     u32 *disp_int_cont4,
+				     u32 *disp_int_cont5)
+{
+	u32 tmp;
+
+	*disp_int = RREG32(DISP_INTERRUPT_STATUS);
+	*disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
+	*disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
+	*disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
+	*disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
+	*disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
+
+	if (*disp_int & LB_D1_VBLANK_INTERRUPT)
+		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
+	if (*disp_int & LB_D1_VLINE_INTERRUPT)
+		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
+
+	if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
+		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
+	if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
+		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
+
+	if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
+		WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
+	if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
+		WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
+
+	if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
+		WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
+	if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
+		WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
+
+	if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
+		WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
+	if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
+		WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
+
+	if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
+		WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
+	if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
+		WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
+
+	if (*disp_int & DC_HPD1_INTERRUPT) {
+		tmp = RREG32(DC_HPD1_INT_CONTROL);
+		tmp |= DC_HPDx_INT_ACK;
+		WREG32(DC_HPD1_INT_CONTROL, tmp);
+	}
+	if (*disp_int_cont & DC_HPD2_INTERRUPT) {
+		tmp = RREG32(DC_HPD2_INT_CONTROL);
+		tmp |= DC_HPDx_INT_ACK;
+		WREG32(DC_HPD2_INT_CONTROL, tmp);
+	}
+	if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
+		tmp = RREG32(DC_HPD3_INT_CONTROL);
+		tmp |= DC_HPDx_INT_ACK;
+		WREG32(DC_HPD3_INT_CONTROL, tmp);
+	}
+	if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
+		tmp = RREG32(DC_HPD4_INT_CONTROL);
+		tmp |= DC_HPDx_INT_ACK;
+		WREG32(DC_HPD4_INT_CONTROL, tmp);
+	}
+	if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
+		tmp = RREG32(DC_HPD5_INT_CONTROL);
+		tmp |= DC_HPDx_INT_ACK;
+		WREG32(DC_HPD5_INT_CONTROL, tmp);
+	}
+	if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
+		tmp = RREG32(DC_HPD6_INT_CONTROL);
+		tmp |= DC_HPDx_INT_ACK;
+		WREG32(DC_HPD6_INT_CONTROL, tmp);
+	}
+}
+
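+/* Fully quiesce interrupts: disable them, let pending ones settle,
+ * acknowledge what is left and force the enables back to all-disabled. */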
+void evergreen_irq_disable(struct radeon_device *rdev)
+{
+	u32 disp_int, disp_int_cont, disp_int_cont2;
+	u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
+
+	r600_disable_interrupts(rdev);
+	/* Wait and acknowledge irq */
+	mdelay(1);
+	evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
+			  &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
+	evergreen_disable_interrupt_state(rdev);
+}
+
+static void evergreen_irq_suspend(struct radeon_device *rdev)
+{
+	evergreen_irq_disable(rdev);
+	r600_rlc_stop(rdev);
+}
+
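+/*
+ * Read the IH ring write pointer.  On overflow, resynchronize the read
+ * pointer to the oldest vector that has not been overwritten and clear
+ * the overflow flag.
+ */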
+static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
+{
+	u32 wptr, tmp;
+
+	/* XXX use writeback */
+	wptr = RREG32(IH_RB_WPTR);
+
+	if (wptr & RB_OVERFLOW) {
+		/* When a ring buffer overflow happens, start parsing interrupts
+		 * from the last vector not overwritten (wptr + 16). Hopefully
+		 * this should allow us to catch up.
+		 */
+		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
+			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
+		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
+		tmp = RREG32(IH_RB_CNTL);
+		tmp |= IH_WPTR_OVERFLOW_CLEAR;
+		WREG32(IH_RB_CNTL, tmp);
+	}
+	return (wptr & rdev->ih.ptr_mask);
+}
+
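+/*
+ * Main IH handler: walk the ring from rptr to wptr, decode each 16-byte
+ * vector and dispatch vblank/vline, hot-plug and CP events, then re-check
+ * wptr in case new vectors arrived while processing.
+ */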
+int evergreen_irq_process(struct radeon_device *rdev)
+{
+	u32 wptr = evergreen_get_ih_wptr(rdev);
+	u32 rptr = rdev->ih.rptr;
+	u32 src_id, src_data;
+	u32 ring_index;
+	u32 disp_int, disp_int_cont, disp_int_cont2;
+	u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
+	unsigned long flags;
+	bool queue_hotplug = false;
+
+	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
+	if (!rdev->ih.enabled)
+		return IRQ_NONE;
+
+	spin_lock_irqsave(&rdev->ih.lock, flags);
+
+	if (rptr == wptr) {
+		spin_unlock_irqrestore(&rdev->ih.lock, flags);
+		return IRQ_NONE;
+	}
+	if (rdev->shutdown) {
+		spin_unlock_irqrestore(&rdev->ih.lock, flags);
+		return IRQ_NONE;
+	}
+
+restart_ih:
+	/* display interrupts */
+	evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
+			  &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
+
+	rdev->ih.wptr = wptr;
+	while (rptr != wptr) {
+		/* wptr/rptr are in bytes! */
+		ring_index = rptr / 4;
+		src_id = rdev->ih.ring[ring_index] & 0xff;
+		src_data = rdev->ih.ring[ring_index + 1] & 0xfffffff;
+
+		switch (src_id) {
+		case 1: /* D1 vblank/vline */
+			switch (src_data) {
+			case 0: /* D1 vblank */
+				if (disp_int & LB_D1_VBLANK_INTERRUPT) {
+					drm_handle_vblank(rdev->ddev, 0);
+					wake_up(&rdev->irq.vblank_queue);
+					disp_int &= ~LB_D1_VBLANK_INTERRUPT;
+					DRM_DEBUG("IH: D1 vblank\n");
+				}
+				break;
+			case 1: /* D1 vline */
+				if (disp_int & LB_D1_VLINE_INTERRUPT) {
+					disp_int &= ~LB_D1_VLINE_INTERRUPT;
+					DRM_DEBUG("IH: D1 vline\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 2: /* D2 vblank/vline */
+			switch (src_data) {
+			case 0: /* D2 vblank */
+				if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
+					drm_handle_vblank(rdev->ddev, 1);
+					wake_up(&rdev->irq.vblank_queue);
+					disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
+					DRM_DEBUG("IH: D2 vblank\n");
+				}
+				break;
+			case 1: /* D2 vline */
+				if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
+					disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
+					DRM_DEBUG("IH: D2 vline\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 3: /* D3 vblank/vline */
+			switch (src_data) {
+			case 0: /* D3 vblank */
+				if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
+					drm_handle_vblank(rdev->ddev, 2);
+					wake_up(&rdev->irq.vblank_queue);
+					disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
+					DRM_DEBUG("IH: D3 vblank\n");
+				}
+				break;
+			case 1: /* D3 vline */
+				if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
+					disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
+					DRM_DEBUG("IH: D3 vline\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 4: /* D4 vblank/vline */
+			switch (src_data) {
+			case 0: /* D4 vblank */
+				if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
+					drm_handle_vblank(rdev->ddev, 3);
+					wake_up(&rdev->irq.vblank_queue);
+					disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
+					DRM_DEBUG("IH: D4 vblank\n");
+				}
+				break;
+			case 1: /* D4 vline */
+				if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
+					disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
+					DRM_DEBUG("IH: D4 vline\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 5: /* D5 vblank/vline */
+			switch (src_data) {
+			case 0: /* D5 vblank */
+				if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
+					drm_handle_vblank(rdev->ddev, 4);
+					wake_up(&rdev->irq.vblank_queue);
+					disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
+					DRM_DEBUG("IH: D5 vblank\n");
+				}
+				break;
+			case 1: /* D5 vline */
+				if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
+					disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
+					DRM_DEBUG("IH: D5 vline\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 6: /* D6 vblank/vline */
+			switch (src_data) {
+			case 0: /* D6 vblank */
+				if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
+					drm_handle_vblank(rdev->ddev, 5);
+					wake_up(&rdev->irq.vblank_queue);
+					disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
+					DRM_DEBUG("IH: D6 vblank\n");
+				}
+				break;
+			case 1: /* D6 vline */
+				if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
+					disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
+					DRM_DEBUG("IH: D6 vline\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 42: /* HPD hotplug */
+			switch (src_data) {
+			case 0:
+				if (disp_int & DC_HPD1_INTERRUPT) {
+					disp_int &= ~DC_HPD1_INTERRUPT;
+					queue_hotplug = true;
+					DRM_DEBUG("IH: HPD1\n");
+				}
+				break;
+			case 1:
+				if (disp_int_cont & DC_HPD2_INTERRUPT) {
+					disp_int_cont &= ~DC_HPD2_INTERRUPT;
+					queue_hotplug = true;
+					DRM_DEBUG("IH: HPD2\n");
+				}
+				break;
+			case 2:
+				if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
+					disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
+					queue_hotplug = true;
+					DRM_DEBUG("IH: HPD3\n");
+				}
+				break;
+			case 3:
+				if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
+					disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
+					queue_hotplug = true;
+					DRM_DEBUG("IH: HPD4\n");
+				}
+				break;
+			case 4:
+				if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
+					disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
+					queue_hotplug = true;
+					DRM_DEBUG("IH: HPD5\n");
+				}
+				break;
+			case 5:
+				if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
+					disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
+					queue_hotplug = true;
+					DRM_DEBUG("IH: HPD6\n");
+				}
+				break;
+			default:
+				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+				break;
+			}
+			break;
+		case 176: /* CP_INT in ring buffer */
+		case 177: /* CP_INT in IB1 */
+		case 178: /* CP_INT in IB2 */
+			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
+			radeon_fence_process(rdev);
+			break;
+		case 181: /* CP EOP event */
+			DRM_DEBUG("IH: CP EOP\n");
+			break;
+		default:
+			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
+			break;
+		}
+
+		/* wptr/rptr are in bytes! */
+		rptr += 16;
+		rptr &= rdev->ih.ptr_mask;
+	}
+	/* make sure wptr hasn't changed while processing */
+	wptr = evergreen_get_ih_wptr(rdev);
+	if (wptr != rdev->ih.wptr)
+		goto restart_ih;
+	if (queue_hotplug)
+		queue_work(rdev->wq, &rdev->hotplug_work);
+	rdev->ih.rptr = rptr;
+	WREG32(IH_RB_RPTR, rdev->ih.rptr);
+	spin_unlock_irqrestore(&rdev->ih.lock, flags);
+	return IRQ_HANDLED;
+}
+
 static int evergreen_startup(struct radeon_device *rdev)
 {
 	int r;
 
-	/* XXX until interrupts are supported */
-	if (!rdev->me_fw || !rdev->pfp_fw /*|| !rdev->rlc_fw*/) {
+	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
 		r = r600_init_microcode(rdev);
 		if (r) {
 			DRM_ERROR("Failed to load firmware!\n");
@@ -1246,6 +1766,7 @@ static int evergreen_startup(struct radeon_device *rdev)
 		DRM_ERROR("failed to pin blit object %d\n", r);
 		return r;
 	}
+#endif
 
 	/* Enable IRQ */
 	r = r600_irq_init(rdev);
@@ -1254,8 +1775,7 @@ static int evergreen_startup(struct radeon_device *rdev)
 		radeon_irq_kms_fini(rdev);
 		return r;
 	}
-	r600_irq_set(rdev);
-#endif
+	evergreen_irq_set(rdev);
 
 	r = radeon_ring_init(rdev, rdev->cp.ring_size);
 	if (r)
@@ -1312,8 +1832,8 @@ int evergreen_suspend(struct radeon_device *rdev)
 	/* FIXME: we should wait for ring to be empty */
 	r700_cp_stop(rdev);
 	rdev->cp.ready = false;
+	evergreen_irq_suspend(rdev);
 	r600_wb_disable(rdev);
-
 	evergreen_pcie_gart_disable(rdev);
 #if 0
 	/* unpin shaders bo */
@@ -1415,17 +1935,17 @@ int evergreen_init(struct radeon_device *rdev)
 	r = radeon_bo_init(rdev);
 	if (r)
 		return r;
-#if 0
+
 	r = radeon_irq_kms_init(rdev);
 	if (r)
 		return r;
-#endif
+
 	rdev->cp.ring_obj = NULL;
 	r600_ring_init(rdev, 1024 * 1024);
-#if 0
+
 	rdev->ih.ring_obj = NULL;
 	r600_ih_ring_init(rdev, 64 * 1024);
-#endif
+
 	r = r600_pcie_gart_init(rdev);
 	if (r)
 		return r;
@@ -1436,10 +1956,8 @@ int evergreen_init(struct radeon_device *rdev)
 		dev_err(rdev->dev, "disabling GPU acceleration\n");
 		r700_cp_fini(rdev);
 		r600_wb_fini(rdev);
-#if 0
 		r600_irq_fini(rdev);
 		radeon_irq_kms_fini(rdev);
-#endif
 		evergreen_pcie_gart_fini(rdev);
 		rdev->accel_working = false;
 	}
@@ -1461,14 +1979,11 @@ int evergreen_init(struct radeon_device *rdev)
 void evergreen_fini(struct radeon_device *rdev)
 {
 	radeon_pm_fini(rdev);
-	evergreen_suspend(rdev);
-#if 0
-	r600_blit_fini(rdev);
+	/*r600_blit_fini(rdev);*/
+	r700_cp_fini(rdev);
+	r600_wb_fini(rdev);
 	r600_irq_fini(rdev);
 	radeon_irq_kms_fini(rdev);
-	radeon_ring_fini(rdev);
-	r600_wb_fini(rdev);
-#endif
 	evergreen_pcie_gart_fini(rdev);
 	radeon_gem_fini(rdev);
 	radeon_fence_driver_fini(rdev);