Diffstat (limited to 'drivers/gpu/drm/radeon/r300.c')
-rw-r--r--  drivers/gpu/drm/radeon/r300.c | 17
1 file changed, 16 insertions(+), 1 deletion(-)
diff --git a/drivers/gpu/drm/radeon/r300.c b/drivers/gpu/drm/radeon/r300.c
index 3f2cc9e2e8d9..0051d11b907c 100644
--- a/drivers/gpu/drm/radeon/r300.c
+++ b/drivers/gpu/drm/radeon/r300.c
@@ -36,7 +36,15 @@
 #include "rv350d.h"
 #include "r300_reg_safe.h"
 
-/* This files gather functions specifics to: r300,r350,rv350,rv370,rv380 */
+/* This files gather functions specifics to: r300,r350,rv350,rv370,rv380
+ *
+ * GPU Errata:
+ * - HOST_PATH_CNTL: r300 family seems to dislike write to HOST_PATH_CNTL
+ *   using MMIO to flush host path read cache, this lead to HARDLOCKUP.
+ *   However, scheduling such write to the ring seems harmless, i suspect
+ *   the CP read collide with the flush somehow, or maybe the MC, hard to
+ *   tell. (Jerome Glisse)
+ */
 
 /*
  * rv370,rv380 PCIE GART
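For context on the errata described above: a flush issued directly over MMIO would look roughly like the sketch below (a hypothetical helper, not part of this patch, assuming the driver's usual RREG32()/WREG32() register accessors). On r300-family chips this is the pattern reported to hard-lock the GPU, which is why the patch routes the same two writes through the CP ring instead.

static void r300_hdp_flush_mmio(struct radeon_device *rdev)	/* hypothetical */
{
	u32 tmp = RREG32(RADEON_HOST_PATH_CNTL);

	/* Kick the HDP read-buffer invalidate directly over MMIO ... */
	WREG32(RADEON_HOST_PATH_CNTL, tmp | RADEON_HDP_READ_BUFFER_INVALIDATE);
	/* ... then restore the original value. Per the errata, doing this
	 * from the CPU while the CP/MC is active can hard-lock the GPU. */
	WREG32(RADEON_HOST_PATH_CNTL, tmp);
}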
@@ -178,6 +186,11 @@ void r300_fence_ring_emit(struct radeon_device *rdev,
 	/* Wait until IDLE & CLEAN */
 	radeon_ring_write(rdev, PACKET0(0x1720, 0));
 	radeon_ring_write(rdev, (1 << 17) | (1 << 16) | (1 << 9));
+	radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
+	radeon_ring_write(rdev, rdev->config.r300.hdp_cntl |
+				RADEON_HDP_READ_BUFFER_INVALIDATE);
+	radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
+	radeon_ring_write(rdev, rdev->config.r300.hdp_cntl);
 	/* Emit fence sequence & fire IRQ */
 	radeon_ring_write(rdev, PACKET0(rdev->fence_drv.scratch_reg, 0));
 	radeon_ring_write(rdev, fence->seq);
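The four ring writes added above form one logical operation; factored out, a minimal sketch would look like this (hypothetical helper name, the patch itself emits the writes inline). PACKET0(reg, 0) builds a type-0 CP packet telling the command processor to write the following dword into the given register, so the two packets set the invalidate bit and then restore the HOST_PATH_CNTL value cached at startup.

static void r300_ring_hdp_flush(struct radeon_device *rdev)	/* hypothetical */
{
	/* Have the CP write HOST_PATH_CNTL with the invalidate bit set ... */
	radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
	radeon_ring_write(rdev, rdev->config.r300.hdp_cntl |
				RADEON_HDP_READ_BUFFER_INVALIDATE);
	/* ... then restore the cached value once the flush is latched. */
	radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
	radeon_ring_write(rdev, rdev->config.r300.hdp_cntl);
}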
@@ -1258,6 +1271,7 @@ static int r300_startup(struct radeon_device *rdev)
 	}
 	/* Enable IRQ */
 	r100_irq_set(rdev);
+	rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
 	/* 1M ring buffer */
 	r = r100_cp_init(rdev, 1024 * 1024);
 	if (r) {
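The RREG32() added in r300_startup() snapshots HOST_PATH_CNTL once, so the fence path above never has to read the register back over MMIO at emit time. Presumably the cached value lives in the per-ASIC config, along the lines of the sketch below (the field name comes from the diff; the surrounding struct r300_asic layout in radeon.h is assumed, not shown by this patch).

struct r300_asic {
	/* ... existing per-ASIC fields ... */
	u32	hdp_cntl;	/* HOST_PATH_CNTL snapshot taken in r300_startup() */
};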
@@ -1322,6 +1336,7 @@ void r300_fini(struct radeon_device *rdev)
 	rv370_pcie_gart_fini(rdev);
 	if (rdev->flags & RADEON_IS_PCI)
 		r100_pci_gart_fini(rdev);
+	radeon_agp_fini(rdev);
 	radeon_irq_kms_fini(rdev);
 	radeon_fence_driver_fini(rdev);
 	radeon_bo_fini(rdev);