Diffstat (limited to 'drivers/gpu')
-rw-r--r--  drivers/gpu/drm/drm_irq.c             |  20
-rw-r--r--  drivers/gpu/drm/drm_lock.c            |  33
-rw-r--r--  drivers/gpu/drm/radeon/r300_cmdbuf.c  | 196
-rw-r--r--  drivers/gpu/drm/radeon/r300_reg.h     |   5
-rw-r--r--  drivers/gpu/drm/radeon/radeon_cp.c    |  38
-rw-r--r--  drivers/gpu/drm/radeon/radeon_drv.h   |  19
6 files changed, 225 insertions(+), 86 deletions(-)
diff --git a/drivers/gpu/drm/drm_irq.c b/drivers/gpu/drm/drm_irq.c
index 089c015c01d1..53f0e5af1cc8 100644
--- a/drivers/gpu/drm/drm_irq.c
+++ b/drivers/gpu/drm/drm_irq.c
@@ -400,27 +400,31 @@ static void drm_locked_tasklet_func(unsigned long data)
 {
 	struct drm_device *dev = (struct drm_device *)data;
 	unsigned long irqflags;
-
+	void (*tasklet_func)(struct drm_device *);
+
 	spin_lock_irqsave(&dev->tasklet_lock, irqflags);
+	tasklet_func = dev->locked_tasklet_func;
+	spin_unlock_irqrestore(&dev->tasklet_lock, irqflags);
 
-	if (!dev->locked_tasklet_func ||
+	if (!tasklet_func ||
 	    !drm_lock_take(&dev->lock,
 			   DRM_KERNEL_CONTEXT)) {
-		spin_unlock_irqrestore(&dev->tasklet_lock, irqflags);
 		return;
 	}
 
 	dev->lock.lock_time = jiffies;
 	atomic_inc(&dev->counts[_DRM_STAT_LOCKS]);
 
-	dev->locked_tasklet_func(dev);
+	spin_lock_irqsave(&dev->tasklet_lock, irqflags);
+	tasklet_func = dev->locked_tasklet_func;
+	dev->locked_tasklet_func = NULL;
+	spin_unlock_irqrestore(&dev->tasklet_lock, irqflags);
+
+	if (tasklet_func != NULL)
+		tasklet_func(dev);
 
 	drm_lock_free(&dev->lock,
 		      DRM_KERNEL_CONTEXT);
-
-	dev->locked_tasklet_func = NULL;
-
-	spin_unlock_irqrestore(&dev->tasklet_lock, irqflags);
 }
 
 /**
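The drm_irq.c hunk above reworks drm_locked_tasklet_func() so that dev->tasklet_lock is held only while the locked_tasklet_func pointer is read (and, later, read-and-cleared), and the callback itself runs with the spinlock dropped. Below is a minimal userspace sketch of that snapshot-then-call pattern, using a pthread mutex in place of the kernel spinlock; the dispatcher_t type and run_pending() helper are hypothetical names for illustration, not DRM code.

/* Illustrative analogue of the pattern above: snapshot a callback
 * pointer under a short-lived lock, clear it while still locked, and
 * invoke it only after the lock has been dropped. */
#include <pthread.h>
#include <stdio.h>

typedef struct {
	pthread_mutex_t lock;
	void (*pending)(void *ctx);
	void *ctx;
} dispatcher_t;

static void run_pending(dispatcher_t *d)
{
	void (*fn)(void *ctx);
	void *ctx;

	pthread_mutex_lock(&d->lock);
	fn = d->pending;	/* take a snapshot ... */
	ctx = d->ctx;
	d->pending = NULL;	/* ... and consume it while locked */
	pthread_mutex_unlock(&d->lock);

	if (fn != NULL)
		fn(ctx);	/* call with the lock dropped */
}

static void hello(void *ctx)
{
	printf("pending work ran: %s\n", (const char *)ctx);
}

int main(void)
{
	dispatcher_t d = { PTHREAD_MUTEX_INITIALIZER, hello, "once" };

	run_pending(&d);	/* runs the callback */
	run_pending(&d);	/* no-op: pointer already consumed */
	return 0;
}

Compile with "cc demo.c -lpthread"; the second call prints nothing because the pointer was cleared under the lock, which is exactly why the patch can drop the spinlock before calling the tasklet.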
diff --git a/drivers/gpu/drm/drm_lock.c b/drivers/gpu/drm/drm_lock.c
index 0998723cde79..a4caf95485d7 100644
--- a/drivers/gpu/drm/drm_lock.c
+++ b/drivers/gpu/drm/drm_lock.c
@@ -105,14 +105,19 @@ int drm_lock(struct drm_device *dev, void *data, struct drm_file *file_priv)
 		  ret ? "interrupted" : "has lock");
 	if (ret) return ret;
 
-	sigemptyset(&dev->sigmask);
-	sigaddset(&dev->sigmask, SIGSTOP);
-	sigaddset(&dev->sigmask, SIGTSTP);
-	sigaddset(&dev->sigmask, SIGTTIN);
-	sigaddset(&dev->sigmask, SIGTTOU);
-	dev->sigdata.context = lock->context;
-	dev->sigdata.lock = dev->lock.hw_lock;
-	block_all_signals(drm_notifier, &dev->sigdata, &dev->sigmask);
+	/* don't block all signals on the master process for now;
+	 * probably not the correct answer, but it lets us debug the
+	 * xkb/X server for now */
+	if (!file_priv->master) {
+		sigemptyset(&dev->sigmask);
+		sigaddset(&dev->sigmask, SIGSTOP);
+		sigaddset(&dev->sigmask, SIGTSTP);
+		sigaddset(&dev->sigmask, SIGTTIN);
+		sigaddset(&dev->sigmask, SIGTTOU);
+		dev->sigdata.context = lock->context;
+		dev->sigdata.lock = dev->lock.hw_lock;
+		block_all_signals(drm_notifier, &dev->sigdata, &dev->sigmask);
+	}
 
 	if (dev->driver->dma_ready && (lock->flags & _DRM_LOCK_READY))
 		dev->driver->dma_ready(dev);
@@ -150,6 +155,7 @@ int drm_unlock(struct drm_device *dev, void *data, struct drm_file *file_priv)
 {
 	struct drm_lock *lock = data;
 	unsigned long irqflags;
+	void (*tasklet_func)(struct drm_device *);
 
 	if (lock->context == DRM_KERNEL_CONTEXT) {
 		DRM_ERROR("Process %d using kernel context %d\n",
@@ -158,14 +164,11 @@ int drm_unlock(struct drm_device *dev, void *data, struct drm_file *file_priv)
 	}
 
 	spin_lock_irqsave(&dev->tasklet_lock, irqflags);
-
-	if (dev->locked_tasklet_func) {
-		dev->locked_tasklet_func(dev);
-
-		dev->locked_tasklet_func = NULL;
-	}
-
+	tasklet_func = dev->locked_tasklet_func;
+	dev->locked_tasklet_func = NULL;
 	spin_unlock_irqrestore(&dev->tasklet_lock, irqflags);
+	if (tasklet_func != NULL)
+		tasklet_func(dev);
 
 	atomic_inc(&dev->counts[_DRM_STAT_UNLOCKS]);
 
diff --git a/drivers/gpu/drm/radeon/r300_cmdbuf.c b/drivers/gpu/drm/radeon/r300_cmdbuf.c
index 702df45320f7..4b27d9abb7bc 100644
--- a/drivers/gpu/drm/radeon/r300_cmdbuf.c
+++ b/drivers/gpu/drm/radeon/r300_cmdbuf.c
@@ -77,6 +77,9 @@ static int r300_emit_cliprects(drm_radeon_private_t *dev_priv,
 				return -EFAULT;
 			}
 
+			box.x2--; /* Hardware expects inclusive bottom-right corner */
+			box.y2--;
+
 			if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_RV515) {
 				box.x1 = (box.x1) &
 					R300_CLIPRECT_MASK;
@@ -95,8 +98,8 @@ static int r300_emit_cliprects(drm_radeon_private_t *dev_priv,
 					R300_CLIPRECT_MASK;
 				box.y2 = (box.y2 + R300_CLIPRECT_OFFSET) &
 					R300_CLIPRECT_MASK;
-
 			}
+
 			OUT_RING((box.x1 << R300_CLIPRECT_X_SHIFT) |
 				 (box.y1 << R300_CLIPRECT_Y_SHIFT));
 			OUT_RING((box.x2 << R300_CLIPRECT_X_SHIFT) |
@@ -136,6 +139,18 @@ static int r300_emit_cliprects(drm_radeon_private_t *dev_priv,
 		ADVANCE_RING();
 	}
 
+	/* flush cache and wait idle clean after cliprect change */
+	BEGIN_RING(2);
+	OUT_RING(CP_PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
+	OUT_RING(R300_RB3D_DC_FLUSH);
+	ADVANCE_RING();
+	BEGIN_RING(2);
+	OUT_RING(CP_PACKET0(RADEON_WAIT_UNTIL, 0));
+	OUT_RING(RADEON_WAIT_3D_IDLECLEAN);
+	ADVANCE_RING();
+	/* set flush flag */
+	dev_priv->track_flush |= RADEON_FLUSH_EMITED;
+
 	return 0;
 }
 
@@ -166,13 +181,13 @@ void r300_init_reg_flags(struct drm_device *dev)
 	ADD_RANGE(0x21DC, 1);
 	ADD_RANGE(R300_VAP_UNKNOWN_221C, 1);
 	ADD_RANGE(R300_VAP_CLIP_X_0, 4);
-	ADD_RANGE(R300_VAP_PVS_WAITIDLE, 1);
+	ADD_RANGE(R300_VAP_PVS_STATE_FLUSH_REG, 1);
 	ADD_RANGE(R300_VAP_UNKNOWN_2288, 1);
 	ADD_RANGE(R300_VAP_OUTPUT_VTX_FMT_0, 2);
 	ADD_RANGE(R300_VAP_PVS_CNTL_1, 3);
 	ADD_RANGE(R300_GB_ENABLE, 1);
 	ADD_RANGE(R300_GB_MSPOS0, 5);
-	ADD_RANGE(R300_TX_CNTL, 1);
+	ADD_RANGE(R300_TX_INVALTAGS, 1);
 	ADD_RANGE(R300_TX_ENABLE, 1);
 	ADD_RANGE(0x4200, 4);
 	ADD_RANGE(0x4214, 1);
@@ -388,15 +403,28 @@ static __inline__ int r300_emit_vpu(drm_radeon_private_t *dev_priv,
 	if (sz * 16 > cmdbuf->bufsz)
 		return -EINVAL;
 
-	BEGIN_RING(5 + sz * 4);
-	/* Wait for VAP to come to senses.. */
-	/* there is no need to emit it multiple times, (only once before VAP is programmed,
-	   but this optimization is for later */
-	OUT_RING_REG(R300_VAP_PVS_WAITIDLE, 0);
+	/* VAP is very sensitive so we purge cache before we program it
+	 * and we also flush its state before & after */
+	BEGIN_RING(6);
+	OUT_RING(CP_PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
+	OUT_RING(R300_RB3D_DC_FLUSH);
+	OUT_RING(CP_PACKET0(RADEON_WAIT_UNTIL, 0));
+	OUT_RING(RADEON_WAIT_3D_IDLECLEAN);
+	OUT_RING(CP_PACKET0(R300_VAP_PVS_STATE_FLUSH_REG, 0));
+	OUT_RING(0);
+	ADVANCE_RING();
+	/* set flush flag */
+	dev_priv->track_flush |= RADEON_FLUSH_EMITED;
+
+	BEGIN_RING(3 + sz * 4);
 	OUT_RING_REG(R300_VAP_PVS_UPLOAD_ADDRESS, addr);
 	OUT_RING(CP_PACKET0_TABLE(R300_VAP_PVS_UPLOAD_DATA, sz * 4 - 1));
 	OUT_RING_TABLE((int *)cmdbuf->buf, sz * 4);
+	ADVANCE_RING();
 
+	BEGIN_RING(2);
+	OUT_RING(CP_PACKET0(R300_VAP_PVS_STATE_FLUSH_REG, 0));
+	OUT_RING(0);
 	ADVANCE_RING();
 
 	cmdbuf->buf += sz * 16;
@@ -424,6 +452,15 @@ static __inline__ int r300_emit_clear(drm_radeon_private_t *dev_priv,
 	OUT_RING_TABLE((int *)cmdbuf->buf, 8);
 	ADVANCE_RING();
 
+	BEGIN_RING(4);
+	OUT_RING(CP_PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
+	OUT_RING(R300_RB3D_DC_FLUSH);
+	OUT_RING(CP_PACKET0(RADEON_WAIT_UNTIL, 0));
+	OUT_RING(RADEON_WAIT_3D_IDLECLEAN);
+	ADVANCE_RING();
+	/* set flush flag */
+	dev_priv->track_flush |= RADEON_FLUSH_EMITED;
+
 	cmdbuf->buf += 8 * 4;
 	cmdbuf->bufsz -= 8 * 4;
 
@@ -543,22 +580,23 @@ static __inline__ int r300_emit_bitblt_multi(drm_radeon_private_t *dev_priv,
 	return 0;
 }
 
-static __inline__ int r300_emit_indx_buffer(drm_radeon_private_t *dev_priv,
+static __inline__ int r300_emit_draw_indx_2(drm_radeon_private_t *dev_priv,
 					    drm_radeon_kcmd_buffer_t *cmdbuf)
 {
-	u32 *cmd = (u32 *) cmdbuf->buf;
-	int count, ret;
+	u32 *cmd;
+	int count;
+	int expected_count;
 	RING_LOCALS;
 
-	count=(cmd[0]>>16) & 0x3fff;
+	cmd = (u32 *) cmdbuf->buf;
+	count = (cmd[0]>>16) & 0x3fff;
+	expected_count = cmd[1] >> 16;
+	if (!(cmd[1] & R300_VAP_VF_CNTL__INDEX_SIZE_32bit))
+		expected_count = (expected_count+1)/2;
 
-	if ((cmd[1] & 0x8000ffff) != 0x80000810) {
-		DRM_ERROR("Invalid indx_buffer reg address %08X\n", cmd[1]);
-		return -EINVAL;
-	}
-	ret = !radeon_check_offset(dev_priv, cmd[2]);
-	if (ret) {
-		DRM_ERROR("Invalid indx_buffer offset is %08X\n", cmd[2]);
+	if (count && count != expected_count) {
+		DRM_ERROR("3D_DRAW_INDX_2: packet size %i, expected %i\n",
+			  count, expected_count);
 		return -EINVAL;
 	}
 
@@ -570,6 +608,50 @@ static __inline__ int r300_emit_indx_buffer(drm_radeon_private_t *dev_priv,
 	cmdbuf->buf += (count+2)*4;
 	cmdbuf->bufsz -= (count+2)*4;
 
+	if (!count) {
+		drm_r300_cmd_header_t header;
+
+		if (cmdbuf->bufsz < 4*4 + sizeof(header)) {
+			DRM_ERROR("3D_DRAW_INDX_2: expect subsequent INDX_BUFFER, but stream is too short.\n");
+			return -EINVAL;
+		}
+
+		header.u = *(unsigned int *)cmdbuf->buf;
+
+		cmdbuf->buf += sizeof(header);
+		cmdbuf->bufsz -= sizeof(header);
+		cmd = (u32 *) cmdbuf->buf;
+
+		if (header.header.cmd_type != R300_CMD_PACKET3 ||
+		    header.packet3.packet != R300_CMD_PACKET3_RAW ||
+		    cmd[0] != CP_PACKET3(RADEON_CP_INDX_BUFFER, 2)) {
+			DRM_ERROR("3D_DRAW_INDX_2: expect subsequent INDX_BUFFER.\n");
+			return -EINVAL;
+		}
+
+		if ((cmd[1] & 0x8000ffff) != 0x80000810) {
+			DRM_ERROR("Invalid indx_buffer reg address %08X\n", cmd[1]);
+			return -EINVAL;
+		}
+		if (!radeon_check_offset(dev_priv, cmd[2])) {
+			DRM_ERROR("Invalid indx_buffer offset is %08X\n", cmd[2]);
+			return -EINVAL;
+		}
+		if (cmd[3] != expected_count) {
+			DRM_ERROR("INDX_BUFFER: buffer size %i, expected %i\n",
+				  cmd[3], expected_count);
+			return -EINVAL;
+		}
+
+		BEGIN_RING(4);
+		OUT_RING(cmd[0]);
+		OUT_RING_TABLE((int *)(cmdbuf->buf + 4), 3);
+		ADVANCE_RING();
+
+		cmdbuf->buf += 4*4;
+		cmdbuf->bufsz -= 4*4;
+	}
+
 	return 0;
 }
 
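For reference, the size check in r300_emit_draw_indx_2() above derives the expected INDX_BUFFER payload from the 3D_DRAW_INDX_2 header: the index count sits in the upper 16 bits of the VAP_VF_CNTL dword, and with 16-bit indices two of them pack into each 32-bit dword, hence the (count+1)/2 rounding. Here is a small standalone check of that arithmetic; the bit position used for the 32-bit-index flag is an assumption for illustration, not taken from this patch.

/* Standalone sketch of the expected-size computation used above.
 * INDEX_SIZE_32BIT's bit position is assumed for illustration. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define INDEX_SIZE_32BIT	(1u << 11)	/* assumed flag bit */

static uint32_t expected_dwords(uint32_t vf_cntl)
{
	uint32_t count = vf_cntl >> 16;		/* index count, upper 16 bits */

	if (vf_cntl & INDEX_SIZE_32BIT)
		return count;			/* one 32-bit index per dword */
	return (count + 1) / 2;			/* two 16-bit indices per dword */
}

int main(void)
{
	assert(expected_dwords(5u << 16) == 3);	/* 5 16-bit indices round up */
	assert(expected_dwords((5u << 16) | INDEX_SIZE_32BIT) == 5);
	printf("index-buffer size checks pass\n");
	return 0;
}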
@@ -613,11 +695,22 @@ static __inline__ int r300_emit_raw_packet3(drm_radeon_private_t *dev_priv,
 	case RADEON_CNTL_BITBLT_MULTI:
 		return r300_emit_bitblt_multi(dev_priv, cmdbuf);
 
-	case RADEON_CP_INDX_BUFFER:	/* DRAW_INDX_2 without INDX_BUFFER seems to lock up the gpu */
-		return r300_emit_indx_buffer(dev_priv, cmdbuf);
-	case RADEON_CP_3D_DRAW_IMMD_2:	/* triggers drawing using in-packet vertex data */
-	case RADEON_CP_3D_DRAW_VBUF_2:	/* triggers drawing of vertex buffers setup elsewhere */
-	case RADEON_CP_3D_DRAW_INDX_2:	/* triggers drawing using indices to vertex buffer */
+	case RADEON_CP_INDX_BUFFER:
+		DRM_ERROR("packet3 INDX_BUFFER without preceding 3D_DRAW_INDX_2 is illegal.\n");
+		return -EINVAL;
+	case RADEON_CP_3D_DRAW_IMMD_2:
+		/* triggers drawing using in-packet vertex data */
+	case RADEON_CP_3D_DRAW_VBUF_2:
+		/* triggers drawing of vertex buffers setup elsewhere */
+		dev_priv->track_flush &= ~(RADEON_FLUSH_EMITED |
+					   RADEON_PURGE_EMITED);
+		break;
+	case RADEON_CP_3D_DRAW_INDX_2:
+		/* triggers drawing using indices to vertex buffer */
+		/* whenever we send vertex we clear flush & purge */
+		dev_priv->track_flush &= ~(RADEON_FLUSH_EMITED |
+					   RADEON_PURGE_EMITED);
+		return r300_emit_draw_indx_2(dev_priv, cmdbuf);
 	case RADEON_WAIT_FOR_IDLE:
 	case RADEON_CP_NOP:
 		/* these packets are safe */
@@ -713,17 +806,53 @@ static __inline__ int r300_emit_packet3(drm_radeon_private_t *dev_priv,
  */
 static __inline__ void r300_pacify(drm_radeon_private_t *dev_priv)
 {
+	uint32_t cache_z, cache_3d, cache_2d;
 	RING_LOCALS;
 
-	BEGIN_RING(6);
-	OUT_RING(CP_PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
-	OUT_RING(R300_RB3D_DSTCACHE_UNKNOWN_0A);
+	cache_z = R300_ZC_FLUSH;
+	cache_2d = R300_RB2D_DC_FLUSH;
+	cache_3d = R300_RB3D_DC_FLUSH;
+	if (!(dev_priv->track_flush & RADEON_PURGE_EMITED)) {
+		/* we can purge: primitives were drawn since the last purge */
+		cache_z |= R300_ZC_FREE;
+		cache_2d |= R300_RB2D_DC_FREE;
+		cache_3d |= R300_RB3D_DC_FREE;
+	}
+
+	/* flush & purge zbuffer */
+	BEGIN_RING(2);
 	OUT_RING(CP_PACKET0(R300_ZB_ZCACHE_CTLSTAT, 0));
-	OUT_RING(R300_ZB_ZCACHE_CTLSTAT_ZC_FLUSH_FLUSH_AND_FREE|
-		 R300_ZB_ZCACHE_CTLSTAT_ZC_FREE_FREE);
-	OUT_RING(CP_PACKET3(RADEON_CP_NOP, 0));
-	OUT_RING(0x0);
+	OUT_RING(cache_z);
+	ADVANCE_RING();
+	/* flush & purge 3d */
+	BEGIN_RING(2);
+	OUT_RING(CP_PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
+	OUT_RING(cache_3d);
+	ADVANCE_RING();
+	/* flush & purge texture */
+	BEGIN_RING(2);
+	OUT_RING(CP_PACKET0(R300_TX_INVALTAGS, 0));
+	OUT_RING(0);
+	ADVANCE_RING();
+	/* FIXME: is this one really needed? */
+	BEGIN_RING(2);
+	OUT_RING(CP_PACKET0(R300_RB3D_AARESOLVE_CTL, 0));
+	OUT_RING(0);
+	ADVANCE_RING();
+	BEGIN_RING(2);
+	OUT_RING(CP_PACKET0(RADEON_WAIT_UNTIL, 0));
+	OUT_RING(RADEON_WAIT_3D_IDLECLEAN);
+	ADVANCE_RING();
+	/* flush & purge 2d through E2 as RB2D will trigger lockup */
+	BEGIN_RING(4);
+	OUT_RING(CP_PACKET0(R300_DSTCACHE_CTLSTAT, 0));
+	OUT_RING(cache_2d);
+	OUT_RING(CP_PACKET0(RADEON_WAIT_UNTIL, 0));
+	OUT_RING(RADEON_WAIT_2D_IDLECLEAN |
+		 RADEON_WAIT_HOST_IDLECLEAN);
 	ADVANCE_RING();
+	/* set flush & purge flags */
+	dev_priv->track_flush |= RADEON_FLUSH_EMITED | RADEON_PURGE_EMITED;
 }
 
 /**
@@ -905,8 +1034,7 @@ int r300_do_cp_cmdbuf(struct drm_device *dev,
 
 	DRM_DEBUG("\n");
 
-	/* See the comment above r300_emit_begin3d for why this call must be here,
-	 * and what the cleanup gotos are for. */
+	/* pacify */
 	r300_pacify(dev_priv);
 
 	if (cmdbuf->nbox <= R300_SIMULTANEOUS_CLIPRECTS) {
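Taken together, the r300_cmdbuf.c hunks thread a track_flush bitmask through command submission: every emitted flush sets RADEON_FLUSH_EMITED, r300_pacify() sets both flags after it runs, and every draw packet clears them, so a pacify only adds the cache FREE (purge) bits when no purge has been emitted since the last draw. The following compact model of that bookkeeping is a sketch, not the driver code.

/* Sketch of the flush/purge tracking added above: draws clear the
 * flags, a pacify always flushes but only purges when no purge has
 * been emitted since the last draw. */
#include <stdio.h>

#define FLUSH_EMITED	(1 << 0)
#define PURGE_EMITED	(1 << 1)

static int track_flush;

static void draw(void)
{
	track_flush &= ~(FLUSH_EMITED | PURGE_EMITED);
	printf("draw   : flags=0x%x\n", track_flush);
}

static void pacify(void)
{
	int purge = !(track_flush & PURGE_EMITED);

	printf("pacify : flush%s\n", purge ? " + purge" : " only");
	track_flush |= FLUSH_EMITED | PURGE_EMITED;
}

int main(void)
{
	draw();
	pacify();	/* flush + purge: something was drawn */
	pacify();	/* flush only: nothing drawn since last purge */
	draw();
	pacify();	/* flush + purge again */
	return 0;
}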
diff --git a/drivers/gpu/drm/radeon/r300_reg.h b/drivers/gpu/drm/radeon/r300_reg.h
index a6802f26afc4..ee6f811599a3 100644
--- a/drivers/gpu/drm/radeon/r300_reg.h
+++ b/drivers/gpu/drm/radeon/r300_reg.h
@@ -317,7 +317,7 @@ USE OR OTHER DEALINGS IN THE SOFTWARE.
  * Therefore, I suspect writing zero to 0x2284 synchronizes the engine and
  * avoids bugs caused by still running shaders reading bad data from memory.
  */
-#define R300_VAP_PVS_WAITIDLE                 0x2284 /* GUESS */
+#define R300_VAP_PVS_STATE_FLUSH_REG          0x2284
 
 /* Absolutely no clue what this register is about. */
 #define R300_VAP_UNKNOWN_2288                 0x2288
@@ -513,7 +513,7 @@ USE OR OTHER DEALINGS IN THE SOFTWARE.
 /* gap */
 
 /* Zero to flush caches. */
-#define R300_TX_CNTL                          0x4100
+#define R300_TX_INVALTAGS                     0x4100
 #define R300_TX_FLUSH                         0x0
 
 /* The upper enable bits are guessed, based on fglrx reported limits. */
@@ -1362,6 +1362,7 @@ USE OR OTHER DEALINGS IN THE SOFTWARE.
 #define R300_RB3D_COLORPITCH2                 0x4E40 /* GUESS */
 #define R300_RB3D_COLORPITCH3                 0x4E44 /* GUESS */
 
+#define R300_RB3D_AARESOLVE_CTL               0x4E88
 /* gap */
 
 /* Guess by Vladimir.
diff --git a/drivers/gpu/drm/radeon/radeon_cp.c b/drivers/gpu/drm/radeon/radeon_cp.c
index f0de81a5689d..3331f88dcfb6 100644
--- a/drivers/gpu/drm/radeon/radeon_cp.c
+++ b/drivers/gpu/drm/radeon/radeon_cp.c
@@ -40,6 +40,7 @@
 #define RADEON_FIFO_DEBUG	0
 
 static int radeon_do_cleanup_cp(struct drm_device * dev);
+static void radeon_do_cp_start(drm_radeon_private_t * dev_priv);
 
 static u32 R500_READ_MCIND(drm_radeon_private_t *dev_priv, int addr)
 {
@@ -198,23 +199,8 @@ static int radeon_do_pixcache_flush(drm_radeon_private_t * dev_priv)
 			DRM_UDELAY(1);
 		}
 	} else {
-		/* 3D */
-		tmp = RADEON_READ(R300_RB3D_DSTCACHE_CTLSTAT);
-		tmp |= RADEON_RB3D_DC_FLUSH_ALL;
-		RADEON_WRITE(R300_RB3D_DSTCACHE_CTLSTAT, tmp);
-
-		/* 2D */
-		tmp = RADEON_READ(R300_DSTCACHE_CTLSTAT);
-		tmp |= RADEON_RB3D_DC_FLUSH_ALL;
-		RADEON_WRITE(R300_DSTCACHE_CTLSTAT, tmp);
-
-		for (i = 0; i < dev_priv->usec_timeout; i++) {
-			if (!(RADEON_READ(R300_DSTCACHE_CTLSTAT)
-			      & RADEON_RB3D_DC_BUSY)) {
-				return 0;
-			}
-			DRM_UDELAY(1);
-		}
+		/* don't flush or purge the cache here, it causes a lockup */
+		return 0;
 	}
 
 #if RADEON_FIFO_DEBUG
@@ -237,6 +223,9 @@ static int radeon_do_wait_for_fifo(drm_radeon_private_t * dev_priv, int entries)
 			return 0;
 		DRM_UDELAY(1);
 	}
+	DRM_INFO("wait for fifo failed status : 0x%08X 0x%08X\n",
+		 RADEON_READ(RADEON_RBBM_STATUS),
+		 RADEON_READ(R300_VAP_CNTL_STATUS));
 
 #if RADEON_FIFO_DEBUG
 	DRM_ERROR("failed!\n");
@@ -263,6 +252,9 @@ static int radeon_do_wait_for_idle(drm_radeon_private_t * dev_priv)
 		}
 		DRM_UDELAY(1);
 	}
+	DRM_INFO("wait idle failed status : 0x%08X 0x%08X\n",
+		 RADEON_READ(RADEON_RBBM_STATUS),
+		 RADEON_READ(R300_VAP_CNTL_STATUS));
 
 #if RADEON_FIFO_DEBUG
 	DRM_ERROR("failed!\n");
@@ -443,14 +435,20 @@ static void radeon_do_cp_start(drm_radeon_private_t * dev_priv)
 
 	dev_priv->cp_running = 1;
 
-	BEGIN_RING(6);
-
+	BEGIN_RING(8);
+	/* isync can only be written through the CP on r5xx, so write it here */
+	OUT_RING(CP_PACKET0(RADEON_ISYNC_CNTL, 0));
+	OUT_RING(RADEON_ISYNC_ANY2D_IDLE3D |
+		 RADEON_ISYNC_ANY3D_IDLE2D |
+		 RADEON_ISYNC_WAIT_IDLEGUI |
+		 RADEON_ISYNC_CPSCRATCH_IDLEGUI);
 	RADEON_PURGE_CACHE();
 	RADEON_PURGE_ZCACHE();
 	RADEON_WAIT_UNTIL_IDLE();
-
 	ADVANCE_RING();
 	COMMIT_RING();
+
+	dev_priv->track_flush |= RADEON_FLUSH_EMITED | RADEON_PURGE_EMITED;
 }
 
 /* Reset the Command Processor. This will not flush any pending
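The radeon_cp.c wait helpers above poll RBBM_STATUS once per microsecond up to dev_priv->usec_timeout and, with this change, log the status registers when the wait gives up. Below is a userspace sketch of that bounded-poll-with-diagnostic shape; read_status() is a stand-in, not a real register read.

/* Sketch of a bounded poll that reports the last observed status on
 * failure, in the spirit of the wait_for_fifo/wait_for_idle helpers. */
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

static uint32_t read_status(void)
{
	static int countdown = 5;

	return countdown-- > 0 ? 0x80000000u : 0;	/* busy bit clears eventually */
}

static int wait_until_idle(int usec_timeout)
{
	int i;

	for (i = 0; i < usec_timeout; i++) {
		uint32_t status = read_status();

		if (!(status & 0x80000000u))	/* engine no longer busy */
			return 0;
		usleep(1);
	}
	fprintf(stderr, "wait idle failed, last status: 0x%08X\n", read_status());
	return -16;	/* -EBUSY */
}

int main(void)
{
	return wait_until_idle(100) ? 1 : 0;
}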
diff --git a/drivers/gpu/drm/radeon/radeon_drv.h b/drivers/gpu/drm/radeon/radeon_drv.h
index 3f0eca957aa7..099381693175 100644
--- a/drivers/gpu/drm/radeon/radeon_drv.h
+++ b/drivers/gpu/drm/radeon/radeon_drv.h
@@ -220,6 +220,9 @@ struct radeon_virt_surface {
 	struct drm_file *file_priv;
 };
 
+#define RADEON_FLUSH_EMITED	(1 << 0)
+#define RADEON_PURGE_EMITED	(1 << 1)
+
 typedef struct drm_radeon_private {
 	drm_radeon_ring_buffer_t ring;
 	drm_radeon_sarea_t *sarea_priv;
@@ -311,6 +314,7 @@ typedef struct drm_radeon_private {
 	unsigned long fb_aper_offset;
 
 	int num_gb_pipes;
+	int track_flush;
 } drm_radeon_private_t;
 
 typedef struct drm_radeon_buf_priv {
@@ -693,7 +697,6 @@ extern int r300_do_cp_cmdbuf(struct drm_device * dev,
 #define R300_ZB_ZCACHE_CTLSTAT                  0x4f18
 #	define R300_ZC_FLUSH                    (1 << 0)
 #	define R300_ZC_FREE                     (1 << 1)
-#	define R300_ZC_FLUSH_ALL                0x3
 #	define R300_ZC_BUSY                     (1 << 31)
 #define RADEON_RB3D_DSTCACHE_CTLSTAT            0x325c
 #	define RADEON_RB3D_DC_FLUSH             (3 << 0)
@@ -701,6 +704,8 @@ extern int r300_do_cp_cmdbuf(struct drm_device * dev,
 #	define RADEON_RB3D_DC_FLUSH_ALL         0xf
 #	define RADEON_RB3D_DC_BUSY              (1 << 31)
 #define R300_RB3D_DSTCACHE_CTLSTAT              0x4e4c
+#	define R300_RB3D_DC_FLUSH               (2 << 0)
+#	define R300_RB3D_DC_FREE                (2 << 2)
 #	define R300_RB3D_DC_FINISH              (1 << 4)
 #define RADEON_RB3D_ZSTENCILCNTL                0x1c2c
 #	define RADEON_Z_TEST_MASK               (7 << 4)
@@ -1246,17 +1251,17 @@ do { \
 		OUT_RING(RADEON_RB3D_DC_FLUSH);				\
 	} else {							\
 		OUT_RING(CP_PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));	\
-		OUT_RING(RADEON_RB3D_DC_FLUSH);				\
+		OUT_RING(R300_RB3D_DC_FLUSH);				\
 	}								\
 } while (0)
 
 #define RADEON_PURGE_CACHE() do {					\
 	if ((dev_priv->flags & RADEON_FAMILY_MASK) <= CHIP_RV280) {	\
 		OUT_RING(CP_PACKET0(RADEON_RB3D_DSTCACHE_CTLSTAT, 0));	\
-		OUT_RING(RADEON_RB3D_DC_FLUSH_ALL);			\
+		OUT_RING(RADEON_RB3D_DC_FLUSH | RADEON_RB3D_DC_FREE);	\
 	} else {							\
 		OUT_RING(CP_PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));	\
-		OUT_RING(RADEON_RB3D_DC_FLUSH_ALL);			\
+		OUT_RING(R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE);	\
 	}								\
 } while (0)
 
@@ -1273,10 +1278,10 @@ do { \
 #define RADEON_PURGE_ZCACHE() do {					\
 	if ((dev_priv->flags & RADEON_FAMILY_MASK) <= CHIP_RV280) {	\
 		OUT_RING(CP_PACKET0(RADEON_RB3D_ZCACHE_CTLSTAT, 0));	\
-		OUT_RING(RADEON_RB3D_ZC_FLUSH_ALL);			\
+		OUT_RING(RADEON_RB3D_ZC_FLUSH | RADEON_RB3D_ZC_FREE);	\
 	} else {							\
-		OUT_RING(CP_PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));	\
-		OUT_RING(R300_ZC_FLUSH_ALL);				\
+		OUT_RING(CP_PACKET0(R300_ZB_ZCACHE_CTLSTAT, 0));	\
+		OUT_RING(R300_ZC_FLUSH | R300_ZC_FREE);			\
 	}								\
 } while (0)
 
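The flush/purge macros at the end of radeon_drv.h pick different registers and bit encodings depending on chip family: parts up to RV280 use the older RADEON_RB3D_* cache registers, while R300 and newer parts use the R300_* ones (and R300_ZB_ZCACHE_CTLSTAT for the Z cache). The standalone sketch below illustrates that dispatch as a plain function; the CHIP_* values and the RADEON_RB3D_DC_FREE encoding are assumptions for illustration, only the register offsets and the R300 bit values appear in this patch.

/* Sketch of the family-dependent cache-purge selection encoded in the
 * RADEON_PURGE_CACHE() macro above (illustrative helper, not DRM code). */
#include <stdint.h>
#include <stdio.h>

#define CHIP_RV280			0x01	/* assumed: older parts sort below newer ones */
#define CHIP_R300			0x02	/* assumed */

#define RADEON_RB3D_DSTCACHE_CTLSTAT	0x325c
#define RADEON_RB3D_DC_FLUSH		(3 << 0)
#define RADEON_RB3D_DC_FREE		(3 << 2)	/* assumed from DC_FLUSH_ALL == 0xf */

#define R300_RB3D_DSTCACHE_CTLSTAT	0x4e4c
#define R300_RB3D_DC_FLUSH		(2 << 0)
#define R300_RB3D_DC_FREE		(2 << 2)

struct reg_write { uint32_t reg, val; };

static struct reg_write purge_cache(int family)
{
	struct reg_write w;

	if (family <= CHIP_RV280) {
		w.reg = RADEON_RB3D_DSTCACHE_CTLSTAT;
		w.val = RADEON_RB3D_DC_FLUSH | RADEON_RB3D_DC_FREE;
	} else {
		w.reg = R300_RB3D_DSTCACHE_CTLSTAT;
		w.val = R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE;
	}
	return w;
}

int main(void)
{
	struct reg_write w = purge_cache(CHIP_R300);

	printf("write 0x%08X to reg 0x%04X\n", w.val, w.reg);
	return 0;
}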