Diffstat (limited to 'drivers/gpu'):

 drivers/gpu/drm/nouveau/nouveau_drv.h    |  14
 drivers/gpu/drm/nouveau/nouveau_grctx.h  |   2
 drivers/gpu/drm/nouveau/nouveau_mem.c    |  11
 drivers/gpu/drm/nouveau/nouveau_object.c |  92
 drivers/gpu/drm/nouveau/nouveau_ramht.c  |  16
 drivers/gpu/drm/nouveau/nouveau_sgdma.c  |  50
 drivers/gpu/drm/nouveau/nv04_fifo.c      |   8
 drivers/gpu/drm/nouveau/nv20_graph.c     | 474
 drivers/gpu/drm/nouveau/nv40_graph.c     |   4
 drivers/gpu/drm/nouveau/nv40_grctx.c     |   6
 drivers/gpu/drm/nouveau/nv50_display.c   |  14
 drivers/gpu/drm/nouveau/nv50_fifo.c      | 211
 drivers/gpu/drm/nouveau/nv50_graph.c     |  18
 drivers/gpu/drm/nouveau/nv50_grctx.c     |   2
 drivers/gpu/drm/nouveau/nv50_instmem.c   |  10
 15 files changed, 474 insertions, 458 deletions

diff --git a/drivers/gpu/drm/nouveau/nouveau_drv.h b/drivers/gpu/drm/nouveau/nouveau_drv.h
index c684686f6c0f..372adfdd9de0 100644
--- a/drivers/gpu/drm/nouveau/nouveau_drv.h
+++ b/drivers/gpu/drm/nouveau/nouveau_drv.h
@@ -138,6 +138,7 @@ enum nouveau_flags {
 #define NVOBJ_FLAG_ZERO_FREE (1 << 2)
 #define NVOBJ_FLAG_FAKE (1 << 3)
 struct nouveau_gpuobj {
+    struct drm_device *dev;
     struct list_head list;
 
     struct nouveau_channel *im_channel;
@@ -1291,17 +1292,8 @@ static inline void nv_wi32(struct drm_device *dev, unsigned offset, u32 val)
 }
 
 /* object access */
-static inline u32 nv_ro32(struct drm_device *dev, struct nouveau_gpuobj *obj,
-                          unsigned index)
-{
-    return nv_ri32(dev, obj->im_pramin->start + index * 4);
-}
-
-static inline void nv_wo32(struct drm_device *dev, struct nouveau_gpuobj *obj,
-                           unsigned index, u32 val)
-{
-    nv_wi32(dev, obj->im_pramin->start + index * 4, val);
-}
+extern u32 nv_ro32(struct nouveau_gpuobj *, u32 offset);
+extern void nv_wo32(struct nouveau_gpuobj *, u32 offset, u32 val);
 
 /*
  * Logging
diff --git a/drivers/gpu/drm/nouveau/nouveau_grctx.h b/drivers/gpu/drm/nouveau/nouveau_grctx.h
index 5d39c4ce8006..4a8ad1307fa4 100644
--- a/drivers/gpu/drm/nouveau/nouveau_grctx.h
+++ b/drivers/gpu/drm/nouveau/nouveau_grctx.h
@@ -126,7 +126,7 @@ gr_def(struct nouveau_grctx *ctx, uint32_t reg, uint32_t val)
     reg = (reg - 0x00400000) / 4;
     reg = (reg - ctx->ctxprog_reg) + ctx->ctxvals_base;
 
-    nv_wo32(ctx->dev, ctx->data, reg, val);
+    nv_wo32(ctx->data, reg * 4, val);
 }
 #endif
 
diff --git a/drivers/gpu/drm/nouveau/nouveau_mem.c b/drivers/gpu/drm/nouveau/nouveau_mem.c
index c14466ba69ba..f34c532bcac3 100644
--- a/drivers/gpu/drm/nouveau/nouveau_mem.c
+++ b/drivers/gpu/drm/nouveau/nouveau_mem.c
@@ -169,8 +169,9 @@ nv50_mem_vm_bind_linear(struct drm_device *dev, uint64_t virt, uint32_t size,
             virt += (end - pte);
 
             while (pte < end) {
-                nv_wo32(dev, pgt, pte++, offset_l);
-                nv_wo32(dev, pgt, pte++, offset_h);
+                nv_wo32(pgt, (pte * 4) + 0, offset_l);
+                nv_wo32(pgt, (pte * 4) + 4, offset_h);
+                pte += 2;
             }
         }
     }
@@ -203,8 +204,10 @@ nv50_mem_vm_unbind(struct drm_device *dev, uint64_t virt, uint32_t size)
         pages -= (end - pte);
         virt += (end - pte) << 15;
 
-        while (pte < end)
-            nv_wo32(dev, pgt, pte++, 0);
+        while (pte < end) {
+            nv_wo32(pgt, (pte * 4), 0);
+            pte++;
+        }
     }
     dev_priv->engine.instmem.flush(dev);
 
diff --git a/drivers/gpu/drm/nouveau/nouveau_object.c b/drivers/gpu/drm/nouveau/nouveau_object.c
index e658aa2dbe67..52db13cd75b2 100644
--- a/drivers/gpu/drm/nouveau/nouveau_object.c
+++ b/drivers/gpu/drm/nouveau/nouveau_object.c
@@ -88,6 +88,7 @@ nouveau_gpuobj_new(struct drm_device *dev, struct nouveau_channel *chan,
     if (!gpuobj)
         return -ENOMEM;
     NV_DEBUG(dev, "gpuobj %p\n", gpuobj);
+    gpuobj->dev = dev;
     gpuobj->flags = flags;
     gpuobj->im_channel = chan;
 
@@ -134,7 +135,7 @@ nouveau_gpuobj_new(struct drm_device *dev, struct nouveau_channel *chan,
         int i;
 
         for (i = 0; i < gpuobj->im_pramin->size; i += 4)
-            nv_wo32(dev, gpuobj, i/4, 0);
+            nv_wo32(gpuobj, i, 0);
         engine->instmem.flush(dev);
     }
 
@@ -224,7 +225,7 @@ nouveau_gpuobj_del(struct drm_device *dev, struct nouveau_gpuobj **pgpuobj)
 
     if (gpuobj->im_pramin && (gpuobj->flags & NVOBJ_FLAG_ZERO_FREE)) {
         for (i = 0; i < gpuobj->im_pramin->size; i += 4)
-            nv_wo32(dev, gpuobj, i/4, 0);
+            nv_wo32(gpuobj, i, 0);
         engine->instmem.flush(dev);
     }
 
@@ -435,6 +436,7 @@ nouveau_gpuobj_new_fake(struct drm_device *dev, uint32_t p_offset,
     if (!gpuobj)
         return -ENOMEM;
     NV_DEBUG(dev, "gpuobj %p\n", gpuobj);
+    gpuobj->dev = dev;
     gpuobj->im_channel = NULL;
     gpuobj->flags = flags | NVOBJ_FLAG_FAKE;
 
@@ -458,7 +460,7 @@ nouveau_gpuobj_new_fake(struct drm_device *dev, uint32_t p_offset,
 
     if (gpuobj->flags & NVOBJ_FLAG_ZERO_ALLOC) {
         for (i = 0; i < gpuobj->im_pramin->size; i += 4)
-            nv_wo32(dev, gpuobj, i/4, 0);
+            nv_wo32(gpuobj, i, 0);
         dev_priv->engine.instmem.flush(dev);
     }
 
@@ -555,14 +557,12 @@ nouveau_gpuobj_dma_new(struct nouveau_channel *chan, int class,
         adjust = offset & 0x00000fff;
         frame = offset & ~0x00000fff;
 
-        nv_wo32(dev, *gpuobj, 0, ((1<<12) | (1<<13) |
-                (adjust << 20) |
-                (access << 14) |
-                (target << 16) |
-                class));
-        nv_wo32(dev, *gpuobj, 1, size - 1);
-        nv_wo32(dev, *gpuobj, 2, frame | pte_flags);
-        nv_wo32(dev, *gpuobj, 3, frame | pte_flags);
+        nv_wo32(*gpuobj, 0, ((1<<12) | (1<<13) | (adjust << 20) |
+                (access << 14) | (target << 16) |
+                class));
+        nv_wo32(*gpuobj, 4, size - 1);
+        nv_wo32(*gpuobj, 8, frame | pte_flags);
+        nv_wo32(*gpuobj, 12, frame | pte_flags);
     } else {
         uint64_t limit = offset + size - 1;
         uint32_t flags0, flags5;
@@ -575,12 +575,12 @@ nouveau_gpuobj_dma_new(struct nouveau_channel *chan, int class,
             flags5 = 0x00080000;
         }
 
-        nv_wo32(dev, *gpuobj, 0, flags0 | class);
-        nv_wo32(dev, *gpuobj, 1, lower_32_bits(limit));
-        nv_wo32(dev, *gpuobj, 2, lower_32_bits(offset));
-        nv_wo32(dev, *gpuobj, 3, ((upper_32_bits(limit) & 0xff) << 24) |
+        nv_wo32(*gpuobj, 0, flags0 | class);
+        nv_wo32(*gpuobj, 4, lower_32_bits(limit));
+        nv_wo32(*gpuobj, 8, lower_32_bits(offset));
+        nv_wo32(*gpuobj, 12, ((upper_32_bits(limit) & 0xff) << 24) |
                 (upper_32_bits(offset) & 0xff));
-        nv_wo32(dev, *gpuobj, 5, flags5);
+        nv_wo32(*gpuobj, 20, flags5);
     }
 
     instmem->flush(dev);
@@ -699,25 +699,25 @@ nouveau_gpuobj_gr_new(struct nouveau_channel *chan, int class,
     }
 
     if (dev_priv->card_type >= NV_50) {
-        nv_wo32(dev, *gpuobj, 0, class);
-        nv_wo32(dev, *gpuobj, 5, 0x00010000);
+        nv_wo32(*gpuobj, 0, class);
+        nv_wo32(*gpuobj, 20, 0x00010000);
     } else {
         switch (class) {
         case NV_CLASS_NULL:
-            nv_wo32(dev, *gpuobj, 0, 0x00001030);
-            nv_wo32(dev, *gpuobj, 1, 0xFFFFFFFF);
+            nv_wo32(*gpuobj, 0, 0x00001030);
+            nv_wo32(*gpuobj, 4, 0xFFFFFFFF);
             break;
         default:
            if (dev_priv->card_type >= NV_40) {
-                nv_wo32(dev, *gpuobj, 0, class);
+                nv_wo32(*gpuobj, 0, class);
 #ifdef __BIG_ENDIAN
-                nv_wo32(dev, *gpuobj, 2, 0x01000000);
+                nv_wo32(*gpuobj, 8, 0x01000000);
 #endif
             } else {
 #ifdef __BIG_ENDIAN
-                nv_wo32(dev, *gpuobj, 0, class | 0x00080000);
+                nv_wo32(*gpuobj, 0, class | 0x00080000);
 #else
-                nv_wo32(dev, *gpuobj, 0, class);
+                nv_wo32(*gpuobj, 0, class);
 #endif
             }
         }
@@ -836,21 +836,20 @@ nouveau_gpuobj_channel_init(struct nouveau_channel *chan,
         if (ret)
             return ret;
         for (i = 0; i < 0x4000; i += 8) {
-            nv_wo32(dev, chan->vm_pd, (i+0)/4, 0x00000000);
-            nv_wo32(dev, chan->vm_pd, (i+4)/4, 0xdeadcafe);
+            nv_wo32(chan->vm_pd, i + 0, 0x00000000);
+            nv_wo32(chan->vm_pd, i + 4, 0xdeadcafe);
         }
 
-        pde = (dev_priv->vm_gart_base / (512*1024*1024)) * 2;
+        pde = (dev_priv->vm_gart_base / (512*1024*1024)) * 8;
         ret = nouveau_gpuobj_ref_add(dev, NULL, 0,
                                      dev_priv->gart_info.sg_ctxdma,
                                      &chan->vm_gart_pt);
         if (ret)
             return ret;
-        nv_wo32(dev, chan->vm_pd, pde++,
-            chan->vm_gart_pt->instance | 0x03);
-        nv_wo32(dev, chan->vm_pd, pde++, 0x00000000);
+        nv_wo32(chan->vm_pd, pde + 0, chan->vm_gart_pt->instance | 3);
+        nv_wo32(chan->vm_pd, pde + 4, 0x00000000);
 
-        pde = (dev_priv->vm_vram_base / (512*1024*1024)) * 2;
+        pde = (dev_priv->vm_vram_base / (512*1024*1024)) * 8;
         for (i = 0; i < dev_priv->vm_vram_pt_nr; i++) {
             ret = nouveau_gpuobj_ref_add(dev, NULL, 0,
                                          dev_priv->vm_vram_pt[i],
@@ -858,9 +857,10 @@ nouveau_gpuobj_channel_init(struct nouveau_channel *chan,
             if (ret)
                 return ret;
 
-            nv_wo32(dev, chan->vm_pd, pde++,
+            nv_wo32(chan->vm_pd, pde + 0,
                 chan->vm_vram_pt[i]->instance | 0x61);
-            nv_wo32(dev, chan->vm_pd, pde++, 0x00000000);
+            nv_wo32(chan->vm_pd, pde + 4, 0x00000000);
+            pde += 8;
         }
 
         instmem->flush(dev);
@@ -996,8 +996,8 @@ nouveau_gpuobj_suspend(struct drm_device *dev)
             return -ENOMEM;
         }
 
-        for (i = 0; i < gpuobj->im_pramin->size / 4; i++)
-            gpuobj->im_backing_suspend[i] = nv_ro32(dev, gpuobj, i);
+        for (i = 0; i < gpuobj->im_pramin->size; i += 4)
+            gpuobj->im_backing_suspend[i/4] = nv_ro32(gpuobj, i);
     }
 
     return 0;
@@ -1042,8 +1042,8 @@ nouveau_gpuobj_resume(struct drm_device *dev)
         if (!gpuobj->im_backing_suspend)
             continue;
 
-        for (i = 0; i < gpuobj->im_pramin->size / 4; i++)
-            nv_wo32(dev, gpuobj, i, gpuobj->im_backing_suspend[i]);
+        for (i = 0; i < gpuobj->im_pramin->size; i += 4)
+            nv_wo32(gpuobj, i, gpuobj->im_backing_suspend[i/4]);
         dev_priv->engine.instmem.flush(dev);
     }
 
@@ -1120,3 +1120,17 @@ int nouveau_ioctl_gpuobj_free(struct drm_device *dev, void *data,
 
     return 0;
 }
+
+u32
+nv_ro32(struct nouveau_gpuobj *gpuobj, u32 offset)
+{
+    struct drm_device *dev = gpuobj->dev;
+    return nv_ri32(dev, gpuobj->im_pramin->start + offset);
+}
+
+void
+nv_wo32(struct nouveau_gpuobj *gpuobj, u32 offset, u32 val)
+{
+    struct drm_device *dev = gpuobj->dev;
+    nv_wi32(dev, gpuobj->im_pramin->start + offset, val);
+}
diff --git a/drivers/gpu/drm/nouveau/nouveau_ramht.c b/drivers/gpu/drm/nouveau/nouveau_ramht.c
index 8b27ee5411b3..e5cc93c55d80 100644
--- a/drivers/gpu/drm/nouveau/nouveau_ramht.c
+++ b/drivers/gpu/drm/nouveau/nouveau_ramht.c
@@ -54,7 +54,7 @@ nouveau_ramht_entry_valid(struct drm_device *dev, struct nouveau_gpuobj *ramht,
                           uint32_t offset)
 {
     struct drm_nouveau_private *dev_priv = dev->dev_private;
-    uint32_t ctx = nv_ro32(dev, ramht, (offset + 4)/4);
+    uint32_t ctx = nv_ro32(ramht, offset + 4);
 
     if (dev_priv->card_type < NV_40)
         return ((ctx & NV_RAMHT_CONTEXT_VALID) != 0);
@@ -100,15 +100,15 @@ nouveau_ramht_insert(struct drm_device *dev, struct nouveau_gpuobj_ref *ref)
             NV_DEBUG(dev,
                      "insert ch%d 0x%08x: h=0x%08x, c=0x%08x\n",
                      chan->id, co, ref->handle, ctx);
-            nv_wo32(dev, ramht, (co + 0)/4, ref->handle);
-            nv_wo32(dev, ramht, (co + 4)/4, ctx);
+            nv_wo32(ramht, co + 0, ref->handle);
+            nv_wo32(ramht, co + 4, ctx);
 
             list_add_tail(&ref->list, &chan->ramht_refs);
             instmem->flush(dev);
             return 0;
         }
         NV_DEBUG(dev, "collision ch%d 0x%08x: h=0x%08x\n",
-                 chan->id, co, nv_ro32(dev, ramht, co/4));
+                 chan->id, co, nv_ro32(ramht, co));
 
         co += 8;
         if (co >= dev_priv->ramht_size)
@@ -136,13 +136,13 @@ nouveau_ramht_remove(struct drm_device *dev, struct nouveau_gpuobj_ref *ref)
     co = ho = nouveau_ramht_hash_handle(dev, chan->id, ref->handle);
     do {
         if (nouveau_ramht_entry_valid(dev, ramht, co) &&
-            (ref->handle == nv_ro32(dev, ramht, (co/4)))) {
+            (ref->handle == nv_ro32(ramht, co))) {
             NV_DEBUG(dev,
                      "remove ch%d 0x%08x: h=0x%08x, c=0x%08x\n",
                      chan->id, co, ref->handle,
-                     nv_ro32(dev, ramht, (co + 4)));
-            nv_wo32(dev, ramht, (co + 0)/4, 0x00000000);
-            nv_wo32(dev, ramht, (co + 4)/4, 0x00000000);
+                     nv_ro32(ramht, co + 4));
+            nv_wo32(ramht, co + 0, 0x00000000);
+            nv_wo32(ramht, co + 4, 0x00000000);
 
             list_del(&ref->list);
             instmem->flush(dev);
diff --git a/drivers/gpu/drm/nouveau/nouveau_sgdma.c b/drivers/gpu/drm/nouveau/nouveau_sgdma.c
index 6b9187d7f67d..630988af801c 100644
--- a/drivers/gpu/drm/nouveau/nouveau_sgdma.c
+++ b/drivers/gpu/drm/nouveau/nouveau_sgdma.c
@@ -105,11 +105,13 @@ nouveau_sgdma_bind(struct ttm_backend *be, struct ttm_mem_reg *mem)
         uint32_t offset_h = upper_32_bits(dma_offset);
 
         for (j = 0; j < PAGE_SIZE / NV_CTXDMA_PAGE_SIZE; j++) {
-            if (dev_priv->card_type < NV_50)
-                nv_wo32(dev, gpuobj, pte++, offset_l | 3);
-            else {
-                nv_wo32(dev, gpuobj, pte++, offset_l | 0x21);
-                nv_wo32(dev, gpuobj, pte++, offset_h & 0xff);
+            if (dev_priv->card_type < NV_50) {
+                nv_wo32(gpuobj, (pte * 4) + 0, offset_l | 3);
+                pte += 1;
+            } else {
+                nv_wo32(gpuobj, (pte * 4) + 0, offset_l | 0x21);
+                nv_wo32(gpuobj, (pte * 4) + 4, offset_h & 0xff);
+                pte += 2;
             }
 
             dma_offset += NV_CTXDMA_PAGE_SIZE;
@@ -145,11 +147,13 @@ nouveau_sgdma_unbind(struct ttm_backend *be)
             dma_addr_t dma_offset = dev_priv->gart_info.sg_dummy_bus;
 
             for (j = 0; j < PAGE_SIZE / NV_CTXDMA_PAGE_SIZE; j++) {
-                if (dev_priv->card_type < NV_50)
-                    nv_wo32(dev, gpuobj, pte++, dma_offset | 3);
-                else {
-                    nv_wo32(dev, gpuobj, pte++, dma_offset | 0x21);
-                    nv_wo32(dev, gpuobj, pte++, 0x00000000);
+                if (dev_priv->card_type < NV_50) {
+                    nv_wo32(gpuobj, (pte * 4) + 0, dma_offset | 3);
+                    pte += 1;
+                } else {
+                    nv_wo32(gpuobj, (pte * 4), dma_offset | 0x21);
+                    nv_wo32(gpuobj, (pte * 4) + 4, 0x00000000);
+                    pte += 2;
                 }
 
                 dma_offset += NV_CTXDMA_PAGE_SIZE;
@@ -258,21 +262,21 @@ nouveau_sgdma_init(struct drm_device *dev)
         /* Maybe use NV_DMA_TARGET_AGP for PCIE? NVIDIA do this, and
          * confirmed to work on c51. Perhaps means NV_DMA_TARGET_PCIE
          * on those cards? */
-        nv_wo32(dev, gpuobj, 0, NV_CLASS_DMA_IN_MEMORY |
+        nv_wo32(gpuobj, 0, NV_CLASS_DMA_IN_MEMORY |
                 (1 << 12) /* PT present */ |
                 (0 << 13) /* PT *not* linear */ |
                 (NV_DMA_ACCESS_RW << 14) |
                 (NV_DMA_TARGET_PCI << 16));
-        nv_wo32(dev, gpuobj, 1, aper_size - 1);
+        nv_wo32(gpuobj, 4, aper_size - 1);
         for (i = 2; i < 2 + (aper_size >> 12); i++) {
-            nv_wo32(dev, gpuobj, i,
+            nv_wo32(gpuobj, i * 4,
                 dev_priv->gart_info.sg_dummy_bus | 3);
         }
     } else {
         for (i = 0; i < obj_size; i += 8) {
-            nv_wo32(dev, gpuobj, (i+0)/4,
+            nv_wo32(gpuobj, i + 0,
                 dev_priv->gart_info.sg_dummy_bus | 0x21);
-            nv_wo32(dev, gpuobj, (i+4)/4, 0);
+            nv_wo32(gpuobj, i + 4, 0);
         }
     }
     dev_priv->engine.instmem.flush(dev);
@@ -308,9 +312,9 @@ nouveau_sgdma_get_page(struct drm_device *dev, uint32_t offset, uint32_t *page)
     struct nouveau_gpuobj *gpuobj = dev_priv->gart_info.sg_ctxdma;
     int pte;
 
-    pte = (offset >> NV_CTXDMA_PAGE_SHIFT);
+    pte = (offset >> NV_CTXDMA_PAGE_SHIFT) << 2;
     if (dev_priv->card_type < NV_50) {
-        *page = nv_ro32(dev, gpuobj, (pte + 2)) & ~NV_CTXDMA_PAGE_MASK;
+        *page = nv_ro32(gpuobj, (pte + 8)) & ~NV_CTXDMA_PAGE_MASK;
         return 0;
     }
 
diff --git a/drivers/gpu/drm/nouveau/nv04_fifo.c b/drivers/gpu/drm/nouveau/nv04_fifo.c
index 06cedd99c26a..bbb87ef262c0 100644
--- a/drivers/gpu/drm/nouveau/nv04_fifo.c
+++ b/drivers/gpu/drm/nouveau/nv04_fifo.c
@@ -38,10 +38,10 @@
 #define NV04_RAMFC_ENGINE 0x14
 #define NV04_RAMFC_PULL1_ENGINE 0x18
 
-#define RAMFC_WR(offset, val) nv_wo32(dev, chan->ramfc->gpuobj, \
-                                      NV04_RAMFC_##offset/4, (val))
-#define RAMFC_RD(offset)      nv_ro32(dev, chan->ramfc->gpuobj, \
-                                      NV04_RAMFC_##offset/4)
+#define RAMFC_WR(offset, val) nv_wo32(chan->ramfc->gpuobj, \
+                                      NV04_RAMFC_##offset, (val))
+#define RAMFC_RD(offset)      nv_ro32(chan->ramfc->gpuobj, \
+                                      NV04_RAMFC_##offset)
 
 void
 nv04_fifo_disable(struct drm_device *dev)
diff --git a/drivers/gpu/drm/nouveau/nv20_graph.c b/drivers/gpu/drm/nouveau/nv20_graph.c
index 17f309b36c91..d8693d32bd0e 100644
--- a/drivers/gpu/drm/nouveau/nv20_graph.c
+++ b/drivers/gpu/drm/nouveau/nv20_graph.c
@@ -37,49 +37,49 @@ nv20_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx)
 {
     int i;
 
-    nv_wo32(dev, ctx, 0x033c/4, 0xffff0000);
-    nv_wo32(dev, ctx, 0x03a0/4, 0x0fff0000);
-    nv_wo32(dev, ctx, 0x03a4/4, 0x0fff0000);
-    nv_wo32(dev, ctx, 0x047c/4, 0x00000101);
-    nv_wo32(dev, ctx, 0x0490/4, 0x00000111);
-    nv_wo32(dev, ctx, 0x04a8/4, 0x44400000);
+    nv_wo32(ctx, 0x033c, 0xffff0000);
+    nv_wo32(ctx, 0x03a0, 0x0fff0000);
+    nv_wo32(ctx, 0x03a4, 0x0fff0000);
+    nv_wo32(ctx, 0x047c, 0x00000101);
+    nv_wo32(ctx, 0x0490, 0x00000111);
+    nv_wo32(ctx, 0x04a8, 0x44400000);
     for (i = 0x04d4; i <= 0x04e0; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00030303);
+        nv_wo32(ctx, i, 0x00030303);
     for (i = 0x04f4; i <= 0x0500; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00080000);
+        nv_wo32(ctx, i, 0x00080000);
     for (i = 0x050c; i <= 0x0518; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x01012000);
+        nv_wo32(ctx, i, 0x01012000);
     for (i = 0x051c; i <= 0x0528; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x000105b8);
+        nv_wo32(ctx, i, 0x000105b8);
     for (i = 0x052c; i <= 0x0538; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00080008);
+        nv_wo32(ctx, i, 0x00080008);
     for (i = 0x055c; i <= 0x0598; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x07ff0000);
-    nv_wo32(dev, ctx, 0x05a4/4, 0x4b7fffff);
-    nv_wo32(dev, ctx, 0x05fc/4, 0x00000001);
-    nv_wo32(dev, ctx, 0x0604/4, 0x00004000);
-    nv_wo32(dev, ctx, 0x0610/4, 0x00000001);
-    nv_wo32(dev, ctx, 0x0618/4, 0x00040000);
-    nv_wo32(dev, ctx, 0x061c/4, 0x00010000);
+        nv_wo32(ctx, i, 0x07ff0000);
+    nv_wo32(ctx, 0x05a4, 0x4b7fffff);
+    nv_wo32(ctx, 0x05fc, 0x00000001);
+    nv_wo32(ctx, 0x0604, 0x00004000);
+    nv_wo32(ctx, 0x0610, 0x00000001);
+    nv_wo32(ctx, 0x0618, 0x00040000);
+    nv_wo32(ctx, 0x061c, 0x00010000);
     for (i = 0x1c1c; i <= 0x248c; i += 16) {
-        nv_wo32(dev, ctx, (i + 0)/4, 0x10700ff9);
-        nv_wo32(dev, ctx, (i + 4)/4, 0x0436086c);
-        nv_wo32(dev, ctx, (i + 8)/4, 0x000c001b);
+        nv_wo32(ctx, (i + 0), 0x10700ff9);
+        nv_wo32(ctx, (i + 4), 0x0436086c);
+        nv_wo32(ctx, (i + 8), 0x000c001b);
     }
-    nv_wo32(dev, ctx, 0x281c/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x2830/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x285c/4, 0x40000000);
-    nv_wo32(dev, ctx, 0x2860/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x2864/4, 0x3f000000);
-    nv_wo32(dev, ctx, 0x286c/4, 0x40000000);
-    nv_wo32(dev, ctx, 0x2870/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x2878/4, 0xbf800000);
-    nv_wo32(dev, ctx, 0x2880/4, 0xbf800000);
-    nv_wo32(dev, ctx, 0x34a4/4, 0x000fe000);
-    nv_wo32(dev, ctx, 0x3530/4, 0x000003f8);
-    nv_wo32(dev, ctx, 0x3540/4, 0x002fe000);
+    nv_wo32(ctx, 0x281c, 0x3f800000);
+    nv_wo32(ctx, 0x2830, 0x3f800000);
+    nv_wo32(ctx, 0x285c, 0x40000000);
+    nv_wo32(ctx, 0x2860, 0x3f800000);
+    nv_wo32(ctx, 0x2864, 0x3f000000);
+    nv_wo32(ctx, 0x286c, 0x40000000);
+    nv_wo32(ctx, 0x2870, 0x3f800000);
+    nv_wo32(ctx, 0x2878, 0xbf800000);
+    nv_wo32(ctx, 0x2880, 0xbf800000);
+    nv_wo32(ctx, 0x34a4, 0x000fe000);
+    nv_wo32(ctx, 0x3530, 0x000003f8);
+    nv_wo32(ctx, 0x3540, 0x002fe000);
     for (i = 0x355c; i <= 0x3578; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x001c527c);
+        nv_wo32(ctx, i, 0x001c527c);
 }
 
 static void
@@ -87,58 +87,58 @@ nv25_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx)
 {
     int i;
 
-    nv_wo32(dev, ctx, 0x035c/4, 0xffff0000);
-    nv_wo32(dev, ctx, 0x03c0/4, 0x0fff0000);
-    nv_wo32(dev, ctx, 0x03c4/4, 0x0fff0000);
-    nv_wo32(dev, ctx, 0x049c/4, 0x00000101);
-    nv_wo32(dev, ctx, 0x04b0/4, 0x00000111);
-    nv_wo32(dev, ctx, 0x04c8/4, 0x00000080);
-    nv_wo32(dev, ctx, 0x04cc/4, 0xffff0000);
-    nv_wo32(dev, ctx, 0x04d0/4, 0x00000001);
-    nv_wo32(dev, ctx, 0x04e4/4, 0x44400000);
-    nv_wo32(dev, ctx, 0x04fc/4, 0x4b800000);
+    nv_wo32(ctx, 0x035c, 0xffff0000);
+    nv_wo32(ctx, 0x03c0, 0x0fff0000);
+    nv_wo32(ctx, 0x03c4, 0x0fff0000);
+    nv_wo32(ctx, 0x049c, 0x00000101);
+    nv_wo32(ctx, 0x04b0, 0x00000111);
+    nv_wo32(ctx, 0x04c8, 0x00000080);
+    nv_wo32(ctx, 0x04cc, 0xffff0000);
+    nv_wo32(ctx, 0x04d0, 0x00000001);
+    nv_wo32(ctx, 0x04e4, 0x44400000);
+    nv_wo32(ctx, 0x04fc, 0x4b800000);
     for (i = 0x0510; i <= 0x051c; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00030303);
+        nv_wo32(ctx, i, 0x00030303);
     for (i = 0x0530; i <= 0x053c; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00080000);
+        nv_wo32(ctx, i, 0x00080000);
     for (i = 0x0548; i <= 0x0554; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x01012000);
+        nv_wo32(ctx, i, 0x01012000);
     for (i = 0x0558; i <= 0x0564; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x000105b8);
+        nv_wo32(ctx, i, 0x000105b8);
     for (i = 0x0568; i <= 0x0574; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00080008);
+        nv_wo32(ctx, i, 0x00080008);
     for (i = 0x0598; i <= 0x05d4; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x07ff0000);
-    nv_wo32(dev, ctx, 0x05e0/4, 0x4b7fffff);
-    nv_wo32(dev, ctx, 0x0620/4, 0x00000080);
-    nv_wo32(dev, ctx, 0x0624/4, 0x30201000);
-    nv_wo32(dev, ctx, 0x0628/4, 0x70605040);
-    nv_wo32(dev, ctx, 0x062c/4, 0xb0a09080);
-    nv_wo32(dev, ctx, 0x0630/4, 0xf0e0d0c0);
-    nv_wo32(dev, ctx, 0x0664/4, 0x00000001);
-    nv_wo32(dev, ctx, 0x066c/4, 0x00004000);
-    nv_wo32(dev, ctx, 0x0678/4, 0x00000001);
-    nv_wo32(dev, ctx, 0x0680/4, 0x00040000);
-    nv_wo32(dev, ctx, 0x0684/4, 0x00010000);
+        nv_wo32(ctx, i, 0x07ff0000);
+    nv_wo32(ctx, 0x05e0, 0x4b7fffff);
+    nv_wo32(ctx, 0x0620, 0x00000080);
+    nv_wo32(ctx, 0x0624, 0x30201000);
+    nv_wo32(ctx, 0x0628, 0x70605040);
+    nv_wo32(ctx, 0x062c, 0xb0a09080);
+    nv_wo32(ctx, 0x0630, 0xf0e0d0c0);
+    nv_wo32(ctx, 0x0664, 0x00000001);
+    nv_wo32(ctx, 0x066c, 0x00004000);
+    nv_wo32(ctx, 0x0678, 0x00000001);
+    nv_wo32(ctx, 0x0680, 0x00040000);
+    nv_wo32(ctx, 0x0684, 0x00010000);
     for (i = 0x1b04; i <= 0x2374; i += 16) {
-        nv_wo32(dev, ctx, (i + 0)/4, 0x10700ff9);
-        nv_wo32(dev, ctx, (i + 4)/4, 0x0436086c);
-        nv_wo32(dev, ctx, (i + 8)/4, 0x000c001b);
+        nv_wo32(ctx, (i + 0), 0x10700ff9);
+        nv_wo32(ctx, (i + 4), 0x0436086c);
+        nv_wo32(ctx, (i + 8), 0x000c001b);
     }
-    nv_wo32(dev, ctx, 0x2704/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x2718/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x2744/4, 0x40000000);
-    nv_wo32(dev, ctx, 0x2748/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x274c/4, 0x3f000000);
-    nv_wo32(dev, ctx, 0x2754/4, 0x40000000);
-    nv_wo32(dev, ctx, 0x2758/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x2760/4, 0xbf800000);
-    nv_wo32(dev, ctx, 0x2768/4, 0xbf800000);
-    nv_wo32(dev, ctx, 0x308c/4, 0x000fe000);
-    nv_wo32(dev, ctx, 0x3108/4, 0x000003f8);
-    nv_wo32(dev, ctx, 0x3468/4, 0x002fe000);
+    nv_wo32(ctx, 0x2704, 0x3f800000);
+    nv_wo32(ctx, 0x2718, 0x3f800000);
+    nv_wo32(ctx, 0x2744, 0x40000000);
+    nv_wo32(ctx, 0x2748, 0x3f800000);
+    nv_wo32(ctx, 0x274c, 0x3f000000);
+    nv_wo32(ctx, 0x2754, 0x40000000);
+    nv_wo32(ctx, 0x2758, 0x3f800000);
+    nv_wo32(ctx, 0x2760, 0xbf800000);
+    nv_wo32(ctx, 0x2768, 0xbf800000);
+    nv_wo32(ctx, 0x308c, 0x000fe000);
+    nv_wo32(ctx, 0x3108, 0x000003f8);
+    nv_wo32(ctx, 0x3468, 0x002fe000);
     for (i = 0x3484; i <= 0x34a0; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x001c527c);
+        nv_wo32(ctx, i, 0x001c527c);
 }
 
 static void
@@ -146,49 +146,49 @@ nv2a_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx)
 {
     int i;
 
-    nv_wo32(dev, ctx, 0x033c/4, 0xffff0000);
-    nv_wo32(dev, ctx, 0x03a0/4, 0x0fff0000);
-    nv_wo32(dev, ctx, 0x03a4/4, 0x0fff0000);
-    nv_wo32(dev, ctx, 0x047c/4, 0x00000101);
-    nv_wo32(dev, ctx, 0x0490/4, 0x00000111);
-    nv_wo32(dev, ctx, 0x04a8/4, 0x44400000);
+    nv_wo32(ctx, 0x033c, 0xffff0000);
+    nv_wo32(ctx, 0x03a0, 0x0fff0000);
+    nv_wo32(ctx, 0x03a4, 0x0fff0000);
+    nv_wo32(ctx, 0x047c, 0x00000101);
+    nv_wo32(ctx, 0x0490, 0x00000111);
+    nv_wo32(ctx, 0x04a8, 0x44400000);
     for (i = 0x04d4; i <= 0x04e0; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00030303);
+        nv_wo32(ctx, i, 0x00030303);
     for (i = 0x04f4; i <= 0x0500; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00080000);
+        nv_wo32(ctx, i, 0x00080000);
     for (i = 0x050c; i <= 0x0518; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x01012000);
+        nv_wo32(ctx, i, 0x01012000);
     for (i = 0x051c; i <= 0x0528; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x000105b8);
+        nv_wo32(ctx, i, 0x000105b8);
     for (i = 0x052c; i <= 0x0538; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00080008);
+        nv_wo32(ctx, i, 0x00080008);
     for (i = 0x055c; i <= 0x0598; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x07ff0000);
-    nv_wo32(dev, ctx, 0x05a4/4, 0x4b7fffff);
-    nv_wo32(dev, ctx, 0x05fc/4, 0x00000001);
-    nv_wo32(dev, ctx, 0x0604/4, 0x00004000);
-    nv_wo32(dev, ctx, 0x0610/4, 0x00000001);
-    nv_wo32(dev, ctx, 0x0618/4, 0x00040000);
-    nv_wo32(dev, ctx, 0x061c/4, 0x00010000);
+        nv_wo32(ctx, i, 0x07ff0000);
+    nv_wo32(ctx, 0x05a4, 0x4b7fffff);
+    nv_wo32(ctx, 0x05fc, 0x00000001);
+    nv_wo32(ctx, 0x0604, 0x00004000);
+    nv_wo32(ctx, 0x0610, 0x00000001);
+    nv_wo32(ctx, 0x0618, 0x00040000);
+    nv_wo32(ctx, 0x061c, 0x00010000);
     for (i = 0x1a9c; i <= 0x22fc; i += 16) { /*XXX: check!! */
-        nv_wo32(dev, ctx, (i + 0)/4, 0x10700ff9);
-        nv_wo32(dev, ctx, (i + 4)/4, 0x0436086c);
-        nv_wo32(dev, ctx, (i + 8)/4, 0x000c001b);
+        nv_wo32(ctx, (i + 0), 0x10700ff9);
+        nv_wo32(ctx, (i + 4), 0x0436086c);
+        nv_wo32(ctx, (i + 8), 0x000c001b);
     }
-    nv_wo32(dev, ctx, 0x269c/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x26b0/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x26dc/4, 0x40000000);
-    nv_wo32(dev, ctx, 0x26e0/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x26e4/4, 0x3f000000);
-    nv_wo32(dev, ctx, 0x26ec/4, 0x40000000);
-    nv_wo32(dev, ctx, 0x26f0/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x26f8/4, 0xbf800000);
-    nv_wo32(dev, ctx, 0x2700/4, 0xbf800000);
-    nv_wo32(dev, ctx, 0x3024/4, 0x000fe000);
-    nv_wo32(dev, ctx, 0x30a0/4, 0x000003f8);
-    nv_wo32(dev, ctx, 0x33fc/4, 0x002fe000);
+    nv_wo32(ctx, 0x269c, 0x3f800000);
+    nv_wo32(ctx, 0x26b0, 0x3f800000);
+    nv_wo32(ctx, 0x26dc, 0x40000000);
+    nv_wo32(ctx, 0x26e0, 0x3f800000);
+    nv_wo32(ctx, 0x26e4, 0x3f000000);
+    nv_wo32(ctx, 0x26ec, 0x40000000);
+    nv_wo32(ctx, 0x26f0, 0x3f800000);
+    nv_wo32(ctx, 0x26f8, 0xbf800000);
+    nv_wo32(ctx, 0x2700, 0xbf800000);
+    nv_wo32(ctx, 0x3024, 0x000fe000);
+    nv_wo32(ctx, 0x30a0, 0x000003f8);
+    nv_wo32(ctx, 0x33fc, 0x002fe000);
     for (i = 0x341c; i <= 0x3438; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x001c527c);
+        nv_wo32(ctx, i, 0x001c527c);
 }
 
 static void
@@ -196,57 +196,57 @@ nv30_31_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx)
 {
     int i;
 
-    nv_wo32(dev, ctx, 0x0410/4, 0x00000101);
-    nv_wo32(dev, ctx, 0x0424/4, 0x00000111);
-    nv_wo32(dev, ctx, 0x0428/4, 0x00000060);
-    nv_wo32(dev, ctx, 0x0444/4, 0x00000080);
-    nv_wo32(dev, ctx, 0x0448/4, 0xffff0000);
-    nv_wo32(dev, ctx, 0x044c/4, 0x00000001);
-    nv_wo32(dev, ctx, 0x0460/4, 0x44400000);
-    nv_wo32(dev, ctx, 0x048c/4, 0xffff0000);
+    nv_wo32(ctx, 0x0410, 0x00000101);
+    nv_wo32(ctx, 0x0424, 0x00000111);
+    nv_wo32(ctx, 0x0428, 0x00000060);
+    nv_wo32(ctx, 0x0444, 0x00000080);
+    nv_wo32(ctx, 0x0448, 0xffff0000);
+    nv_wo32(ctx, 0x044c, 0x00000001);
+    nv_wo32(ctx, 0x0460, 0x44400000);
+    nv_wo32(ctx, 0x048c, 0xffff0000);
     for (i = 0x04e0; i < 0x04e8; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x0fff0000);
-    nv_wo32(dev, ctx, 0x04ec/4, 0x00011100);
+        nv_wo32(ctx, i, 0x0fff0000);
+    nv_wo32(ctx, 0x04ec, 0x00011100);
     for (i = 0x0508; i < 0x0548; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x07ff0000);
-    nv_wo32(dev, ctx, 0x0550/4, 0x4b7fffff);
-    nv_wo32(dev, ctx, 0x058c/4, 0x00000080);
-    nv_wo32(dev, ctx, 0x0590/4, 0x30201000);
-    nv_wo32(dev, ctx, 0x0594/4, 0x70605040);
-    nv_wo32(dev, ctx, 0x0598/4, 0xb8a89888);
-    nv_wo32(dev, ctx, 0x059c/4, 0xf8e8d8c8);
-    nv_wo32(dev, ctx, 0x05b0/4, 0xb0000000);
+        nv_wo32(ctx, i, 0x07ff0000);
+    nv_wo32(ctx, 0x0550, 0x4b7fffff);
+    nv_wo32(ctx, 0x058c, 0x00000080);
+    nv_wo32(ctx, 0x0590, 0x30201000);
+    nv_wo32(ctx, 0x0594, 0x70605040);
+    nv_wo32(ctx, 0x0598, 0xb8a89888);
+    nv_wo32(ctx, 0x059c, 0xf8e8d8c8);
+    nv_wo32(ctx, 0x05b0, 0xb0000000);
     for (i = 0x0600; i < 0x0640; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00010588);
+        nv_wo32(ctx, i, 0x00010588);
     for (i = 0x0640; i < 0x0680; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00030303);
+        nv_wo32(ctx, i, 0x00030303);
     for (i = 0x06c0; i < 0x0700; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x0008aae4);
+        nv_wo32(ctx, i, 0x0008aae4);
     for (i = 0x0700; i < 0x0740; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x01012000);
+        nv_wo32(ctx, i, 0x01012000);
     for (i = 0x0740; i < 0x0780; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00080008);
-    nv_wo32(dev, ctx, 0x085c/4, 0x00040000);
-    nv_wo32(dev, ctx, 0x0860/4, 0x00010000);
+        nv_wo32(ctx, i, 0x00080008);
+    nv_wo32(ctx, 0x085c, 0x00040000);
+    nv_wo32(ctx, 0x0860, 0x00010000);
     for (i = 0x0864; i < 0x0874; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00040004);
+        nv_wo32(ctx, i, 0x00040004);
     for (i = 0x1f18; i <= 0x3088 ; i += 16) {
-        nv_wo32(dev, ctx, i/4 + 0, 0x10700ff9);
-        nv_wo32(dev, ctx, i/4 + 1, 0x0436086c);
-        nv_wo32(dev, ctx, i/4 + 2, 0x000c001b);
+        nv_wo32(ctx, i + 0, 0x10700ff9);
+        nv_wo32(ctx, i + 1, 0x0436086c);
+        nv_wo32(ctx, i + 2, 0x000c001b);
     }
     for (i = 0x30b8; i < 0x30c8; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x0000ffff);
-    nv_wo32(dev, ctx, 0x344c/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x3808/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x381c/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x3848/4, 0x40000000);
-    nv_wo32(dev, ctx, 0x384c/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x3850/4, 0x3f000000);
-    nv_wo32(dev, ctx, 0x3858/4, 0x40000000);
-    nv_wo32(dev, ctx, 0x385c/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x3864/4, 0xbf800000);
-    nv_wo32(dev, ctx, 0x386c/4, 0xbf800000);
+        nv_wo32(ctx, i, 0x0000ffff);
+    nv_wo32(ctx, 0x344c, 0x3f800000);
+    nv_wo32(ctx, 0x3808, 0x3f800000);
+    nv_wo32(ctx, 0x381c, 0x3f800000);
+    nv_wo32(ctx, 0x3848, 0x40000000);
+    nv_wo32(ctx, 0x384c, 0x3f800000);
+    nv_wo32(ctx, 0x3850, 0x3f000000);
+    nv_wo32(ctx, 0x3858, 0x40000000);
+    nv_wo32(ctx, 0x385c, 0x3f800000);
+    nv_wo32(ctx, 0x3864, 0xbf800000);
+    nv_wo32(ctx, 0x386c, 0xbf800000);
 }
 
 static void
@@ -254,57 +254,57 @@ nv34_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx)
 {
     int i;
 
-    nv_wo32(dev, ctx, 0x040c/4, 0x01000101);
-    nv_wo32(dev, ctx, 0x0420/4, 0x00000111);
-    nv_wo32(dev, ctx, 0x0424/4, 0x00000060);
-    nv_wo32(dev, ctx, 0x0440/4, 0x00000080);
-    nv_wo32(dev, ctx, 0x0444/4, 0xffff0000);
-    nv_wo32(dev, ctx, 0x0448/4, 0x00000001);
-    nv_wo32(dev, ctx, 0x045c/4, 0x44400000);
-    nv_wo32(dev, ctx, 0x0480/4, 0xffff0000);
+    nv_wo32(ctx, 0x040c, 0x01000101);
+    nv_wo32(ctx, 0x0420, 0x00000111);
+    nv_wo32(ctx, 0x0424, 0x00000060);
+    nv_wo32(ctx, 0x0440, 0x00000080);
+    nv_wo32(ctx, 0x0444, 0xffff0000);
+    nv_wo32(ctx, 0x0448, 0x00000001);
+    nv_wo32(ctx, 0x045c, 0x44400000);
+    nv_wo32(ctx, 0x0480, 0xffff0000);
     for (i = 0x04d4; i < 0x04dc; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x0fff0000);
-    nv_wo32(dev, ctx, 0x04e0/4, 0x00011100);
+        nv_wo32(ctx, i, 0x0fff0000);
+    nv_wo32(ctx, 0x04e0, 0x00011100);
     for (i = 0x04fc; i < 0x053c; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x07ff0000);
-    nv_wo32(dev, ctx, 0x0544/4, 0x4b7fffff);
-    nv_wo32(dev, ctx, 0x057c/4, 0x00000080);
-    nv_wo32(dev, ctx, 0x0580/4, 0x30201000);
-    nv_wo32(dev, ctx, 0x0584/4, 0x70605040);
-    nv_wo32(dev, ctx, 0x0588/4, 0xb8a89888);
-    nv_wo32(dev, ctx, 0x058c/4, 0xf8e8d8c8);
-    nv_wo32(dev, ctx, 0x05a0/4, 0xb0000000);
+        nv_wo32(ctx, i, 0x07ff0000);
+    nv_wo32(ctx, 0x0544, 0x4b7fffff);
+    nv_wo32(ctx, 0x057c, 0x00000080);
+    nv_wo32(ctx, 0x0580, 0x30201000);
+    nv_wo32(ctx, 0x0584, 0x70605040);
+    nv_wo32(ctx, 0x0588, 0xb8a89888);
+    nv_wo32(ctx, 0x058c, 0xf8e8d8c8);
+    nv_wo32(ctx, 0x05a0, 0xb0000000);
     for (i = 0x05f0; i < 0x0630; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00010588);
+        nv_wo32(ctx, i, 0x00010588);
     for (i = 0x0630; i < 0x0670; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00030303);
+        nv_wo32(ctx, i, 0x00030303);
     for (i = 0x06b0; i < 0x06f0; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x0008aae4);
+        nv_wo32(ctx, i, 0x0008aae4);
     for (i = 0x06f0; i < 0x0730; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x01012000);
+        nv_wo32(ctx, i, 0x01012000);
     for (i = 0x0730; i < 0x0770; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00080008);
-    nv_wo32(dev, ctx, 0x0850/4, 0x00040000);
-    nv_wo32(dev, ctx, 0x0854/4, 0x00010000);
+        nv_wo32(ctx, i, 0x00080008);
+    nv_wo32(ctx, 0x0850, 0x00040000);
+    nv_wo32(ctx, 0x0854, 0x00010000);
     for (i = 0x0858; i < 0x0868; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00040004);
+        nv_wo32(ctx, i, 0x00040004);
     for (i = 0x15ac; i <= 0x271c ; i += 16) {
-        nv_wo32(dev, ctx, i/4 + 0, 0x10700ff9);
-        nv_wo32(dev, ctx, i/4 + 1, 0x0436086c);
-        nv_wo32(dev, ctx, i/4 + 2, 0x000c001b);
+        nv_wo32(ctx, i + 0, 0x10700ff9);
+        nv_wo32(ctx, i + 1, 0x0436086c);
+        nv_wo32(ctx, i + 2, 0x000c001b);
     }
     for (i = 0x274c; i < 0x275c; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x0000ffff);
-    nv_wo32(dev, ctx, 0x2ae0/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x2e9c/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x2eb0/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x2edc/4, 0x40000000);
-    nv_wo32(dev, ctx, 0x2ee0/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x2ee4/4, 0x3f000000);
-    nv_wo32(dev, ctx, 0x2eec/4, 0x40000000);
-    nv_wo32(dev, ctx, 0x2ef0/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x2ef8/4, 0xbf800000);
-    nv_wo32(dev, ctx, 0x2f00/4, 0xbf800000);
+        nv_wo32(ctx, i, 0x0000ffff);
+    nv_wo32(ctx, 0x2ae0, 0x3f800000);
+    nv_wo32(ctx, 0x2e9c, 0x3f800000);
+    nv_wo32(ctx, 0x2eb0, 0x3f800000);
+    nv_wo32(ctx, 0x2edc, 0x40000000);
+    nv_wo32(ctx, 0x2ee0, 0x3f800000);
+    nv_wo32(ctx, 0x2ee4, 0x3f000000);
+    nv_wo32(ctx, 0x2eec, 0x40000000);
+    nv_wo32(ctx, 0x2ef0, 0x3f800000);
+    nv_wo32(ctx, 0x2ef8, 0xbf800000);
+    nv_wo32(ctx, 0x2f00, 0xbf800000);
 }
 
 static void
@@ -312,57 +312,57 @@ nv35_36_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx)
 {
     int i;
 
-    nv_wo32(dev, ctx, 0x040c/4, 0x00000101);
-    nv_wo32(dev, ctx, 0x0420/4, 0x00000111);
-    nv_wo32(dev, ctx, 0x0424/4, 0x00000060);
-    nv_wo32(dev, ctx, 0x0440/4, 0x00000080);
-    nv_wo32(dev, ctx, 0x0444/4, 0xffff0000);
-    nv_wo32(dev, ctx, 0x0448/4, 0x00000001);
-    nv_wo32(dev, ctx, 0x045c/4, 0x44400000);
-    nv_wo32(dev, ctx, 0x0488/4, 0xffff0000);
+    nv_wo32(ctx, 0x040c, 0x00000101);
+    nv_wo32(ctx, 0x0420, 0x00000111);
+    nv_wo32(ctx, 0x0424, 0x00000060);
+    nv_wo32(ctx, 0x0440, 0x00000080);
+    nv_wo32(ctx, 0x0444, 0xffff0000);
+    nv_wo32(ctx, 0x0448, 0x00000001);
+    nv_wo32(ctx, 0x045c, 0x44400000);
+    nv_wo32(ctx, 0x0488, 0xffff0000);
     for (i = 0x04dc; i < 0x04e4; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x0fff0000);
-    nv_wo32(dev, ctx, 0x04e8/4, 0x00011100);
+        nv_wo32(ctx, i, 0x0fff0000);
+    nv_wo32(ctx, 0x04e8, 0x00011100);
     for (i = 0x0504; i < 0x0544; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x07ff0000);
-    nv_wo32(dev, ctx, 0x054c/4, 0x4b7fffff);
-    nv_wo32(dev, ctx, 0x0588/4, 0x00000080);
-    nv_wo32(dev, ctx, 0x058c/4, 0x30201000);
-    nv_wo32(dev, ctx, 0x0590/4, 0x70605040);
-    nv_wo32(dev, ctx, 0x0594/4, 0xb8a89888);
-    nv_wo32(dev, ctx, 0x0598/4, 0xf8e8d8c8);
-    nv_wo32(dev, ctx, 0x05ac/4, 0xb0000000);
+        nv_wo32(ctx, i, 0x07ff0000);
+    nv_wo32(ctx, 0x054c, 0x4b7fffff);
+    nv_wo32(ctx, 0x0588, 0x00000080);
+    nv_wo32(ctx, 0x058c, 0x30201000);
+    nv_wo32(ctx, 0x0590, 0x70605040);
+    nv_wo32(ctx, 0x0594, 0xb8a89888);
+    nv_wo32(ctx, 0x0598, 0xf8e8d8c8);
+    nv_wo32(ctx, 0x05ac, 0xb0000000);
     for (i = 0x0604; i < 0x0644; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00010588);
+        nv_wo32(ctx, i, 0x00010588);
     for (i = 0x0644; i < 0x0684; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00030303);
+        nv_wo32(ctx, i, 0x00030303);
     for (i = 0x06c4; i < 0x0704; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x0008aae4);
+        nv_wo32(ctx, i, 0x0008aae4);
     for (i = 0x0704; i < 0x0744; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x01012000);
+        nv_wo32(ctx, i, 0x01012000);
     for (i = 0x0744; i < 0x0784; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00080008);
-    nv_wo32(dev, ctx, 0x0860/4, 0x00040000);
-    nv_wo32(dev, ctx, 0x0864/4, 0x00010000);
+        nv_wo32(ctx, i, 0x00080008);
+    nv_wo32(ctx, 0x0860, 0x00040000);
+    nv_wo32(ctx, 0x0864, 0x00010000);
     for (i = 0x0868; i < 0x0878; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x00040004);
+        nv_wo32(ctx, i, 0x00040004);
     for (i = 0x1f1c; i <= 0x308c ; i += 16) {
-        nv_wo32(dev, ctx, i/4 + 0, 0x10700ff9);
-        nv_wo32(dev, ctx, i/4 + 1, 0x0436086c);
-        nv_wo32(dev, ctx, i/4 + 2, 0x000c001b);
+        nv_wo32(ctx, i + 0, 0x10700ff9);
+        nv_wo32(ctx, i + 4, 0x0436086c);
+        nv_wo32(ctx, i + 8, 0x000c001b);
     }
     for (i = 0x30bc; i < 0x30cc; i += 4)
-        nv_wo32(dev, ctx, i/4, 0x0000ffff);
-    nv_wo32(dev, ctx, 0x3450/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x380c/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x3820/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x384c/4, 0x40000000);
-    nv_wo32(dev, ctx, 0x3850/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x3854/4, 0x3f000000);
-    nv_wo32(dev, ctx, 0x385c/4, 0x40000000);
-    nv_wo32(dev, ctx, 0x3860/4, 0x3f800000);
-    nv_wo32(dev, ctx, 0x3868/4, 0xbf800000);
-    nv_wo32(dev, ctx, 0x3870/4, 0xbf800000);
+        nv_wo32(ctx, i, 0x0000ffff);
+    nv_wo32(ctx, 0x3450, 0x3f800000);
+    nv_wo32(ctx, 0x380c, 0x3f800000);
+    nv_wo32(ctx, 0x3820, 0x3f800000);
+    nv_wo32(ctx, 0x384c, 0x40000000);
+    nv_wo32(ctx, 0x3850, 0x3f800000);
+    nv_wo32(ctx, 0x3854, 0x3f000000);
+    nv_wo32(ctx, 0x385c, 0x40000000);
+    nv_wo32(ctx, 0x3860, 0x3f800000);
+    nv_wo32(ctx, 0x3868, 0xbf800000);
+    nv_wo32(ctx, 0x3870, 0xbf800000);
 }
 
 int
@@ -372,7 +372,7 @@ nv20_graph_create_context(struct nouveau_channel *chan)
     struct drm_nouveau_private *dev_priv = dev->dev_private;
     struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
     void (*ctx_init)(struct drm_device *, struct nouveau_gpuobj *);
-    unsigned int idoffs = 0x28/4;
+    unsigned int idoffs = 0x28;
     int ret;
 
     switch (dev_priv->chipset) {
@@ -413,11 +413,11 @@ nv20_graph_create_context(struct nouveau_channel *chan)
     ctx_init(dev, chan->ramin_grctx->gpuobj);
 
     /* nv20: nv_wo32(dev, chan->ramin_grctx->gpuobj, 10, chan->id<<24); */
-    nv_wo32(dev, chan->ramin_grctx->gpuobj, idoffs,
+    nv_wo32(chan->ramin_grctx->gpuobj, idoffs,
         (chan->id << 24) | 0x1); /* CTX_USER */
 
-    nv_wo32(dev, pgraph->ctx_table->gpuobj, chan->id,
+    nv_wo32(pgraph->ctx_table->gpuobj, chan->id * 4,
         chan->ramin_grctx->instance >> 4);
     return 0;
 }
 
@@ -431,7 +431,7 @@ nv20_graph_destroy_context(struct nouveau_channel *chan)
     if (chan->ramin_grctx)
         nouveau_gpuobj_ref_del(dev, &chan->ramin_grctx);
 
-    nv_wo32(dev, pgraph->ctx_table->gpuobj, chan->id, 0);
+    nv_wo32(pgraph->ctx_table->gpuobj, chan->id * 4, 0);
 }
 
 int
diff --git a/drivers/gpu/drm/nouveau/nv40_graph.c b/drivers/gpu/drm/nouveau/nv40_graph.c
index fd7d2b501316..6215dfcf1ea9 100644
--- a/drivers/gpu/drm/nouveau/nv40_graph.c
+++ b/drivers/gpu/drm/nouveau/nv40_graph.c
@@ -73,8 +73,8 @@ nv40_graph_create_context(struct nouveau_channel *chan)
     ctx.data = chan->ramin_grctx->gpuobj;
     nv40_grctx_init(&ctx);
 
-    nv_wo32(dev, chan->ramin_grctx->gpuobj, 0,
+    nv_wo32(chan->ramin_grctx->gpuobj, 0,
         chan->ramin_grctx->gpuobj->im_pramin->start);
     return 0;
 }
 
diff --git a/drivers/gpu/drm/nouveau/nv40_grctx.c b/drivers/gpu/drm/nouveau/nv40_grctx.c
index 9b5c97469588..ce585093264e 100644
--- a/drivers/gpu/drm/nouveau/nv40_grctx.c
+++ b/drivers/gpu/drm/nouveau/nv40_grctx.c
@@ -596,13 +596,13 @@ nv40_graph_construct_shader(struct nouveau_grctx *ctx)
 
     offset += 0x0280/4;
     for (i = 0; i < 16; i++, offset += 2)
-        nv_wo32(dev, obj, offset, 0x3f800000);
+        nv_wo32(obj, offset * 4, 0x3f800000);
 
     for (vs = 0; vs < vs_nr; vs++, offset += vs_len) {
         for (i = 0; i < vs_nr_b0 * 6; i += 6)
-            nv_wo32(dev, obj, offset + b0_offset + i, 0x00000001);
+            nv_wo32(obj, (offset + b0_offset + i) * 4, 0x00000001);
         for (i = 0; i < vs_nr_b1 * 4; i += 4)
-            nv_wo32(dev, obj, offset + b1_offset + i, 0x3f800000);
+            nv_wo32(obj, (offset + b1_offset + i) * 4, 0x3f800000);
     }
 }
 
diff --git a/drivers/gpu/drm/nouveau/nv50_display.c b/drivers/gpu/drm/nouveau/nv50_display.c
index c87f8744866f..435d2b727949 100644
--- a/drivers/gpu/drm/nouveau/nv50_display.c
+++ b/drivers/gpu/drm/nouveau/nv50_display.c
@@ -72,15 +72,15 @@ nv50_evo_dmaobj_new(struct nouveau_channel *evo, uint32_t class, uint32_t name,
         return ret;
     }
 
-    nv_wo32(dev, obj, 0, (tile_flags << 22) | (magic_flags << 16) | class);
-    nv_wo32(dev, obj, 1, limit);
-    nv_wo32(dev, obj, 2, offset);
-    nv_wo32(dev, obj, 3, 0x00000000);
-    nv_wo32(dev, obj, 4, 0x00000000);
+    nv_wo32(obj, 0, (tile_flags << 22) | (magic_flags << 16) | class);
+    nv_wo32(obj, 4, limit);
+    nv_wo32(obj, 8, offset);
+    nv_wo32(obj, 12, 0x00000000);
+    nv_wo32(obj, 16, 0x00000000);
     if (dev_priv->card_type < NV_C0)
-        nv_wo32(dev, obj, 5, 0x00010000);
+        nv_wo32(obj, 20, 0x00010000);
     else
-        nv_wo32(dev, obj, 5, 0x00020000);
+        nv_wo32(obj, 20, 0x00020000);
     dev_priv->engine.instmem.flush(dev);
 
     return 0;
diff --git a/drivers/gpu/drm/nouveau/nv50_fifo.c b/drivers/gpu/drm/nouveau/nv50_fifo.c
index fb0281ae8f90..38dbcda86196 100644
--- a/drivers/gpu/drm/nouveau/nv50_fifo.c
+++ b/drivers/gpu/drm/nouveau/nv50_fifo.c
@@ -43,8 +43,10 @@ nv50_fifo_playlist_update(struct drm_device *dev)
 
     /* We never schedule channel 0 or 127 */
     for (i = 1, nr = 0; i < 127; i++) {
-        if (dev_priv->fifos[i] && dev_priv->fifos[i]->ramfc)
-            nv_wo32(dev, cur->gpuobj, nr++, i);
+        if (dev_priv->fifos[i] && dev_priv->fifos[i]->ramfc) {
+            nv_wo32(cur->gpuobj, (nr * 4), i);
+            nr++;
+        }
     }
     dev_priv->engine.instmem.flush(dev);
 
@@ -258,27 +260,25 @@ nv50_fifo_create_context(struct nouveau_channel *chan)
 
     spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
 
-    nv_wo32(dev, ramfc, 0x48/4, chan->pushbuf->instance >> 4);
-    nv_wo32(dev, ramfc, 0x80/4, (0 << 27) /* 4KiB */ |
+    nv_wo32(ramfc, 0x48, chan->pushbuf->instance >> 4);
+    nv_wo32(ramfc, 0x80, (0 << 27) /* 4KiB */ |
            (4 << 24) /* SEARCH_FULL */ |
            (chan->ramht->instance >> 4));
-    nv_wo32(dev, ramfc, 0x44/4, 0x2101ffff);
-    nv_wo32(dev, ramfc, 0x60/4, 0x7fffffff);
-    nv_wo32(dev, ramfc, 0x40/4, 0x00000000);
-    nv_wo32(dev, ramfc, 0x7c/4, 0x30000001);
-    nv_wo32(dev, ramfc, 0x78/4, 0x00000000);
-    nv_wo32(dev, ramfc, 0x3c/4, 0x403f6078);
-    nv_wo32(dev, ramfc, 0x50/4, chan->pushbuf_base +
-           chan->dma.ib_base * 4);
-    nv_wo32(dev, ramfc, 0x54/4, drm_order(chan->dma.ib_max + 1) << 16);
+    nv_wo32(ramfc, 0x44, 0x2101ffff);
+    nv_wo32(ramfc, 0x60, 0x7fffffff);
+    nv_wo32(ramfc, 0x40, 0x00000000);
+    nv_wo32(ramfc, 0x7c, 0x30000001);
+    nv_wo32(ramfc, 0x78, 0x00000000);
+    nv_wo32(ramfc, 0x3c, 0x403f6078);
+    nv_wo32(ramfc, 0x50, chan->pushbuf_base + chan->dma.ib_base * 4);
+    nv_wo32(ramfc, 0x54, drm_order(chan->dma.ib_max + 1) << 16);
 
     if (dev_priv->chipset != 0x50) {
-        nv_wo32(dev, chan->ramin->gpuobj, 0, chan->id);
-        nv_wo32(dev, chan->ramin->gpuobj, 1,
-            chan->ramfc->instance >> 8);
+        nv_wo32(chan->ramin->gpuobj, 0, chan->id);
+        nv_wo32(chan->ramin->gpuobj, 4, chan->ramfc->instance >> 8);
 
-        nv_wo32(dev, ramfc, 0x88/4, chan->cache->instance >> 10);
-        nv_wo32(dev, ramfc, 0x98/4, chan->ramin->instance >> 12);
+        nv_wo32(ramfc, 0x88, chan->cache->instance >> 10);
+        nv_wo32(ramfc, 0x98, chan->ramin->instance >> 12);
     }
 
     dev_priv->engine.instmem.flush(dev);
@@ -321,57 +321,57 @@ nv50_fifo_load_context(struct nouveau_channel *chan)
 
     NV_DEBUG(dev, "ch%d\n", chan->id);
 
-    nv_wr32(dev, 0x3330, nv_ro32(dev, ramfc, 0x00/4));
-    nv_wr32(dev, 0x3334, nv_ro32(dev, ramfc, 0x04/4));
-    nv_wr32(dev, 0x3240, nv_ro32(dev, ramfc, 0x08/4));
-    nv_wr32(dev, 0x3320, nv_ro32(dev, ramfc, 0x0c/4));
-    nv_wr32(dev, 0x3244, nv_ro32(dev, ramfc, 0x10/4));
-    nv_wr32(dev, 0x3328, nv_ro32(dev, ramfc, 0x14/4));
-    nv_wr32(dev, 0x3368, nv_ro32(dev, ramfc, 0x18/4));
-    nv_wr32(dev, 0x336c, nv_ro32(dev, ramfc, 0x1c/4));
-    nv_wr32(dev, 0x3370, nv_ro32(dev, ramfc, 0x20/4));
-    nv_wr32(dev, 0x3374, nv_ro32(dev, ramfc, 0x24/4));
-    nv_wr32(dev, 0x3378, nv_ro32(dev, ramfc, 0x28/4));
-    nv_wr32(dev, 0x337c, nv_ro32(dev, ramfc, 0x2c/4));
-    nv_wr32(dev, 0x3228, nv_ro32(dev, ramfc, 0x30/4));
-    nv_wr32(dev, 0x3364, nv_ro32(dev, ramfc, 0x34/4));
-    nv_wr32(dev, 0x32a0, nv_ro32(dev, ramfc, 0x38/4));
-    nv_wr32(dev, 0x3224, nv_ro32(dev, ramfc, 0x3c/4));
-    nv_wr32(dev, 0x324c, nv_ro32(dev, ramfc, 0x40/4));
+    nv_wr32(dev, 0x3330, nv_ro32(ramfc, 0x00));
+    nv_wr32(dev, 0x3334, nv_ro32(ramfc, 0x04));
+    nv_wr32(dev, 0x3240, nv_ro32(ramfc, 0x08));
+    nv_wr32(dev, 0x3320, nv_ro32(ramfc, 0x0c));
+    nv_wr32(dev, 0x3244, nv_ro32(ramfc, 0x10));
+    nv_wr32(dev, 0x3328, nv_ro32(ramfc, 0x14));
+    nv_wr32(dev, 0x3368, nv_ro32(ramfc, 0x18));
+    nv_wr32(dev, 0x336c, nv_ro32(ramfc, 0x1c));
+    nv_wr32(dev, 0x3370, nv_ro32(ramfc, 0x20));
+    nv_wr32(dev, 0x3374, nv_ro32(ramfc, 0x24));
+    nv_wr32(dev, 0x3378, nv_ro32(ramfc, 0x28));
+    nv_wr32(dev, 0x337c, nv_ro32(ramfc, 0x2c));
+    nv_wr32(dev, 0x3228, nv_ro32(ramfc, 0x30));
+    nv_wr32(dev, 0x3364, nv_ro32(ramfc, 0x34));
+    nv_wr32(dev, 0x32a0, nv_ro32(ramfc, 0x38));
+    nv_wr32(dev, 0x3224, nv_ro32(ramfc, 0x3c));
+    nv_wr32(dev, 0x324c, nv_ro32(ramfc, 0x40));
341 | nv_wr32(dev, 0x2044, nv_ro32(dev, ramfc, 0x44/4)); | 341 | nv_wr32(dev, 0x2044, nv_ro32(ramfc, 0x44)); |
342 | nv_wr32(dev, 0x322c, nv_ro32(dev, ramfc, 0x48/4)); | 342 | nv_wr32(dev, 0x322c, nv_ro32(ramfc, 0x48)); |
343 | nv_wr32(dev, 0x3234, nv_ro32(dev, ramfc, 0x4c/4)); | 343 | nv_wr32(dev, 0x3234, nv_ro32(ramfc, 0x4c)); |
344 | nv_wr32(dev, 0x3340, nv_ro32(dev, ramfc, 0x50/4)); | 344 | nv_wr32(dev, 0x3340, nv_ro32(ramfc, 0x50)); |
345 | nv_wr32(dev, 0x3344, nv_ro32(dev, ramfc, 0x54/4)); | 345 | nv_wr32(dev, 0x3344, nv_ro32(ramfc, 0x54)); |
346 | nv_wr32(dev, 0x3280, nv_ro32(dev, ramfc, 0x58/4)); | 346 | nv_wr32(dev, 0x3280, nv_ro32(ramfc, 0x58)); |
347 | nv_wr32(dev, 0x3254, nv_ro32(dev, ramfc, 0x5c/4)); | 347 | nv_wr32(dev, 0x3254, nv_ro32(ramfc, 0x5c)); |
348 | nv_wr32(dev, 0x3260, nv_ro32(dev, ramfc, 0x60/4)); | 348 | nv_wr32(dev, 0x3260, nv_ro32(ramfc, 0x60)); |
349 | nv_wr32(dev, 0x3264, nv_ro32(dev, ramfc, 0x64/4)); | 349 | nv_wr32(dev, 0x3264, nv_ro32(ramfc, 0x64)); |
350 | nv_wr32(dev, 0x3268, nv_ro32(dev, ramfc, 0x68/4)); | 350 | nv_wr32(dev, 0x3268, nv_ro32(ramfc, 0x68)); |
351 | nv_wr32(dev, 0x326c, nv_ro32(dev, ramfc, 0x6c/4)); | 351 | nv_wr32(dev, 0x326c, nv_ro32(ramfc, 0x6c)); |
352 | nv_wr32(dev, 0x32e4, nv_ro32(dev, ramfc, 0x70/4)); | 352 | nv_wr32(dev, 0x32e4, nv_ro32(ramfc, 0x70)); |
353 | nv_wr32(dev, 0x3248, nv_ro32(dev, ramfc, 0x74/4)); | 353 | nv_wr32(dev, 0x3248, nv_ro32(ramfc, 0x74)); |
354 | nv_wr32(dev, 0x2088, nv_ro32(dev, ramfc, 0x78/4)); | 354 | nv_wr32(dev, 0x2088, nv_ro32(ramfc, 0x78)); |
355 | nv_wr32(dev, 0x2058, nv_ro32(dev, ramfc, 0x7c/4)); | 355 | nv_wr32(dev, 0x2058, nv_ro32(ramfc, 0x7c)); |
356 | nv_wr32(dev, 0x2210, nv_ro32(dev, ramfc, 0x80/4)); | 356 | nv_wr32(dev, 0x2210, nv_ro32(ramfc, 0x80)); |
357 | 357 | ||
358 | cnt = nv_ro32(dev, ramfc, 0x84/4); | 358 | cnt = nv_ro32(ramfc, 0x84); |
359 | for (ptr = 0; ptr < cnt; ptr++) { | 359 | for (ptr = 0; ptr < cnt; ptr++) { |
360 | nv_wr32(dev, NV40_PFIFO_CACHE1_METHOD(ptr), | 360 | nv_wr32(dev, NV40_PFIFO_CACHE1_METHOD(ptr), |
361 | nv_ro32(dev, cache, (ptr * 2) + 0)); | 361 | nv_ro32(cache, (ptr * 8) + 0)); |
362 | nv_wr32(dev, NV40_PFIFO_CACHE1_DATA(ptr), | 362 | nv_wr32(dev, NV40_PFIFO_CACHE1_DATA(ptr), |
363 | nv_ro32(dev, cache, (ptr * 2) + 1)); | 363 | nv_ro32(cache, (ptr * 8) + 4)); |
364 | } | 364 | } |
365 | nv_wr32(dev, NV03_PFIFO_CACHE1_PUT, cnt << 2); | 365 | nv_wr32(dev, NV03_PFIFO_CACHE1_PUT, cnt << 2); |
366 | nv_wr32(dev, NV03_PFIFO_CACHE1_GET, 0); | 366 | nv_wr32(dev, NV03_PFIFO_CACHE1_GET, 0); |
367 | 367 | ||
368 | /* guessing that all the 0x34xx regs aren't on NV50 */ | 368 | /* guessing that all the 0x34xx regs aren't on NV50 */ |
369 | if (dev_priv->chipset != 0x50) { | 369 | if (dev_priv->chipset != 0x50) { |
370 | nv_wr32(dev, 0x340c, nv_ro32(dev, ramfc, 0x88/4)); | 370 | nv_wr32(dev, 0x340c, nv_ro32(ramfc, 0x88)); |
371 | nv_wr32(dev, 0x3400, nv_ro32(dev, ramfc, 0x8c/4)); | 371 | nv_wr32(dev, 0x3400, nv_ro32(ramfc, 0x8c)); |
372 | nv_wr32(dev, 0x3404, nv_ro32(dev, ramfc, 0x90/4)); | 372 | nv_wr32(dev, 0x3404, nv_ro32(ramfc, 0x90)); |
373 | nv_wr32(dev, 0x3408, nv_ro32(dev, ramfc, 0x94/4)); | 373 | nv_wr32(dev, 0x3408, nv_ro32(ramfc, 0x94)); |
374 | nv_wr32(dev, 0x3410, nv_ro32(dev, ramfc, 0x98/4)); | 374 | nv_wr32(dev, 0x3410, nv_ro32(ramfc, 0x98)); |
375 | } | 375 | } |
376 | 376 | ||
377 | nv_wr32(dev, NV03_PFIFO_CACHE1_PUSH1, chan->id | (1<<16)); | 377 | nv_wr32(dev, NV03_PFIFO_CACHE1_PUSH1, chan->id | (1<<16)); |
@@ -402,59 +402,60 @@ nv50_fifo_unload_context(struct drm_device *dev) | |||
402 | ramfc = chan->ramfc->gpuobj; | 402 | ramfc = chan->ramfc->gpuobj; |
403 | cache = chan->cache->gpuobj; | 403 | cache = chan->cache->gpuobj; |
404 | 404 | ||
405 | nv_wo32(dev, ramfc, 0x00/4, nv_rd32(dev, 0x3330)); | 405 | nv_wo32(ramfc, 0x00, nv_rd32(dev, 0x3330)); |
406 | nv_wo32(dev, ramfc, 0x04/4, nv_rd32(dev, 0x3334)); | 406 | nv_wo32(ramfc, 0x04, nv_rd32(dev, 0x3334)); |
407 | nv_wo32(dev, ramfc, 0x08/4, nv_rd32(dev, 0x3240)); | 407 | nv_wo32(ramfc, 0x08, nv_rd32(dev, 0x3240)); |
408 | nv_wo32(dev, ramfc, 0x0c/4, nv_rd32(dev, 0x3320)); | 408 | nv_wo32(ramfc, 0x0c, nv_rd32(dev, 0x3320)); |
409 | nv_wo32(dev, ramfc, 0x10/4, nv_rd32(dev, 0x3244)); | 409 | nv_wo32(ramfc, 0x10, nv_rd32(dev, 0x3244)); |
410 | nv_wo32(dev, ramfc, 0x14/4, nv_rd32(dev, 0x3328)); | 410 | nv_wo32(ramfc, 0x14, nv_rd32(dev, 0x3328)); |
411 | nv_wo32(dev, ramfc, 0x18/4, nv_rd32(dev, 0x3368)); | 411 | nv_wo32(ramfc, 0x18, nv_rd32(dev, 0x3368)); |
412 | nv_wo32(dev, ramfc, 0x1c/4, nv_rd32(dev, 0x336c)); | 412 | nv_wo32(ramfc, 0x1c, nv_rd32(dev, 0x336c)); |
413 | nv_wo32(dev, ramfc, 0x20/4, nv_rd32(dev, 0x3370)); | 413 | nv_wo32(ramfc, 0x20, nv_rd32(dev, 0x3370)); |
414 | nv_wo32(dev, ramfc, 0x24/4, nv_rd32(dev, 0x3374)); | 414 | nv_wo32(ramfc, 0x24, nv_rd32(dev, 0x3374)); |
415 | nv_wo32(dev, ramfc, 0x28/4, nv_rd32(dev, 0x3378)); | 415 | nv_wo32(ramfc, 0x28, nv_rd32(dev, 0x3378)); |
416 | nv_wo32(dev, ramfc, 0x2c/4, nv_rd32(dev, 0x337c)); | 416 | nv_wo32(ramfc, 0x2c, nv_rd32(dev, 0x337c)); |
417 | nv_wo32(dev, ramfc, 0x30/4, nv_rd32(dev, 0x3228)); | 417 | nv_wo32(ramfc, 0x30, nv_rd32(dev, 0x3228)); |
418 | nv_wo32(dev, ramfc, 0x34/4, nv_rd32(dev, 0x3364)); | 418 | nv_wo32(ramfc, 0x34, nv_rd32(dev, 0x3364)); |
419 | nv_wo32(dev, ramfc, 0x38/4, nv_rd32(dev, 0x32a0)); | 419 | nv_wo32(ramfc, 0x38, nv_rd32(dev, 0x32a0)); |
420 | nv_wo32(dev, ramfc, 0x3c/4, nv_rd32(dev, 0x3224)); | 420 | nv_wo32(ramfc, 0x3c, nv_rd32(dev, 0x3224)); |
421 | nv_wo32(dev, ramfc, 0x40/4, nv_rd32(dev, 0x324c)); | 421 | nv_wo32(ramfc, 0x40, nv_rd32(dev, 0x324c)); |
422 | nv_wo32(dev, ramfc, 0x44/4, nv_rd32(dev, 0x2044)); | 422 | nv_wo32(ramfc, 0x44, nv_rd32(dev, 0x2044)); |
423 | nv_wo32(dev, ramfc, 0x48/4, nv_rd32(dev, 0x322c)); | 423 | nv_wo32(ramfc, 0x48, nv_rd32(dev, 0x322c)); |
424 | nv_wo32(dev, ramfc, 0x4c/4, nv_rd32(dev, 0x3234)); | 424 | nv_wo32(ramfc, 0x4c, nv_rd32(dev, 0x3234)); |
425 | nv_wo32(dev, ramfc, 0x50/4, nv_rd32(dev, 0x3340)); | 425 | nv_wo32(ramfc, 0x50, nv_rd32(dev, 0x3340)); |
426 | nv_wo32(dev, ramfc, 0x54/4, nv_rd32(dev, 0x3344)); | 426 | nv_wo32(ramfc, 0x54, nv_rd32(dev, 0x3344)); |
427 | nv_wo32(dev, ramfc, 0x58/4, nv_rd32(dev, 0x3280)); | 427 | nv_wo32(ramfc, 0x58, nv_rd32(dev, 0x3280)); |
428 | nv_wo32(dev, ramfc, 0x5c/4, nv_rd32(dev, 0x3254)); | 428 | nv_wo32(ramfc, 0x5c, nv_rd32(dev, 0x3254)); |
429 | nv_wo32(dev, ramfc, 0x60/4, nv_rd32(dev, 0x3260)); | 429 | nv_wo32(ramfc, 0x60, nv_rd32(dev, 0x3260)); |
430 | nv_wo32(dev, ramfc, 0x64/4, nv_rd32(dev, 0x3264)); | 430 | nv_wo32(ramfc, 0x64, nv_rd32(dev, 0x3264)); |
431 | nv_wo32(dev, ramfc, 0x68/4, nv_rd32(dev, 0x3268)); | 431 | nv_wo32(ramfc, 0x68, nv_rd32(dev, 0x3268)); |
432 | nv_wo32(dev, ramfc, 0x6c/4, nv_rd32(dev, 0x326c)); | 432 | nv_wo32(ramfc, 0x6c, nv_rd32(dev, 0x326c)); |
433 | nv_wo32(dev, ramfc, 0x70/4, nv_rd32(dev, 0x32e4)); | 433 | nv_wo32(ramfc, 0x70, nv_rd32(dev, 0x32e4)); |
434 | nv_wo32(dev, ramfc, 0x74/4, nv_rd32(dev, 0x3248)); | 434 | nv_wo32(ramfc, 0x74, nv_rd32(dev, 0x3248)); |
435 | nv_wo32(dev, ramfc, 0x78/4, nv_rd32(dev, 0x2088)); | 435 | nv_wo32(ramfc, 0x78, nv_rd32(dev, 0x2088)); |
436 | nv_wo32(dev, ramfc, 0x7c/4, nv_rd32(dev, 0x2058)); | 436 | nv_wo32(ramfc, 0x7c, nv_rd32(dev, 0x2058)); |
437 | nv_wo32(dev, ramfc, 0x80/4, nv_rd32(dev, 0x2210)); | 437 | nv_wo32(ramfc, 0x80, nv_rd32(dev, 0x2210)); |
438 | 438 | ||
439 | put = (nv_rd32(dev, NV03_PFIFO_CACHE1_PUT) & 0x7ff) >> 2; | 439 | put = (nv_rd32(dev, NV03_PFIFO_CACHE1_PUT) & 0x7ff) >> 2; |
440 | get = (nv_rd32(dev, NV03_PFIFO_CACHE1_GET) & 0x7ff) >> 2; | 440 | get = (nv_rd32(dev, NV03_PFIFO_CACHE1_GET) & 0x7ff) >> 2; |
441 | ptr = 0; | 441 | ptr = 0; |
442 | while (put != get) { | 442 | while (put != get) { |
443 | nv_wo32(dev, cache, ptr++, | 443 | nv_wo32(cache, ptr + 0, |
444 | nv_rd32(dev, NV40_PFIFO_CACHE1_METHOD(get))); | 444 | nv_rd32(dev, NV40_PFIFO_CACHE1_METHOD(get))); |
445 | nv_wo32(dev, cache, ptr++, | 445 | nv_wo32(cache, ptr + 4, |
446 | nv_rd32(dev, NV40_PFIFO_CACHE1_DATA(get))); | 446 | nv_rd32(dev, NV40_PFIFO_CACHE1_DATA(get))); |
447 | get = (get + 1) & 0x1ff; | 447 | get = (get + 1) & 0x1ff; |
448 | ptr += 8; | ||
448 | } | 449 | } |
449 | 450 | ||
450 | /* guessing that all the 0x34xx regs aren't on NV50 */ | 451 | /* guessing that all the 0x34xx regs aren't on NV50 */ |
451 | if (dev_priv->chipset != 0x50) { | 452 | if (dev_priv->chipset != 0x50) { |
452 | nv_wo32(dev, ramfc, 0x84/4, ptr >> 1); | 453 | nv_wo32(ramfc, 0x84, ptr >> 3); |
453 | nv_wo32(dev, ramfc, 0x88/4, nv_rd32(dev, 0x340c)); | 454 | nv_wo32(ramfc, 0x88, nv_rd32(dev, 0x340c)); |
454 | nv_wo32(dev, ramfc, 0x8c/4, nv_rd32(dev, 0x3400)); | 455 | nv_wo32(ramfc, 0x8c, nv_rd32(dev, 0x3400)); |
455 | nv_wo32(dev, ramfc, 0x90/4, nv_rd32(dev, 0x3404)); | 456 | nv_wo32(ramfc, 0x90, nv_rd32(dev, 0x3404)); |
456 | nv_wo32(dev, ramfc, 0x94/4, nv_rd32(dev, 0x3408)); | 457 | nv_wo32(ramfc, 0x94, nv_rd32(dev, 0x3408)); |
457 | nv_wo32(dev, ramfc, 0x98/4, nv_rd32(dev, 0x3410)); | 458 | nv_wo32(ramfc, 0x98, nv_rd32(dev, 0x3410)); |
458 | } | 459 | } |
459 | 460 | ||
460 | dev_priv->engine.instmem.flush(dev); | 461 | dev_priv->engine.instmem.flush(dev); |
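The CACHE1 save/restore loops above treat each method/data pair as one 8-byte record: the method word goes at byte 0 of the record, the data word at byte 4, and the byte pointer advances by 8 per entry, so the count stored at ramfc+0x84 is ptr >> 3 (previously ptr >> 1 when ptr counted words). A sketch of the assumed layout, with an illustrative struct name not taken from the driver:

	/* Illustrative only: one saved CACHE1 entry as written by the
	 * unload loop and read back by the load loop.
	 */
	struct cache1_entry {
		u32 method;	/* written at (ptr + 0), read at (ptr * 8) + 0 */
		u32 data;	/* written at (ptr + 4), read at (ptr * 8) + 4 */
	};
	/* entries saved = byte pointer / sizeof(struct cache1_entry) = ptr >> 3 */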
diff --git a/drivers/gpu/drm/nouveau/nv50_graph.c b/drivers/gpu/drm/nouveau/nv50_graph.c index 1413028e1580..17a8d788a494 100644 --- a/drivers/gpu/drm/nouveau/nv50_graph.c +++ b/drivers/gpu/drm/nouveau/nv50_graph.c | |||
@@ -220,20 +220,20 @@ nv50_graph_create_context(struct nouveau_channel *chan) | |||
220 | obj = chan->ramin_grctx->gpuobj; | 220 | obj = chan->ramin_grctx->gpuobj; |
221 | 221 | ||
222 | hdr = (dev_priv->chipset == 0x50) ? 0x200 : 0x20; | 222 | hdr = (dev_priv->chipset == 0x50) ? 0x200 : 0x20; |
223 | nv_wo32(dev, ramin, (hdr + 0x00)/4, 0x00190002); | 223 | nv_wo32(ramin, hdr + 0x00, 0x00190002); |
224 | nv_wo32(dev, ramin, (hdr + 0x04)/4, chan->ramin_grctx->instance + | 224 | nv_wo32(ramin, hdr + 0x04, chan->ramin_grctx->instance + |
225 | pgraph->grctx_size - 1); | 225 | pgraph->grctx_size - 1); |
226 | nv_wo32(dev, ramin, (hdr + 0x08)/4, chan->ramin_grctx->instance); | 226 | nv_wo32(ramin, hdr + 0x08, chan->ramin_grctx->instance); |
227 | nv_wo32(dev, ramin, (hdr + 0x0c)/4, 0); | 227 | nv_wo32(ramin, hdr + 0x0c, 0); |
228 | nv_wo32(dev, ramin, (hdr + 0x10)/4, 0); | 228 | nv_wo32(ramin, hdr + 0x10, 0); |
229 | nv_wo32(dev, ramin, (hdr + 0x14)/4, 0x00010000); | 229 | nv_wo32(ramin, hdr + 0x14, 0x00010000); |
230 | 230 | ||
231 | ctx.dev = chan->dev; | 231 | ctx.dev = chan->dev; |
232 | ctx.mode = NOUVEAU_GRCTX_VALS; | 232 | ctx.mode = NOUVEAU_GRCTX_VALS; |
233 | ctx.data = obj; | 233 | ctx.data = obj; |
234 | nv50_grctx_init(&ctx); | 234 | nv50_grctx_init(&ctx); |
235 | 235 | ||
236 | nv_wo32(dev, obj, 0x00000/4, chan->ramin->instance >> 12); | 236 | nv_wo32(obj, 0x00000, chan->ramin->instance >> 12); |
237 | 237 | ||
238 | dev_priv->engine.instmem.flush(dev); | 238 | dev_priv->engine.instmem.flush(dev); |
239 | return 0; | 239 | return 0; |
@@ -252,7 +252,7 @@ nv50_graph_destroy_context(struct nouveau_channel *chan) | |||
252 | return; | 252 | return; |
253 | 253 | ||
254 | for (i = hdr; i < hdr + 24; i += 4) | 254 | for (i = hdr; i < hdr + 24; i += 4) |
255 | nv_wo32(dev, chan->ramin->gpuobj, i/4, 0); | 255 | nv_wo32(chan->ramin->gpuobj, i, 0); |
256 | dev_priv->engine.instmem.flush(dev); | 256 | dev_priv->engine.instmem.flush(dev); |
257 | 257 | ||
258 | nouveau_gpuobj_ref_del(dev, &chan->ramin_grctx); | 258 | nouveau_gpuobj_ref_del(dev, &chan->ramin_grctx); |
diff --git a/drivers/gpu/drm/nouveau/nv50_grctx.c b/drivers/gpu/drm/nouveau/nv50_grctx.c index 42a8fb20c1e6..ba6c033c8c95 100644 --- a/drivers/gpu/drm/nouveau/nv50_grctx.c +++ b/drivers/gpu/drm/nouveau/nv50_grctx.c | |||
@@ -995,7 +995,7 @@ xf_emit(struct nouveau_grctx *ctx, int num, uint32_t val) { | |||
995 | int i; | 995 | int i; |
996 | if (val && ctx->mode == NOUVEAU_GRCTX_VALS) | 996 | if (val && ctx->mode == NOUVEAU_GRCTX_VALS) |
997 | for (i = 0; i < num; i++) | 997 | for (i = 0; i < num; i++) |
998 | nv_wo32(ctx->dev, ctx->data, ctx->ctxvals_pos + (i << 3), val); | 998 | nv_wo32(ctx->data, (ctx->ctxvals_pos + (i << 3))*4, val); |
999 | ctx->ctxvals_pos += num << 3; | 999 | ctx->ctxvals_pos += num << 3; |
1000 | } | 1000 | } |
1001 | 1001 | ||
diff --git a/drivers/gpu/drm/nouveau/nv50_instmem.c b/drivers/gpu/drm/nouveau/nv50_instmem.c index 91ef93cf1f35..821806c835ba 100644 --- a/drivers/gpu/drm/nouveau/nv50_instmem.c +++ b/drivers/gpu/drm/nouveau/nv50_instmem.c | |||
@@ -449,9 +449,10 @@ nv50_instmem_bind(struct drm_device *dev, struct nouveau_gpuobj *gpuobj) | |||
449 | } | 449 | } |
450 | 450 | ||
451 | while (pte < pte_end) { | 451 | while (pte < pte_end) { |
452 | nv_wo32(dev, pramin_pt, pte++, lower_32_bits(vram)); | 452 | nv_wo32(pramin_pt, (pte * 4) + 0, lower_32_bits(vram)); |
453 | nv_wo32(dev, pramin_pt, pte++, upper_32_bits(vram)); | 453 | nv_wo32(pramin_pt, (pte * 4) + 4, upper_32_bits(vram)); |
454 | vram += NV50_INSTMEM_PAGE_SIZE; | 454 | vram += NV50_INSTMEM_PAGE_SIZE; |
455 | pte += 2; | ||
455 | } | 456 | } |
456 | dev_priv->engine.instmem.flush(dev); | 457 | dev_priv->engine.instmem.flush(dev); |
457 | 458 | ||
@@ -476,8 +477,9 @@ nv50_instmem_unbind(struct drm_device *dev, struct nouveau_gpuobj *gpuobj) | |||
476 | pte_end = ((gpuobj->im_pramin->size >> 12) << 1) + pte; | 477 | pte_end = ((gpuobj->im_pramin->size >> 12) << 1) + pte; |
477 | 478 | ||
478 | while (pte < pte_end) { | 479 | while (pte < pte_end) { |
479 | nv_wo32(dev, priv->pramin_pt->gpuobj, pte++, 0x00000000); | 480 | nv_wo32(priv->pramin_pt->gpuobj, (pte * 4) + 0, 0x00000000); |
480 | nv_wo32(dev, priv->pramin_pt->gpuobj, pte++, 0x00000000); | 481 | nv_wo32(priv->pramin_pt->gpuobj, (pte * 4) + 4, 0x00000000); |
482 | pte += 2; | ||
481 | } | 483 | } |
482 | dev_priv->engine.instmem.flush(dev); | 484 | dev_priv->engine.instmem.flush(dev); |
483 | 485 | ||