Diffstat (limited to 'drivers/gpu/drm/nouveau/nv84_fence.c')
-rw-r--r--  drivers/gpu/drm/nouveau/nv84_fence.c | 127
1 file changed, 66 insertions(+), 61 deletions(-)
diff --git a/drivers/gpu/drm/nouveau/nv84_fence.c b/drivers/gpu/drm/nouveau/nv84_fence.c
index c2f889b0d340..c686650584b6 100644
--- a/drivers/gpu/drm/nouveau/nv84_fence.c
+++ b/drivers/gpu/drm/nouveau/nv84_fence.c
@@ -22,13 +22,17 @@
  * Authors: Ben Skeggs
  */
 
-#include "drmP.h"
-#include "nouveau_drv.h"
+#include <core/object.h>
+#include <core/class.h>
+
+#include <engine/fifo.h>
+
+#include "nouveau_drm.h"
 #include "nouveau_dma.h"
-#include "nouveau_fifo.h"
-#include "nouveau_ramht.h"
 #include "nouveau_fence.h"
 
+#include "nv50_display.h"
+
 struct nv84_fence_chan {
 	struct nouveau_fence_chan base;
 };
@@ -42,13 +46,14 @@ static int
 nv84_fence_emit(struct nouveau_fence *fence)
 {
 	struct nouveau_channel *chan = fence->channel;
+	struct nouveau_fifo_chan *fifo = (void *)chan->object;
 	int ret = RING_SPACE(chan, 7);
 	if (ret == 0) {
 		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
 		OUT_RING (chan, NvSema);
 		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
-		OUT_RING (chan, upper_32_bits(chan->id * 16));
-		OUT_RING (chan, lower_32_bits(chan->id * 16));
+		OUT_RING (chan, upper_32_bits(fifo->chid * 16));
+		OUT_RING (chan, lower_32_bits(fifo->chid * 16));
 		OUT_RING (chan, fence->sequence);
 		OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
 		FIRE_RING (chan);
@@ -61,13 +66,14 @@ static int
 nv84_fence_sync(struct nouveau_fence *fence,
 		struct nouveau_channel *prev, struct nouveau_channel *chan)
 {
+	struct nouveau_fifo_chan *fifo = (void *)prev->object;
 	int ret = RING_SPACE(chan, 7);
 	if (ret == 0) {
 		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
 		OUT_RING (chan, NvSema);
 		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
-		OUT_RING (chan, upper_32_bits(prev->id * 16));
-		OUT_RING (chan, lower_32_bits(prev->id * 16));
+		OUT_RING (chan, upper_32_bits(fifo->chid * 16));
+		OUT_RING (chan, lower_32_bits(fifo->chid * 16));
 		OUT_RING (chan, fence->sequence);
 		OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_GEQUAL);
 		FIRE_RING (chan);
@@ -78,100 +84,99 @@ nv84_fence_sync(struct nouveau_fence *fence,
 static u32
 nv84_fence_read(struct nouveau_channel *chan)
 {
-	struct nv84_fence_priv *priv = nv_engine(chan->dev, NVOBJ_ENGINE_FENCE);
-	return nv_ro32(priv->mem, chan->id * 16);
+	struct nouveau_fifo_chan *fifo = (void *)chan->object;
+	struct nv84_fence_priv *priv = chan->drm->fence;
+	return nv_ro32(priv->mem, fifo->chid * 16);
 }
 
 static void
-nv84_fence_context_del(struct nouveau_channel *chan, int engine)
+nv84_fence_context_del(struct nouveau_channel *chan)
 {
-	struct nv84_fence_chan *fctx = chan->engctx[engine];
+	struct nv84_fence_chan *fctx = chan->fence;
 	nouveau_fence_context_del(&fctx->base);
-	chan->engctx[engine] = NULL;
+	chan->fence = NULL;
 	kfree(fctx);
 }
 
 static int
-nv84_fence_context_new(struct nouveau_channel *chan, int engine)
+nv84_fence_context_new(struct nouveau_channel *chan)
 {
-	struct nv84_fence_priv *priv = nv_engine(chan->dev, engine);
+	struct drm_device *dev = chan->drm->dev;
+	struct nouveau_fifo_chan *fifo = (void *)chan->object;
+	struct nv84_fence_priv *priv = chan->drm->fence;
 	struct nv84_fence_chan *fctx;
-	struct nouveau_gpuobj *obj;
-	int ret;
+	struct nouveau_object *object;
+	int ret, i;
 
-	fctx = chan->engctx[engine] = kzalloc(sizeof(*fctx), GFP_KERNEL);
+	fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);
 	if (!fctx)
 		return -ENOMEM;
 
 	nouveau_fence_context_new(&fctx->base);
 
-	ret = nouveau_gpuobj_dma_new(chan, NV_CLASS_DMA_FROM_MEMORY,
-				     priv->mem->vinst, priv->mem->size,
-				     NV_MEM_ACCESS_RW,
-				     NV_MEM_TARGET_VRAM, &obj);
-	if (ret == 0) {
-		ret = nouveau_ramht_insert(chan, NvSema, obj);
-		nouveau_gpuobj_ref(NULL, &obj);
-		nv_wo32(priv->mem, chan->id * 16, 0x00000000);
+	ret = nouveau_object_new(nv_object(chan->cli), chan->handle,
+				 NvSema, 0x0002,
+				 &(struct nv_dma_class) {
+					.flags = NV_DMA_TARGET_VRAM |
+						 NV_DMA_ACCESS_RDWR,
+					.start = priv->mem->addr,
+					.limit = priv->mem->addr +
+						 priv->mem->size - 1,
+				 }, sizeof(struct nv_dma_class),
+				 &object);
+
+	/* dma objects for display sync channel semaphore blocks */
+	for (i = 0; !ret && i < dev->mode_config.num_crtc; i++) {
+		struct nouveau_bo *bo = nv50_display_crtc_sema(dev, i);
+
+		ret = nouveau_object_new(nv_object(chan->cli), chan->handle,
+					 NvEvoSema0 + i, 0x003d,
+					 &(struct nv_dma_class) {
+						.flags = NV_DMA_TARGET_VRAM |
+							 NV_DMA_ACCESS_RDWR,
+						.start = bo->bo.offset,
+						.limit = bo->bo.offset + 0xfff,
+					 }, sizeof(struct nv_dma_class),
+					 &object);
 	}
 
 	if (ret)
-		nv84_fence_context_del(chan, engine);
+		nv84_fence_context_del(chan);
+	nv_wo32(priv->mem, fifo->chid * 16, 0x00000000);
 	return ret;
 }
 
-static int
-nv84_fence_fini(struct drm_device *dev, int engine, bool suspend)
-{
-	return 0;
-}
-
-static int
-nv84_fence_init(struct drm_device *dev, int engine)
-{
-	return 0;
-}
-
 static void
-nv84_fence_destroy(struct drm_device *dev, int engine)
+nv84_fence_destroy(struct nouveau_drm *drm)
 {
-	struct drm_nouveau_private *dev_priv = dev->dev_private;
-	struct nv84_fence_priv *priv = nv_engine(dev, engine);
-
+	struct nv84_fence_priv *priv = drm->fence;
 	nouveau_gpuobj_ref(NULL, &priv->mem);
-	dev_priv->eng[engine] = NULL;
+	drm->fence = NULL;
 	kfree(priv);
 }
 
 int
-nv84_fence_create(struct drm_device *dev)
+nv84_fence_create(struct nouveau_drm *drm)
 {
-	struct nouveau_fifo_priv *pfifo = nv_engine(dev, NVOBJ_ENGINE_FIFO);
-	struct drm_nouveau_private *dev_priv = dev->dev_private;
+	struct nouveau_fifo *pfifo = nouveau_fifo(drm->device);
 	struct nv84_fence_priv *priv;
+	u32 chan = pfifo->max + 1;
 	int ret;
 
-	priv = kzalloc(sizeof(*priv), GFP_KERNEL);
+	priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL);
 	if (!priv)
 		return -ENOMEM;
 
-	priv->base.engine.destroy = nv84_fence_destroy;
-	priv->base.engine.init = nv84_fence_init;
-	priv->base.engine.fini = nv84_fence_fini;
-	priv->base.engine.context_new = nv84_fence_context_new;
-	priv->base.engine.context_del = nv84_fence_context_del;
+	priv->base.dtor = nv84_fence_destroy;
+	priv->base.context_new = nv84_fence_context_new;
+	priv->base.context_del = nv84_fence_context_del;
 	priv->base.emit = nv84_fence_emit;
 	priv->base.sync = nv84_fence_sync;
 	priv->base.read = nv84_fence_read;
-	dev_priv->eng[NVOBJ_ENGINE_FENCE] = &priv->base.engine;
-
-	ret = nouveau_gpuobj_new(dev, NULL, 16 * pfifo->channels,
-				 0x1000, 0, &priv->mem);
-	if (ret)
-		goto out;
 
-out:
+	ret = nouveau_gpuobj_new(drm->device, NULL, chan * 16, 0x1000, 0,
+				 &priv->mem);
 	if (ret)
-		nv84_fence_destroy(dev, NVOBJ_ENGINE_FENCE);
+		nv84_fence_destroy(drm);
 	return ret;
 }
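
Reading aid: the semaphore layout the patch relies on is unchanged from the old code. Every FIFO channel owns a 16-byte slot in priv->mem, addressed as channel id times 16; nv84_fence_emit() writes the fence sequence into that slot, nv84_fence_sync() acquires on it with ACQUIRE_GEQUAL, and nv84_fence_read() reads it back with nv_ro32(). On those paths the only functional difference here is that the channel id now comes from fifo->chid instead of chan->id. A minimal sketch of the slot addressing follows; the helper name is hypothetical, only the arithmetic and the nv_ro32() usage come from the diff above.

	/* Hypothetical helper: offset of a channel's fence semaphore slot. */
	static inline u32
	nv84_fence_sema_offset(u32 chid)
	{
		return chid * 16;	/* one 16-byte slot per FIFO channel */
	}

	/* e.g. nv84_fence_read() is equivalent to:
	 *	nv_ro32(priv->mem, nv84_fence_sema_offset(fifo->chid));
	 */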