Diffstat (limited to 'drivers/gpu/drm/nouveau/nv10_fence.c')
-rw-r--r--  drivers/gpu/drm/nouveau/nv10_fence.c | 103
1 file changed, 45 insertions(+), 58 deletions(-)
diff --git a/drivers/gpu/drm/nouveau/nv10_fence.c b/drivers/gpu/drm/nouveau/nv10_fence.c
index d30f752464ef..ce752bf5cc4e 100644
--- a/drivers/gpu/drm/nouveau/nv10_fence.c
+++ b/drivers/gpu/drm/nouveau/nv10_fence.c
@@ -22,10 +22,11 @@
  * Authors: Ben Skeggs <bskeggs@redhat.com>
  */
 
-#include <drm/drmP.h>
-#include "nouveau_drv.h"
+#include <core/object.h>
+#include <core/class.h>
+
+#include "nouveau_drm.h"
 #include "nouveau_dma.h"
-#include "nouveau_ramht.h"
 #include "nouveau_fence.h"
 
 struct nv10_fence_chan {
@@ -39,7 +40,7 @@ struct nv10_fence_priv {
 	u32 sequence;
 };
 
-static int
+int
 nv10_fence_emit(struct nouveau_fence *fence)
 {
 	struct nouveau_channel *chan = fence->channel;
@@ -60,15 +61,15 @@ nv10_fence_sync(struct nouveau_fence *fence,
 	return -ENODEV;
 }
 
-static int
+int
 nv17_fence_sync(struct nouveau_fence *fence,
 		struct nouveau_channel *prev, struct nouveau_channel *chan)
 {
-	struct nv10_fence_priv *priv = nv_engine(chan->dev, NVOBJ_ENGINE_FENCE);
+	struct nv10_fence_priv *priv = chan->drm->fence;
 	u32 value;
 	int ret;
 
-	if (!mutex_trylock(&prev->mutex))
+	if (!mutex_trylock(&prev->cli->mutex))
 		return -EBUSY;
 
 	spin_lock(&priv->lock);
@@ -95,34 +96,33 @@ nv17_fence_sync(struct nouveau_fence *fence,
 		FIRE_RING (chan);
 	}
 
-	mutex_unlock(&prev->mutex);
+	mutex_unlock(&prev->cli->mutex);
 	return 0;
 }
 
-static u32
+u32
 nv10_fence_read(struct nouveau_channel *chan)
 {
-	return nvchan_rd32(chan, 0x0048);
+	return nv_ro32(chan->object, 0x0048);
 }
 
-static void
-nv10_fence_context_del(struct nouveau_channel *chan, int engine)
+void
+nv10_fence_context_del(struct nouveau_channel *chan)
 {
-	struct nv10_fence_chan *fctx = chan->engctx[engine];
+	struct nv10_fence_chan *fctx = chan->fence;
 	nouveau_fence_context_del(&fctx->base);
-	chan->engctx[engine] = NULL;
+	chan->fence = NULL;
 	kfree(fctx);
 }
 
 static int
-nv10_fence_context_new(struct nouveau_channel *chan, int engine)
+nv10_fence_context_new(struct nouveau_channel *chan)
 {
-	struct nv10_fence_priv *priv = nv_engine(chan->dev, engine);
+	struct nv10_fence_priv *priv = chan->drm->fence;
 	struct nv10_fence_chan *fctx;
-	struct nouveau_gpuobj *obj;
 	int ret = 0;
 
-	fctx = chan->engctx[engine] = kzalloc(sizeof(*fctx), GFP_KERNEL);
+	fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);
 	if (!fctx)
 		return -ENOMEM;
 
@@ -130,69 +130,56 @@ nv10_fence_context_new(struct nouveau_channel *chan, int engine)
 
 	if (priv->bo) {
 		struct ttm_mem_reg *mem = &priv->bo->bo.mem;
-
-		ret = nouveau_gpuobj_dma_new(chan, NV_CLASS_DMA_FROM_MEMORY,
-					     mem->start * PAGE_SIZE, mem->size,
-					     NV_MEM_ACCESS_RW,
-					     NV_MEM_TARGET_VRAM, &obj);
-		if (!ret) {
-			ret = nouveau_ramht_insert(chan, NvSema, obj);
-			nouveau_gpuobj_ref(NULL, &obj);
-		}
+		struct nouveau_object *object;
+		u32 start = mem->start * PAGE_SIZE;
+		u32 limit = mem->start + mem->size - 1;
+
+		ret = nouveau_object_new(nv_object(chan->cli), chan->handle,
+					 NvSema, 0x0002,
+					 &(struct nv_dma_class) {
+						.flags = NV_DMA_TARGET_VRAM |
+							 NV_DMA_ACCESS_RDWR,
+						.start = start,
+						.limit = limit,
+					 }, sizeof(struct nv_dma_class),
+					 &object);
 	}
 
 	if (ret)
-		nv10_fence_context_del(chan, engine);
+		nv10_fence_context_del(chan);
 	return ret;
 }
 
-static int
-nv10_fence_fini(struct drm_device *dev, int engine, bool suspend)
+void
+nv10_fence_destroy(struct nouveau_drm *drm)
 {
-	return 0;
-}
-
-static int
-nv10_fence_init(struct drm_device *dev, int engine)
-{
-	return 0;
-}
-
-static void
-nv10_fence_destroy(struct drm_device *dev, int engine)
-{
-	struct drm_nouveau_private *dev_priv = dev->dev_private;
-	struct nv10_fence_priv *priv = nv_engine(dev, engine);
-
+	struct nv10_fence_priv *priv = drm->fence;
+	nouveau_bo_unmap(priv->bo);
 	nouveau_bo_ref(NULL, &priv->bo);
-	dev_priv->eng[engine] = NULL;
+	drm->fence = NULL;
 	kfree(priv);
 }
 
 int
-nv10_fence_create(struct drm_device *dev)
+nv10_fence_create(struct nouveau_drm *drm)
 {
-	struct drm_nouveau_private *dev_priv = dev->dev_private;
 	struct nv10_fence_priv *priv;
 	int ret = 0;
 
-	priv = kzalloc(sizeof(*priv), GFP_KERNEL);
+	priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL);
 	if (!priv)
 		return -ENOMEM;
 
-	priv->base.engine.destroy = nv10_fence_destroy;
-	priv->base.engine.init = nv10_fence_init;
-	priv->base.engine.fini = nv10_fence_fini;
-	priv->base.engine.context_new = nv10_fence_context_new;
-	priv->base.engine.context_del = nv10_fence_context_del;
+	priv->base.dtor = nv10_fence_destroy;
+	priv->base.context_new = nv10_fence_context_new;
+	priv->base.context_del = nv10_fence_context_del;
 	priv->base.emit = nv10_fence_emit;
 	priv->base.read = nv10_fence_read;
 	priv->base.sync = nv10_fence_sync;
-	dev_priv->eng[NVOBJ_ENGINE_FENCE] = &priv->base.engine;
 	spin_lock_init(&priv->lock);
 
-	if (dev_priv->chipset >= 0x17) {
-		ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
+	if (nv_device(drm->device)->chipset >= 0x17) {
+		ret = nouveau_bo_new(drm->dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
 			     0, 0x0000, NULL, &priv->bo);
 		if (!ret) {
 			ret = nouveau_bo_pin(priv->bo, TTM_PL_FLAG_VRAM);
@@ -209,6 +196,6 @@ nv10_fence_create(struct drm_device *dev)
 	}
 
 	if (ret)
-		nv10_fence_destroy(dev, NVOBJ_ENGINE_FENCE);
+		nv10_fence_destroy(drm);
 	return ret;
 }
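For orientation: the change above drops the old per-engine registration (dev_priv->eng[NVOBJ_ENGINE_FENCE]) in favour of a fence backend hung off struct nouveau_drm, with per-channel state on chan->fence. The sketch below shows how a caller might drive the reworked entry points. It is a hedged illustration, not code from this commit: the wrapper function example_fence_setup is hypothetical, and the assumption that drm->fence can be read back as a struct nouveau_fence_priv with the dtor/context_new callbacks is inferred from the assignments in the hunks above.

```c
/*
 * Illustrative sketch only -- not part of this diff.  Assumes the
 * callback layout visible above (priv->base.dtor, .context_new,
 * .context_del, .emit, .read, .sync) lives in a base struct
 * nouveau_fence_priv reachable via drm->fence.
 */
static int
example_fence_setup(struct nouveau_drm *drm, struct nouveau_channel *chan)
{
	struct nouveau_fence_priv *priv;
	int ret;

	/* Build the per-device backend; it stores itself in drm->fence. */
	ret = nv10_fence_create(drm);
	if (ret)
		return ret;
	priv = drm->fence;

	/* Per-channel state now hangs off chan->fence, not engctx[]. */
	ret = priv->context_new(chan);
	if (ret) {
		priv->dtor(drm);	/* nv10_fence_destroy(drm) */
		return ret;
	}
	return 0;
}
```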