Diffstat (limited to 'drivers/gpu/drm/nouveau/nv50_fifo.c')
-rw-r--r--	drivers/gpu/drm/nouveau/nv50_fifo.c	126
1 file changed, 46 insertions, 80 deletions
diff --git a/drivers/gpu/drm/nouveau/nv50_fifo.c b/drivers/gpu/drm/nouveau/nv50_fifo.c
index e20c0e2474f3..fb0281ae8f90 100644
--- a/drivers/gpu/drm/nouveau/nv50_fifo.c
+++ b/drivers/gpu/drm/nouveau/nv50_fifo.c
@@ -28,41 +28,33 @@
 #include "drm.h"
 #include "nouveau_drv.h"
 
-struct nv50_fifo_priv {
-	struct nouveau_gpuobj_ref *thingo[2];
-	int cur_thingo;
-};
-
-#define IS_G80 ((dev_priv->chipset & 0xf0) == 0x50)
-
 static void
-nv50_fifo_init_thingo(struct drm_device *dev)
+nv50_fifo_playlist_update(struct drm_device *dev)
 {
 	struct drm_nouveau_private *dev_priv = dev->dev_private;
-	struct nv50_fifo_priv *priv = dev_priv->engine.fifo.priv;
+	struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;
 	struct nouveau_gpuobj_ref *cur;
 	int i, nr;
 
 	NV_DEBUG(dev, "\n");
 
-	cur = priv->thingo[priv->cur_thingo];
-	priv->cur_thingo = !priv->cur_thingo;
+	cur = pfifo->playlist[pfifo->cur_playlist];
+	pfifo->cur_playlist = !pfifo->cur_playlist;
 
 	/* We never schedule channel 0 or 127 */
-	dev_priv->engine.instmem.prepare_access(dev, true);
 	for (i = 1, nr = 0; i < 127; i++) {
 		if (dev_priv->fifos[i] && dev_priv->fifos[i]->ramfc)
 			nv_wo32(dev, cur->gpuobj, nr++, i);
 	}
-	dev_priv->engine.instmem.finish_access(dev);
+	dev_priv->engine.instmem.flush(dev);
 
 	nv_wr32(dev, 0x32f4, cur->instance >> 12);
 	nv_wr32(dev, 0x32ec, nr);
 	nv_wr32(dev, 0x2500, 0x101);
 }
 
-static int
-nv50_fifo_channel_enable(struct drm_device *dev, int channel, bool nt)
+static void
+nv50_fifo_channel_enable(struct drm_device *dev, int channel)
 {
 	struct drm_nouveau_private *dev_priv = dev->dev_private;
 	struct nouveau_channel *chan = dev_priv->fifos[channel];
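The renamed nv50_fifo_playlist_update() keeps two playlist buffers and flips between them on every rebuild, writing the IDs of all channels that currently own a RAMFC into the current buffer before pointing the hardware at it (registers 0x32f4, 0x32ec, 0x2500). A minimal userspace sketch of that double-buffering, with hypothetical playlist_state/playlist_rebuild stand-ins for the gpuobj accessors:

#include <stdbool.h>
#include <stdio.h>

#define NR_CHANNELS 128

/* Hypothetical stand-in state; the driver keeps this in nouveau_fifo_engine. */
struct playlist_state {
	int cur;			/* index of the buffer to fill this time */
	int list[2][NR_CHANNELS];	/* two playlist buffers */
	bool active[NR_CHANNELS];	/* stand-in for fifos[i] && fifos[i]->ramfc */
};

/* Fill the current buffer with runnable channel IDs, then flip buffers. */
static int playlist_rebuild(struct playlist_state *ps)
{
	int *cur = ps->list[ps->cur];
	int i, nr = 0;

	ps->cur = !ps->cur;

	/* Channels 0 and 127 are never scheduled, as in the driver. */
	for (i = 1; i < NR_CHANNELS - 1; i++) {
		if (ps->active[i])
			cur[nr++] = i;
	}
	return nr;	/* the driver writes this count to 0x32ec */
}

int main(void)
{
	struct playlist_state ps = {0};

	ps.active[3] = ps.active[5] = true;
	printf("playlist holds %d channels\n", playlist_rebuild(&ps));
	return 0;
}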
@@ -70,37 +62,28 @@ nv50_fifo_channel_enable(struct drm_device *dev, int channel, bool nt)
 
 	NV_DEBUG(dev, "ch%d\n", channel);
 
-	if (!chan->ramfc)
-		return -EINVAL;
-
-	if (IS_G80)
+	if (dev_priv->chipset == 0x50)
 		inst = chan->ramfc->instance >> 12;
 	else
 		inst = chan->ramfc->instance >> 8;
-	nv_wr32(dev, NV50_PFIFO_CTX_TABLE(channel),
-		inst | NV50_PFIFO_CTX_TABLE_CHANNEL_ENABLED);
 
-	if (!nt)
-		nv50_fifo_init_thingo(dev);
-	return 0;
+	nv_wr32(dev, NV50_PFIFO_CTX_TABLE(channel), inst |
+		NV50_PFIFO_CTX_TABLE_CHANNEL_ENABLED);
 }
 
 static void
-nv50_fifo_channel_disable(struct drm_device *dev, int channel, bool nt)
+nv50_fifo_channel_disable(struct drm_device *dev, int channel)
 {
 	struct drm_nouveau_private *dev_priv = dev->dev_private;
 	uint32_t inst;
 
-	NV_DEBUG(dev, "ch%d, nt=%d\n", channel, nt);
+	NV_DEBUG(dev, "ch%d\n", channel);
 
-	if (IS_G80)
+	if (dev_priv->chipset == 0x50)
 		inst = NV50_PFIFO_CTX_TABLE_INSTANCE_MASK_G80;
 	else
 		inst = NV50_PFIFO_CTX_TABLE_INSTANCE_MASK_G84;
 	nv_wr32(dev, NV50_PFIFO_CTX_TABLE(channel), inst);
-
-	if (!nt)
-		nv50_fifo_init_thingo(dev);
 }
 
 static void
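Both helpers now test the chipset directly instead of the removed IS_G80 macro: the RAMFC instance address is shifted by 12 on the original NV50 (chipset 0x50) and by 8 on later chips before being written into the channel's context-table slot. A hedged sketch of that entry calculation; CTX_TABLE_CHANNEL_ENABLED below is an illustrative placeholder, the driver uses NV50_PFIFO_CTX_TABLE_CHANNEL_ENABLED:

#include <stdint.h>

/* Placeholder for NV50_PFIFO_CTX_TABLE_CHANNEL_ENABLED; illustrative only. */
#define CTX_TABLE_CHANNEL_ENABLED (1u << 31)

/*
 * Compute the context-table word nv50_fifo_channel_enable() writes:
 * the RAMFC instance address, shifted per chipset, tagged as enabled.
 */
static inline uint32_t ctx_table_entry(int chipset, uint32_t ramfc_instance)
{
	uint32_t inst;

	if (chipset == 0x50)
		inst = ramfc_instance >> 12;
	else
		inst = ramfc_instance >> 8;

	return inst | CTX_TABLE_CHANNEL_ENABLED;
}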
@@ -133,12 +116,12 @@ nv50_fifo_init_context_table(struct drm_device *dev)
 
 	for (i = 0; i < NV50_PFIFO_CTX_TABLE__SIZE; i++) {
 		if (dev_priv->fifos[i])
-			nv50_fifo_channel_enable(dev, i, true);
+			nv50_fifo_channel_enable(dev, i);
 		else
-			nv50_fifo_channel_disable(dev, i, true);
+			nv50_fifo_channel_disable(dev, i);
 	}
 
-	nv50_fifo_init_thingo(dev);
+	nv50_fifo_playlist_update(dev);
 }
 
 static void
@@ -162,41 +145,38 @@ nv50_fifo_init_regs(struct drm_device *dev)
 	nv_wr32(dev, 0x3270, 0);
 
 	/* Enable dummy channels setup by nv50_instmem.c */
-	nv50_fifo_channel_enable(dev, 0, true);
-	nv50_fifo_channel_enable(dev, 127, true);
+	nv50_fifo_channel_enable(dev, 0);
+	nv50_fifo_channel_enable(dev, 127);
 }
 
 int
 nv50_fifo_init(struct drm_device *dev)
 {
 	struct drm_nouveau_private *dev_priv = dev->dev_private;
-	struct nv50_fifo_priv *priv;
+	struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;
 	int ret;
 
 	NV_DEBUG(dev, "\n");
 
-	priv = dev_priv->engine.fifo.priv;
-	if (priv) {
-		priv->cur_thingo = !priv->cur_thingo;
+	if (pfifo->playlist[0]) {
+		pfifo->cur_playlist = !pfifo->cur_playlist;
 		goto just_reset;
 	}
 
-	priv = kzalloc(sizeof(*priv), GFP_KERNEL);
-	if (!priv)
-		return -ENOMEM;
-	dev_priv->engine.fifo.priv = priv;
-
 	ret = nouveau_gpuobj_new_ref(dev, NULL, NULL, 0, 128*4, 0x1000,
-				     NVOBJ_FLAG_ZERO_ALLOC, &priv->thingo[0]);
+				     NVOBJ_FLAG_ZERO_ALLOC,
+				     &pfifo->playlist[0]);
 	if (ret) {
-		NV_ERROR(dev, "error creating thingo0: %d\n", ret);
+		NV_ERROR(dev, "error creating playlist 0: %d\n", ret);
 		return ret;
 	}
 
 	ret = nouveau_gpuobj_new_ref(dev, NULL, NULL, 0, 128*4, 0x1000,
-				     NVOBJ_FLAG_ZERO_ALLOC, &priv->thingo[1]);
+				     NVOBJ_FLAG_ZERO_ALLOC,
+				     &pfifo->playlist[1]);
 	if (ret) {
-		NV_ERROR(dev, "error creating thingo1: %d\n", ret);
+		nouveau_gpuobj_ref_del(dev, &pfifo->playlist[0]);
+		NV_ERROR(dev, "error creating playlist 1: %d\n", ret);
 		return ret;
 	}
 
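nv50_fifo_init() now reuses already-allocated playlists on re-init (the goto just_reset path) and, when allocating fresh, releases playlist 0 if playlist 1 cannot be created. A rough sketch of that allocate-or-reuse pattern, with hypothetical alloc_buf()/free_buf() helpers standing in for nouveau_gpuobj_new_ref()/nouveau_gpuobj_ref_del():

#include <errno.h>
#include <stdlib.h>

/* Hypothetical stand-ins for nouveau_gpuobj_new_ref()/nouveau_gpuobj_ref_del(). */
static int alloc_buf(void **buf, size_t len)
{
	*buf = calloc(1, len);
	return *buf ? 0 : -ENOMEM;
}

static void free_buf(void **buf)
{
	free(*buf);
	*buf = NULL;
}

/* Create both playlists, or neither; skip allocation entirely on re-init. */
static int playlists_init(void *playlist[2], size_t len)
{
	int ret;

	if (playlist[0])	/* already set up: nothing to allocate */
		return 0;

	ret = alloc_buf(&playlist[0], len);
	if (ret)
		return ret;

	ret = alloc_buf(&playlist[1], len);
	if (ret) {
		free_buf(&playlist[0]);	/* don't leak the first buffer */
		return ret;
	}
	return 0;
}

int main(void)
{
	void *playlist[2] = { NULL, NULL };

	return playlists_init(playlist, 128 * 4);
}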
@@ -216,18 +196,15 @@ void
 nv50_fifo_takedown(struct drm_device *dev)
 {
 	struct drm_nouveau_private *dev_priv = dev->dev_private;
-	struct nv50_fifo_priv *priv = dev_priv->engine.fifo.priv;
+	struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;
 
 	NV_DEBUG(dev, "\n");
 
-	if (!priv)
+	if (!pfifo->playlist[0])
 		return;
 
-	nouveau_gpuobj_ref_del(dev, &priv->thingo[0]);
-	nouveau_gpuobj_ref_del(dev, &priv->thingo[1]);
-
-	dev_priv->engine.fifo.priv = NULL;
-	kfree(priv);
+	nouveau_gpuobj_ref_del(dev, &pfifo->playlist[0]);
+	nouveau_gpuobj_ref_del(dev, &pfifo->playlist[1]);
 }
 
 int
@@ -248,7 +225,7 @@ nv50_fifo_create_context(struct nouveau_channel *chan)
 
 	NV_DEBUG(dev, "ch%d\n", chan->id);
 
-	if (IS_G80) {
+	if (dev_priv->chipset == 0x50) {
 		uint32_t ramin_poffset = chan->ramin->gpuobj->im_pramin->start;
 		uint32_t ramin_voffset = chan->ramin->gpuobj->im_backing_start;
 
@@ -281,10 +258,10 @@ nv50_fifo_create_context(struct nouveau_channel *chan)
 
 	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
 
-	dev_priv->engine.instmem.prepare_access(dev, true);
-
 	nv_wo32(dev, ramfc, 0x48/4, chan->pushbuf->instance >> 4);
-	nv_wo32(dev, ramfc, 0x80/4, (0xc << 24) | (chan->ramht->instance >> 4));
+	nv_wo32(dev, ramfc, 0x80/4, (0 << 27) /* 4KiB */ |
+				    (4 << 24) /* SEARCH_FULL */ |
+				    (chan->ramht->instance >> 4));
 	nv_wo32(dev, ramfc, 0x44/4, 0x2101ffff);
 	nv_wo32(dev, ramfc, 0x60/4, 0x7fffffff);
 	nv_wo32(dev, ramfc, 0x40/4, 0x00000000);
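The word written at RAMFC offset 0x80 is now spelled out as named fields rather than the opaque 0xc << 24: bits 27 and up select the hash-table size (0 meaning 4KiB, per the inline comment), bits 24-26 the search mode (4 meaning SEARCH_FULL), and the low bits carry the RAMHT instance address shifted right by 4. A small helper showing how those fields pack; the parameter names are illustrative, not taken from a nouveau header:

#include <stdint.h>

/* Pack the RAMFC+0x80 hash-table word from its three fields. */
static inline uint32_t ramht_config(uint32_t size_sel, uint32_t search_mode,
				    uint32_t ramht_instance)
{
	return (size_sel << 27) |	/* 0 == 4KiB table, per the comment */
	       (search_mode << 24) |	/* 4 == SEARCH_FULL, per the comment */
	       (ramht_instance >> 4);	/* RAMHT instance address */
}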
@@ -295,7 +272,7 @@ nv50_fifo_create_context(struct nouveau_channel *chan)
 				     chan->dma.ib_base * 4);
 	nv_wo32(dev, ramfc, 0x54/4, drm_order(chan->dma.ib_max + 1) << 16);
 
-	if (!IS_G80) {
+	if (dev_priv->chipset != 0x50) {
 		nv_wo32(dev, chan->ramin->gpuobj, 0, chan->id);
 		nv_wo32(dev, chan->ramin->gpuobj, 1,
 			chan->ramfc->instance >> 8);
@@ -304,16 +281,10 @@ nv50_fifo_create_context(struct nouveau_channel *chan)
 		nv_wo32(dev, ramfc, 0x98/4, chan->ramin->instance >> 12);
 	}
 
-	dev_priv->engine.instmem.finish_access(dev);
-
-	ret = nv50_fifo_channel_enable(dev, chan->id, false);
-	if (ret) {
-		NV_ERROR(dev, "error enabling ch%d: %d\n", chan->id, ret);
-		spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);
-		nouveau_gpuobj_ref_del(dev, &chan->ramfc);
-		return ret;
-	}
+	dev_priv->engine.instmem.flush(dev);
 
+	nv50_fifo_channel_enable(dev, chan->id);
+	nv50_fifo_playlist_update(dev);
 	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);
 	return 0;
 }
@@ -328,11 +299,12 @@ nv50_fifo_destroy_context(struct nouveau_channel *chan)
 
 	/* This will ensure the channel is seen as disabled. */
 	chan->ramfc = NULL;
-	nv50_fifo_channel_disable(dev, chan->id, false);
+	nv50_fifo_channel_disable(dev, chan->id);
 
 	/* Dummy channel, also used on ch 127 */
 	if (chan->id == 0)
-		nv50_fifo_channel_disable(dev, 127, false);
+		nv50_fifo_channel_disable(dev, 127);
+	nv50_fifo_playlist_update(dev);
 
 	nouveau_gpuobj_ref_del(dev, &ramfc);
 	nouveau_gpuobj_ref_del(dev, &chan->cache);
@@ -349,8 +321,6 @@ nv50_fifo_load_context(struct nouveau_channel *chan)
 
 	NV_DEBUG(dev, "ch%d\n", chan->id);
 
-	dev_priv->engine.instmem.prepare_access(dev, false);
-
 	nv_wr32(dev, 0x3330, nv_ro32(dev, ramfc, 0x00/4));
 	nv_wr32(dev, 0x3334, nv_ro32(dev, ramfc, 0x04/4));
 	nv_wr32(dev, 0x3240, nv_ro32(dev, ramfc, 0x08/4));
@@ -396,7 +366,7 @@ nv50_fifo_load_context(struct nouveau_channel *chan)
 	nv_wr32(dev, NV03_PFIFO_CACHE1_GET, 0);
 
 	/* guessing that all the 0x34xx regs aren't on NV50 */
-	if (!IS_G80) {
+	if (dev_priv->chipset != 0x50) {
 		nv_wr32(dev, 0x340c, nv_ro32(dev, ramfc, 0x88/4));
 		nv_wr32(dev, 0x3400, nv_ro32(dev, ramfc, 0x8c/4));
 		nv_wr32(dev, 0x3404, nv_ro32(dev, ramfc, 0x90/4));
@@ -404,8 +374,6 @@ nv50_fifo_load_context(struct nouveau_channel *chan)
 		nv_wr32(dev, 0x3410, nv_ro32(dev, ramfc, 0x98/4));
 	}
 
-	dev_priv->engine.instmem.finish_access(dev);
-
 	nv_wr32(dev, NV03_PFIFO_CACHE1_PUSH1, chan->id | (1<<16));
 	return 0;
 }
@@ -434,8 +402,6 @@ nv50_fifo_unload_context(struct drm_device *dev)
 	ramfc = chan->ramfc->gpuobj;
 	cache = chan->cache->gpuobj;
 
-	dev_priv->engine.instmem.prepare_access(dev, true);
-
 	nv_wo32(dev, ramfc, 0x00/4, nv_rd32(dev, 0x3330));
 	nv_wo32(dev, ramfc, 0x04/4, nv_rd32(dev, 0x3334));
 	nv_wo32(dev, ramfc, 0x08/4, nv_rd32(dev, 0x3240));
@@ -482,7 +448,7 @@ nv50_fifo_unload_context(struct drm_device *dev)
 	}
 
 	/* guessing that all the 0x34xx regs aren't on NV50 */
-	if (!IS_G80) {
+	if (dev_priv->chipset != 0x50) {
 		nv_wo32(dev, ramfc, 0x84/4, ptr >> 1);
 		nv_wo32(dev, ramfc, 0x88/4, nv_rd32(dev, 0x340c));
 		nv_wo32(dev, ramfc, 0x8c/4, nv_rd32(dev, 0x3400));
@@ -491,7 +457,7 @@ nv50_fifo_unload_context(struct drm_device *dev)
 		nv_wo32(dev, ramfc, 0x98/4, nv_rd32(dev, 0x3410));
 	}
 
-	dev_priv->engine.instmem.finish_access(dev);
+	dev_priv->engine.instmem.flush(dev);
 
 	/*XXX: probably reload ch127 (NULL) state back too */
 	nv_wr32(dev, NV03_PFIFO_CACHE1_PUSH1, 127);