author    Ben Skeggs <bskeggs@redhat.com>  2011-03-31 23:03:56 -0400
committer Ben Skeggs <bskeggs@redhat.com>  2011-05-15 20:48:27 -0400
commit    4976986bd4f51368890f57b964176ec532972543 (patch)
tree      38d7c55ae27878885f4f854c765427962dad7c2b /drivers/gpu/drm
parent    d11db279014e881da9f5259c963501b33a413929 (diff)
drm/nv04/gr: move to exec engine interfaces
Like nv10-nv50, needs cleanup.

Signed-off-by: Ben Skeggs <bskeggs@redhat.com>
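For context: the "exec engine interfaces" referred to here replace the per-generation hooks hung off dev_priv->engine.graph with a nouveau_exec_engine object that each backend allocates, fills in, and registers itself. A minimal sketch of the hook set this particular patch wires up, inferred from the function signatures in the diff below (the in-tree struct in nouveau_drv.h may include additional hooks):

struct nouveau_exec_engine {
        /* abbreviated sketch, not the full in-tree definition */
        void (*destroy)(struct drm_device *, int engine);
        int  (*init)(struct drm_device *, int engine);
        int  (*fini)(struct drm_device *, int engine);
        int  (*context_new)(struct nouveau_channel *, int engine);
        void (*context_del)(struct nouveau_channel *, int engine);
        int  (*object_new)(struct nouveau_channel *, int engine,
                           u32 handle, u16 class);
};

A backend embeds this as the first member of its own engine struct (struct nv04_graph_engine below), points the hooks at its nv04_graph_* functions from nv04_graph_create(), and registers the whole thing with NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base). Per-channel graphics state then lives in chan->engctx[NVOBJ_ENGINE_GR] instead of the old chan->pgraph_ctx pointer.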
Diffstat (limited to 'drivers/gpu/drm')
-rw-r--r--  drivers/gpu/drm/nouveau/nouveau_drv.h   |  10
-rw-r--r--  drivers/gpu/drm/nouveau/nouveau_state.c |  16
-rw-r--r--  drivers/gpu/drm/nouveau/nv04_graph.c    | 353
3 files changed, 195 insertions, 184 deletions
diff --git a/drivers/gpu/drm/nouveau/nouveau_drv.h b/drivers/gpu/drm/nouveau/nouveau_drv.h
index 11e40b5b1f06..fb33d4f096c5 100644
--- a/drivers/gpu/drm/nouveau/nouveau_drv.h
+++ b/drivers/gpu/drm/nouveau/nouveau_drv.h
@@ -1141,15 +1141,9 @@ extern int nvc0_fifo_load_context(struct nouveau_channel *);
 extern int nvc0_fifo_unload_context(struct drm_device *);
 
 /* nv04_graph.c */
-extern int nv04_graph_init(struct drm_device *);
-extern void nv04_graph_takedown(struct drm_device *);
+extern int nv04_graph_create(struct drm_device *);
 extern void nv04_graph_fifo_access(struct drm_device *, bool);
-extern struct nouveau_channel *nv04_graph_channel(struct drm_device *);
-extern int nv04_graph_create_context(struct nouveau_channel *);
-extern void nv04_graph_destroy_context(struct nouveau_channel *);
-extern int nv04_graph_load_context(struct nouveau_channel *);
-extern int nv04_graph_unload_context(struct drm_device *);
-extern int nv04_graph_object_new(struct nouveau_channel *, u32, u16);
+extern int nv04_graph_object_new(struct nouveau_channel *, int, u32, u16);
 extern int nv04_graph_mthd_page_flip(struct nouveau_channel *chan,
                                      u32 class, u32 mthd, u32 data);
 extern struct nouveau_bitfield nv04_graph_nsource[];
diff --git a/drivers/gpu/drm/nouveau/nouveau_state.c b/drivers/gpu/drm/nouveau/nouveau_state.c
index 502593afeb42..2a127edc2bcf 100644
--- a/drivers/gpu/drm/nouveau/nouveau_state.c
+++ b/drivers/gpu/drm/nouveau/nouveau_state.c
@@ -65,15 +65,10 @@ static int nouveau_init_engine_ptrs(struct drm_device *dev)
                 engine->timer.takedown = nv04_timer_takedown;
                 engine->fb.init = nv04_fb_init;
                 engine->fb.takedown = nv04_fb_takedown;
-                engine->graph.init = nv04_graph_init;
-                engine->graph.takedown = nv04_graph_takedown;
-                engine->graph.fifo_access = nv04_graph_fifo_access;
-                engine->graph.channel = nv04_graph_channel;
-                engine->graph.create_context = nv04_graph_create_context;
-                engine->graph.destroy_context = nv04_graph_destroy_context;
-                engine->graph.load_context = nv04_graph_load_context;
-                engine->graph.unload_context = nv04_graph_unload_context;
-                engine->graph.object_new = nv04_graph_object_new;
+                engine->graph.init = nouveau_stub_init;
+                engine->graph.takedown = nouveau_stub_takedown;
+                engine->graph.channel = nvc0_graph_channel;
+                engine->graph.fifo_access = nvc0_graph_fifo_access;
                 engine->fifo.channels = 16;
                 engine->fifo.init = nv04_fifo_init;
                 engine->fifo.takedown = nv04_fifo_fini;
@@ -599,6 +594,9 @@ nouveau_card_init(struct drm_device *dev)
                 goto out_timer;
 
         switch (dev_priv->card_type) {
+        case NV_04:
+                nv04_graph_create(dev);
+                break;
         case NV_10:
                 nv10_graph_create(dev);
                 break;
diff --git a/drivers/gpu/drm/nouveau/nv04_graph.c b/drivers/gpu/drm/nouveau/nv04_graph.c
index eb45f3aac885..3626ee7db3ba 100644
--- a/drivers/gpu/drm/nouveau/nv04_graph.c
+++ b/drivers/gpu/drm/nouveau/nv04_graph.c
@@ -30,8 +30,9 @@
30#include "nouveau_util.h" 30#include "nouveau_util.h"
31#include "nouveau_ramht.h" 31#include "nouveau_ramht.h"
32 32
33static int nv04_graph_register(struct drm_device *dev); 33struct nv04_graph_engine {
34static void nv04_graph_isr(struct drm_device *dev); 34 struct nouveau_exec_engine base;
35};
35 36
36static uint32_t nv04_graph_ctx_regs[] = { 37static uint32_t nv04_graph_ctx_regs[] = {
37 0x0040053c, 38 0x0040053c,
@@ -351,7 +352,7 @@ struct graph_state {
         uint32_t nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
 };
 
-struct nouveau_channel *
+static struct nouveau_channel *
 nv04_graph_channel(struct drm_device *dev)
 {
         struct drm_nouveau_private *dev_priv = dev->dev_private;
@@ -366,26 +367,6 @@ nv04_graph_channel(struct drm_device *dev)
         return dev_priv->channels.ptr[chid];
 }
 
-static void
-nv04_graph_context_switch(struct drm_device *dev)
-{
-        struct drm_nouveau_private *dev_priv = dev->dev_private;
-        struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
-        struct nouveau_channel *chan = NULL;
-        int chid;
-
-        nouveau_wait_for_idle(dev);
-
-        /* If previous context is valid, we need to save it */
-        pgraph->unload_context(dev);
-
-        /* Load context for next channel */
-        chid = dev_priv->engine.fifo.channel_id(dev);
-        chan = dev_priv->channels.ptr[chid];
-        if (chan)
-                nv04_graph_load_context(chan);
-}
-
 static uint32_t *ctx_reg(struct graph_state *ctx, uint32_t reg)
 {
         int i;
@@ -398,48 +379,11 @@ static uint32_t *ctx_reg(struct graph_state *ctx, uint32_t reg)
         return NULL;
 }
 
-int nv04_graph_create_context(struct nouveau_channel *chan)
-{
-        struct graph_state *pgraph_ctx;
-        NV_DEBUG(chan->dev, "nv04_graph_context_create %d\n", chan->id);
-
-        chan->pgraph_ctx = pgraph_ctx = kzalloc(sizeof(*pgraph_ctx),
-                                                GFP_KERNEL);
-        if (pgraph_ctx == NULL)
-                return -ENOMEM;
-
-        *ctx_reg(pgraph_ctx, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;
-
-        return 0;
-}
-
-void nv04_graph_destroy_context(struct nouveau_channel *chan)
-{
-        struct drm_device *dev = chan->dev;
-        struct drm_nouveau_private *dev_priv = dev->dev_private;
-        struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
-        struct graph_state *pgraph_ctx = chan->pgraph_ctx;
-        unsigned long flags;
-
-        spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
-        pgraph->fifo_access(dev, false);
-
-        /* Unload the context if it's the currently active one */
-        if (pgraph->channel(dev) == chan)
-                pgraph->unload_context(dev);
-
-        /* Free the context resources */
-        kfree(pgraph_ctx);
-        chan->pgraph_ctx = NULL;
-
-        pgraph->fifo_access(dev, true);
-        spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);
-}
-
-int nv04_graph_load_context(struct nouveau_channel *chan)
+static int
+nv04_graph_load_context(struct nouveau_channel *chan)
 {
+        struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
         struct drm_device *dev = chan->dev;
-        struct graph_state *pgraph_ctx = chan->pgraph_ctx;
         uint32_t tmp;
         int i;
 
@@ -457,20 +401,19 @@ int nv04_graph_load_context(struct nouveau_channel *chan)
         return 0;
 }
 
-int
+static int
 nv04_graph_unload_context(struct drm_device *dev)
 {
         struct drm_nouveau_private *dev_priv = dev->dev_private;
-        struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
         struct nouveau_channel *chan = NULL;
         struct graph_state *ctx;
         uint32_t tmp;
         int i;
 
-        chan = pgraph->channel(dev);
+        chan = nv04_graph_channel(dev);
         if (!chan)
                 return 0;
-        ctx = chan->pgraph_ctx;
+        ctx = chan->engctx[NVOBJ_ENGINE_GR];
 
         for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
                 ctx->nv04[i] = nv_rd32(dev, nv04_graph_ctx_regs[i]);
@@ -482,8 +425,48 @@ nv04_graph_unload_context(struct drm_device *dev)
         return 0;
 }
 
+static int
+nv04_graph_context_new(struct nouveau_channel *chan, int engine)
+{
+        struct graph_state *pgraph_ctx;
+        NV_DEBUG(chan->dev, "nv04_graph_context_create %d\n", chan->id);
+
+        pgraph_ctx = kzalloc(sizeof(*pgraph_ctx), GFP_KERNEL);
+        if (pgraph_ctx == NULL)
+                return -ENOMEM;
+
+        *ctx_reg(pgraph_ctx, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;
+
+        chan->engctx[engine] = pgraph_ctx;
+        return 0;
+}
+
+static void
+nv04_graph_context_del(struct nouveau_channel *chan, int engine)
+{
+        struct drm_device *dev = chan->dev;
+        struct drm_nouveau_private *dev_priv = dev->dev_private;
+        struct graph_state *pgraph_ctx = chan->engctx[engine];
+        unsigned long flags;
+
+        spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
+        nv04_graph_fifo_access(dev, false);
+
+        /* Unload the context if it's the currently active one */
+        if (nv04_graph_channel(dev) == chan)
+                nv04_graph_unload_context(dev);
+
+        nv04_graph_fifo_access(dev, true);
+        spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);
+
+        /* Free the context resources */
+        kfree(pgraph_ctx);
+        chan->engctx[engine] = NULL;
+}
+
 int
-nv04_graph_object_new(struct nouveau_channel *chan, u32 handle, u16 class)
+nv04_graph_object_new(struct nouveau_channel *chan, int engine,
+                      u32 handle, u16 class)
 {
         struct drm_device *dev = chan->dev;
         struct nouveau_gpuobj *obj = NULL;
@@ -509,23 +492,18 @@ nv04_graph_object_new(struct nouveau_channel *chan, u32 handle, u16 class)
         return ret;
 }
 
-int nv04_graph_init(struct drm_device *dev)
+static int
+nv04_graph_init(struct drm_device *dev, int engine)
 {
         struct drm_nouveau_private *dev_priv = dev->dev_private;
         uint32_t tmp;
-        int ret;
 
         nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
                         ~NV_PMC_ENABLE_PGRAPH);
         nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
                          NV_PMC_ENABLE_PGRAPH);
 
-        ret = nv04_graph_register(dev);
-        if (ret)
-                return ret;
-
         /* Enable PGRAPH interrupts */
-        nouveau_irq_register(dev, 12, nv04_graph_isr);
         nv_wr32(dev, NV03_PGRAPH_INTR, 0xFFFFFFFF);
         nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);
 
@@ -559,10 +537,12 @@ int nv04_graph_init(struct drm_device *dev)
         return 0;
 }
 
-void nv04_graph_takedown(struct drm_device *dev)
+static int
+nv04_graph_fini(struct drm_device *dev, int engine)
 {
+        nv04_graph_unload_context(dev);
         nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
-        nouveau_irq_unregister(dev, 12);
+        return 0;
 }
 
 void
@@ -997,13 +977,138 @@ nv04_graph_mthd_bind_chroma(struct nouveau_channel *chan,
         return 1;
 }
 
-static int
-nv04_graph_register(struct drm_device *dev)
+static struct nouveau_bitfield nv04_graph_intr[] = {
+        { NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
+        {}
+};
+
+static struct nouveau_bitfield nv04_graph_nstatus[] = {
+        { NV04_PGRAPH_NSTATUS_STATE_IN_USE, "STATE_IN_USE" },
+        { NV04_PGRAPH_NSTATUS_INVALID_STATE, "INVALID_STATE" },
+        { NV04_PGRAPH_NSTATUS_BAD_ARGUMENT, "BAD_ARGUMENT" },
+        { NV04_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
+        {}
+};
+
+struct nouveau_bitfield nv04_graph_nsource[] = {
+        { NV03_PGRAPH_NSOURCE_NOTIFICATION, "NOTIFICATION" },
+        { NV03_PGRAPH_NSOURCE_DATA_ERROR, "DATA_ERROR" },
+        { NV03_PGRAPH_NSOURCE_PROTECTION_ERROR, "PROTECTION_ERROR" },
+        { NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION, "RANGE_EXCEPTION" },
+        { NV03_PGRAPH_NSOURCE_LIMIT_COLOR, "LIMIT_COLOR" },
+        { NV03_PGRAPH_NSOURCE_LIMIT_ZETA, "LIMIT_ZETA" },
+        { NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD, "ILLEGAL_MTHD" },
+        { NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION, "DMA_R_PROTECTION" },
+        { NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION, "DMA_W_PROTECTION" },
+        { NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION, "FORMAT_EXCEPTION" },
+        { NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION, "PATCH_EXCEPTION" },
+        { NV03_PGRAPH_NSOURCE_STATE_INVALID, "STATE_INVALID" },
+        { NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY, "DOUBLE_NOTIFY" },
+        { NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE, "NOTIFY_IN_USE" },
+        { NV03_PGRAPH_NSOURCE_METHOD_CNT, "METHOD_CNT" },
+        { NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION, "BFR_NOTIFICATION" },
+        { NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
+        { NV03_PGRAPH_NSOURCE_DMA_WIDTH_A, "DMA_WIDTH_A" },
+        { NV03_PGRAPH_NSOURCE_DMA_WIDTH_B, "DMA_WIDTH_B" },
+        {}
+};
+
+static void
+nv04_graph_context_switch(struct drm_device *dev)
 {
         struct drm_nouveau_private *dev_priv = dev->dev_private;
+        struct nouveau_channel *chan = NULL;
+        int chid;
 
-        if (dev_priv->engine.graph.registered)
-                return 0;
+        nouveau_wait_for_idle(dev);
+
+        /* If previous context is valid, we need to save it */
+        nv04_graph_unload_context(dev);
+
+        /* Load context for next channel */
+        chid = dev_priv->engine.fifo.channel_id(dev);
+        chan = dev_priv->channels.ptr[chid];
+        if (chan)
+                nv04_graph_load_context(chan);
+}
+
+static void
+nv04_graph_isr(struct drm_device *dev)
+{
+        u32 stat;
+
+        while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
+                u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
+                u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
+                u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
+                u32 chid = (addr & 0x0f000000) >> 24;
+                u32 subc = (addr & 0x0000e000) >> 13;
+                u32 mthd = (addr & 0x00001ffc);
+                u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
+                u32 class = nv_rd32(dev, 0x400180 + subc * 4) & 0xff;
+                u32 show = stat;
+
+                if (stat & NV_PGRAPH_INTR_NOTIFY) {
+                        if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
+                                if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
+                                        show &= ~NV_PGRAPH_INTR_NOTIFY;
+                        }
+                }
+
+                if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
+                        nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
+                        stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
+                        show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
+                        nv04_graph_context_switch(dev);
+                }
+
+                nv_wr32(dev, NV03_PGRAPH_INTR, stat);
+                nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);
+
+                if (show && nouveau_ratelimit()) {
+                        NV_INFO(dev, "PGRAPH -");
+                        nouveau_bitfield_print(nv04_graph_intr, show);
+                        printk(" nsource:");
+                        nouveau_bitfield_print(nv04_graph_nsource, nsource);
+                        printk(" nstatus:");
+                        nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
+                        printk("\n");
+                        NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
+                                     "mthd 0x%04x data 0x%08x\n",
+                                chid, subc, class, mthd, data);
+                }
+        }
+}
+
+static void
+nv04_graph_destroy(struct drm_device *dev, int engine)
+{
+        struct nv04_graph_engine *pgraph = nv_engine(dev, engine);
+
+        nouveau_irq_unregister(dev, 12);
+
+        NVOBJ_ENGINE_DEL(dev, GR);
+        kfree(pgraph);
+}
+
+int
+nv04_graph_create(struct drm_device *dev)
+{
+        struct nv04_graph_engine *pgraph;
+
+        pgraph = kzalloc(sizeof(*pgraph), GFP_KERNEL);
+        if (!pgraph)
+                return -ENOMEM;
+
+        pgraph->base.destroy = nv04_graph_destroy;
+        pgraph->base.init = nv04_graph_init;
+        pgraph->base.fini = nv04_graph_fini;
+        pgraph->base.context_new = nv04_graph_context_new;
+        pgraph->base.context_del = nv04_graph_context_del;
+        pgraph->base.object_new = nv04_graph_object_new;
+
+        NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base);
+        nouveau_irq_register(dev, 12, nv04_graph_isr);
 
         /* dvd subpicture */
         NVOBJ_CLASS(dev, 0x0038, GR);
@@ -1250,91 +1355,5 @@ nv04_graph_register(struct drm_device *dev)
         NVOBJ_CLASS(dev, 0x506e, SW);
         NVOBJ_MTHD (dev, 0x506e, 0x0150, nv04_graph_mthd_set_ref);
         NVOBJ_MTHD (dev, 0x506e, 0x0500, nv04_graph_mthd_page_flip);
-
-        dev_priv->engine.graph.registered = true;
         return 0;
-};
-
-static struct nouveau_bitfield nv04_graph_intr[] = {
-        { NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
-        {}
-};
-
-static struct nouveau_bitfield nv04_graph_nstatus[] = {
-        { NV04_PGRAPH_NSTATUS_STATE_IN_USE, "STATE_IN_USE" },
-        { NV04_PGRAPH_NSTATUS_INVALID_STATE, "INVALID_STATE" },
-        { NV04_PGRAPH_NSTATUS_BAD_ARGUMENT, "BAD_ARGUMENT" },
-        { NV04_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
-        {}
-};
-
-struct nouveau_bitfield nv04_graph_nsource[] = {
-        { NV03_PGRAPH_NSOURCE_NOTIFICATION, "NOTIFICATION" },
-        { NV03_PGRAPH_NSOURCE_DATA_ERROR, "DATA_ERROR" },
-        { NV03_PGRAPH_NSOURCE_PROTECTION_ERROR, "PROTECTION_ERROR" },
-        { NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION, "RANGE_EXCEPTION" },
-        { NV03_PGRAPH_NSOURCE_LIMIT_COLOR, "LIMIT_COLOR" },
-        { NV03_PGRAPH_NSOURCE_LIMIT_ZETA, "LIMIT_ZETA" },
-        { NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD, "ILLEGAL_MTHD" },
-        { NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION, "DMA_R_PROTECTION" },
-        { NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION, "DMA_W_PROTECTION" },
-        { NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION, "FORMAT_EXCEPTION" },
-        { NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION, "PATCH_EXCEPTION" },
-        { NV03_PGRAPH_NSOURCE_STATE_INVALID, "STATE_INVALID" },
-        { NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY, "DOUBLE_NOTIFY" },
-        { NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE, "NOTIFY_IN_USE" },
-        { NV03_PGRAPH_NSOURCE_METHOD_CNT, "METHOD_CNT" },
-        { NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION, "BFR_NOTIFICATION" },
-        { NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
-        { NV03_PGRAPH_NSOURCE_DMA_WIDTH_A, "DMA_WIDTH_A" },
-        { NV03_PGRAPH_NSOURCE_DMA_WIDTH_B, "DMA_WIDTH_B" },
-        {}
-};
-
-static void
-nv04_graph_isr(struct drm_device *dev)
-{
-        u32 stat;
-
-        while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
-                u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
-                u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
-                u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
-                u32 chid = (addr & 0x0f000000) >> 24;
-                u32 subc = (addr & 0x0000e000) >> 13;
-                u32 mthd = (addr & 0x00001ffc);
-                u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
-                u32 class = nv_rd32(dev, 0x400180 + subc * 4) & 0xff;
-                u32 show = stat;
-
-                if (stat & NV_PGRAPH_INTR_NOTIFY) {
-                        if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
-                                if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
-                                        show &= ~NV_PGRAPH_INTR_NOTIFY;
-                        }
-                }
-
-                if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
-                        nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
-                        stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
-                        show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
-                        nv04_graph_context_switch(dev);
-                }
-
-                nv_wr32(dev, NV03_PGRAPH_INTR, stat);
-                nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);
-
-                if (show && nouveau_ratelimit()) {
-                        NV_INFO(dev, "PGRAPH -");
-                        nouveau_bitfield_print(nv04_graph_intr, show);
-                        printk(" nsource:");
-                        nouveau_bitfield_print(nv04_graph_nsource, nsource);
-                        printk(" nstatus:");
-                        nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
-                        printk("\n");
-                        NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
-                                     "mthd 0x%04x data 0x%08x\n",
-                                chid, subc, class, mthd, data);
-                }
-        }
 }