author | Ben Skeggs <bskeggs@redhat.com> | 2010-10-19 20:39:35 -0400
---|---|---
committer | Ben Skeggs <bskeggs@redhat.com> | 2010-12-03 00:06:56 -0500
commit | b8c157d3a9a13871742c8a8d3d4598c3791ed5f5 (patch) |
tree | 3ee372c2e8aa1100148f3d6e8232befdb386399a |
parent | a6a1a38075661bec189f2bad7912f8861e6ce357 (diff) |
drm/nouveau: only expose the object classes that are supported by the chipset
We previously registered all the classes available to an entire card
generation, even though some of those objects wouldn't work on the
specific chipset in use.
Signed-off-by: Ben Skeggs <bskeggs@redhat.com>
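The diff below replaces the static, per-generation nouveau_pgraph_object_class /
nouveau_pgraph_object_method arrays with a per-device registry: each chipset's
nvXX_graph_register() declares only the classes it actually supports via
NVOBJ_CLASS(), attaches software methods via NVOBJ_MTHD(), and
nouveau_gpuobj_mthd_call() dispatches on a (class, method) pair. The sketch
that follows is a stand-alone, user-space mock of that registry pattern, using
plain singly-linked lists and invented helper names instead of the kernel's
list_head API and the driver's real signatures; it only illustrates the
register/lookup/dispatch flow, it is not the driver's code.

```c
/*
 * User-space mock of the per-device class/method registry introduced by
 * this patch.  Helper names (class_new, mthd_new, mthd_call) are
 * illustrative; the real driver uses list_head lists hung off
 * drm_nouveau_private and per-channel dispatch.
 */
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

struct mock_method {
	unsigned mthd;
	int (*exec)(unsigned class, unsigned mthd, unsigned data);
	struct mock_method *next;
};

struct mock_class {
	unsigned id;
	struct mock_method *methods;
	struct mock_class *next;
};

/* one list per device in the real driver; a single global here */
static struct mock_class *classes;

static int class_new(unsigned id)
{
	struct mock_class *oc = calloc(1, sizeof(*oc));
	if (!oc)
		return -ENOMEM;
	oc->id = id;
	oc->next = classes;
	classes = oc;
	return 0;
}

static int mthd_new(unsigned class, unsigned mthd,
		    int (*exec)(unsigned, unsigned, unsigned))
{
	struct mock_class *oc;
	struct mock_method *om;

	for (oc = classes; oc; oc = oc->next)
		if (oc->id == class)
			break;
	if (!oc)
		return -EINVAL;	/* class must be registered first */

	om = calloc(1, sizeof(*om));
	if (!om)
		return -ENOMEM;
	om->mthd = mthd;
	om->exec = exec;
	om->next = oc->methods;
	oc->methods = om;
	return 0;
}

static int mthd_call(unsigned class, unsigned mthd, unsigned data)
{
	struct mock_class *oc;
	struct mock_method *om;

	for (oc = classes; oc; oc = oc->next) {
		if (oc->id != class)
			continue;
		for (om = oc->methods; om; om = om->next)
			if (om->mthd == mthd)
				return om->exec(class, mthd, data);
	}
	return -ENOENT;	/* unknown class or method */
}

static int set_ref(unsigned class, unsigned mthd, unsigned data)
{
	printf("class 0x%04x mthd 0x%04x data 0x%08x\n", class, mthd, data);
	return 0;
}

int main(void)
{
	/* analogous to NVOBJ_CLASS()/NVOBJ_MTHD() in nv04_graph_register() */
	class_new(0x506e);
	mthd_new(0x506e, 0x0150, set_ref);

	printf("dispatch: %d\n", mthd_call(0x506e, 0x0150, 0x1234));
	printf("unknown:  %d\n", mthd_call(0x0039, 0x0100, 0));
	return 0;	/* registry intentionally leaked; this is a sketch */
}
```

The design point is that lookup failures now come from the runtime registry
(-EINVAL when attaching a method to an unregistered class, -ENOENT on an
unknown method), so nouveau_gpuobj_gr_new() and the PFIFO/PGRAPH software-
method paths share one chipset-accurate table instead of compile-time arrays.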
-rw-r--r-- | drivers/gpu/drm/nouveau/nouveau_drv.h | 38
-rw-r--r-- | drivers/gpu/drm/nouveau/nouveau_irq.c | 36
-rw-r--r-- | drivers/gpu/drm/nouveau/nouveau_object.c | 112
-rw-r--r-- | drivers/gpu/drm/nouveau/nouveau_state.c | 7
-rw-r--r-- | drivers/gpu/drm/nouveau/nv04_graph.c | 530
-rw-r--r-- | drivers/gpu/drm/nouveau/nv10_graph.c | 92
-rw-r--r-- | drivers/gpu/drm/nouveau/nv20_graph.c | 131
-rw-r--r-- | drivers/gpu/drm/nouveau/nv40_graph.c | 63
-rw-r--r-- | drivers/gpu/drm/nouveau/nv50_graph.c | 91
9 files changed, 667 insertions, 433 deletions
diff --git a/drivers/gpu/drm/nouveau/nouveau_drv.h b/drivers/gpu/drm/nouveau/nouveau_drv.h
index 2099f04c0b0a..3fb87995446b 100644
--- a/drivers/gpu/drm/nouveau/nouveau_drv.h
+++ b/drivers/gpu/drm/nouveau/nouveau_drv.h
@@ -316,21 +316,9 @@ struct nouveau_fifo_engine { | |||
316 | void (*tlb_flush)(struct drm_device *dev); | 316 | void (*tlb_flush)(struct drm_device *dev); |
317 | }; | 317 | }; |
318 | 318 | ||
319 | struct nouveau_pgraph_object_method { | ||
320 | int id; | ||
321 | int (*exec)(struct nouveau_channel *chan, int grclass, int mthd, | ||
322 | uint32_t data); | ||
323 | }; | ||
324 | |||
325 | struct nouveau_pgraph_object_class { | ||
326 | int id; | ||
327 | u32 engine; | ||
328 | struct nouveau_pgraph_object_method *methods; | ||
329 | }; | ||
330 | |||
331 | struct nouveau_pgraph_engine { | 319 | struct nouveau_pgraph_engine { |
332 | struct nouveau_pgraph_object_class *grclass; | ||
333 | bool accel_blocked; | 320 | bool accel_blocked; |
321 | bool registered; | ||
334 | int grctx_size; | 322 | int grctx_size; |
335 | 323 | ||
336 | /* NV2x/NV3x context table (0x400780) */ | 324 | /* NV2x/NV3x context table (0x400780) */ |
@@ -584,6 +572,7 @@ struct drm_nouveau_private { | |||
584 | bool ramin_available; | 572 | bool ramin_available; |
585 | struct drm_mm ramin_heap; | 573 | struct drm_mm ramin_heap; |
586 | struct list_head gpuobj_list; | 574 | struct list_head gpuobj_list; |
575 | struct list_head classes; | ||
587 | 576 | ||
588 | struct nouveau_bo *vga_ram; | 577 | struct nouveau_bo *vga_ram; |
589 | 578 | ||
@@ -816,12 +805,29 @@ extern void nouveau_channel_ref(struct nouveau_channel *chan, | |||
816 | struct nouveau_channel **pchan); | 805 | struct nouveau_channel **pchan); |
817 | 806 | ||
818 | /* nouveau_object.c */ | 807 | /* nouveau_object.c */ |
808 | #define NVOBJ_CLASS(d,c,e) do { \ | ||
809 | int ret = nouveau_gpuobj_class_new((d), (c), NVOBJ_ENGINE_##e); \ | ||
810 | if (ret) \ | ||
811 | return ret; \ | ||
812 | } while(0) | ||
813 | |||
814 | #define NVOBJ_MTHD(d,c,m,e) do { \ | ||
815 | int ret = nouveau_gpuobj_mthd_new((d), (c), (m), (e)); \ | ||
816 | if (ret) \ | ||
817 | return ret; \ | ||
818 | } while(0) | ||
819 | |||
819 | extern int nouveau_gpuobj_early_init(struct drm_device *); | 820 | extern int nouveau_gpuobj_early_init(struct drm_device *); |
820 | extern int nouveau_gpuobj_init(struct drm_device *); | 821 | extern int nouveau_gpuobj_init(struct drm_device *); |
821 | extern void nouveau_gpuobj_takedown(struct drm_device *); | 822 | extern void nouveau_gpuobj_takedown(struct drm_device *); |
822 | extern int nouveau_gpuobj_suspend(struct drm_device *dev); | 823 | extern int nouveau_gpuobj_suspend(struct drm_device *dev); |
823 | extern void nouveau_gpuobj_suspend_cleanup(struct drm_device *dev); | 824 | extern void nouveau_gpuobj_suspend_cleanup(struct drm_device *dev); |
824 | extern void nouveau_gpuobj_resume(struct drm_device *dev); | 825 | extern void nouveau_gpuobj_resume(struct drm_device *dev); |
826 | extern int nouveau_gpuobj_class_new(struct drm_device *, u32 class, u32 eng); | ||
827 | extern int nouveau_gpuobj_mthd_new(struct drm_device *, u32 class, u32 mthd, | ||
828 | int (*exec)(struct nouveau_channel *, | ||
829 | u32 class, u32 mthd, u32 data)); | ||
830 | extern int nouveau_gpuobj_mthd_call(struct nouveau_channel *, u32, u32, u32); | ||
825 | extern int nouveau_gpuobj_channel_init(struct nouveau_channel *, | 831 | extern int nouveau_gpuobj_channel_init(struct nouveau_channel *, |
826 | uint32_t vram_h, uint32_t tt_h); | 832 | uint32_t vram_h, uint32_t tt_h); |
827 | extern void nouveau_gpuobj_channel_takedown(struct nouveau_channel *); | 833 | extern void nouveau_gpuobj_channel_takedown(struct nouveau_channel *); |
@@ -1038,7 +1044,6 @@ extern int nvc0_fifo_load_context(struct nouveau_channel *); | |||
1038 | extern int nvc0_fifo_unload_context(struct drm_device *); | 1044 | extern int nvc0_fifo_unload_context(struct drm_device *); |
1039 | 1045 | ||
1040 | /* nv04_graph.c */ | 1046 | /* nv04_graph.c */ |
1041 | extern struct nouveau_pgraph_object_class nv04_graph_grclass[]; | ||
1042 | extern int nv04_graph_init(struct drm_device *); | 1047 | extern int nv04_graph_init(struct drm_device *); |
1043 | extern void nv04_graph_takedown(struct drm_device *); | 1048 | extern void nv04_graph_takedown(struct drm_device *); |
1044 | extern void nv04_graph_fifo_access(struct drm_device *, bool); | 1049 | extern void nv04_graph_fifo_access(struct drm_device *, bool); |
@@ -1050,7 +1055,6 @@ extern int nv04_graph_unload_context(struct drm_device *); | |||
1050 | extern void nv04_graph_context_switch(struct drm_device *); | 1055 | extern void nv04_graph_context_switch(struct drm_device *); |
1051 | 1056 | ||
1052 | /* nv10_graph.c */ | 1057 | /* nv10_graph.c */ |
1053 | extern struct nouveau_pgraph_object_class nv10_graph_grclass[]; | ||
1054 | extern int nv10_graph_init(struct drm_device *); | 1058 | extern int nv10_graph_init(struct drm_device *); |
1055 | extern void nv10_graph_takedown(struct drm_device *); | 1059 | extern void nv10_graph_takedown(struct drm_device *); |
1056 | extern struct nouveau_channel *nv10_graph_channel(struct drm_device *); | 1060 | extern struct nouveau_channel *nv10_graph_channel(struct drm_device *); |
@@ -1063,8 +1067,6 @@ extern void nv10_graph_set_region_tiling(struct drm_device *, int, uint32_t, | |||
1063 | uint32_t, uint32_t); | 1067 | uint32_t, uint32_t); |
1064 | 1068 | ||
1065 | /* nv20_graph.c */ | 1069 | /* nv20_graph.c */ |
1066 | extern struct nouveau_pgraph_object_class nv20_graph_grclass[]; | ||
1067 | extern struct nouveau_pgraph_object_class nv30_graph_grclass[]; | ||
1068 | extern int nv20_graph_create_context(struct nouveau_channel *); | 1070 | extern int nv20_graph_create_context(struct nouveau_channel *); |
1069 | extern void nv20_graph_destroy_context(struct nouveau_channel *); | 1071 | extern void nv20_graph_destroy_context(struct nouveau_channel *); |
1070 | extern int nv20_graph_load_context(struct nouveau_channel *); | 1072 | extern int nv20_graph_load_context(struct nouveau_channel *); |
@@ -1076,7 +1078,6 @@ extern void nv20_graph_set_region_tiling(struct drm_device *, int, uint32_t, | |||
1076 | uint32_t, uint32_t); | 1078 | uint32_t, uint32_t); |
1077 | 1079 | ||
1078 | /* nv40_graph.c */ | 1080 | /* nv40_graph.c */ |
1079 | extern struct nouveau_pgraph_object_class nv40_graph_grclass[]; | ||
1080 | extern int nv40_graph_init(struct drm_device *); | 1081 | extern int nv40_graph_init(struct drm_device *); |
1081 | extern void nv40_graph_takedown(struct drm_device *); | 1082 | extern void nv40_graph_takedown(struct drm_device *); |
1082 | extern struct nouveau_channel *nv40_graph_channel(struct drm_device *); | 1083 | extern struct nouveau_channel *nv40_graph_channel(struct drm_device *); |
@@ -1089,7 +1090,6 @@ extern void nv40_graph_set_region_tiling(struct drm_device *, int, uint32_t, | |||
1089 | uint32_t, uint32_t); | 1090 | uint32_t, uint32_t); |
1090 | 1091 | ||
1091 | /* nv50_graph.c */ | 1092 | /* nv50_graph.c */ |
1092 | extern struct nouveau_pgraph_object_class nv50_graph_grclass[]; | ||
1093 | extern int nv50_graph_init(struct drm_device *); | 1093 | extern int nv50_graph_init(struct drm_device *); |
1094 | extern void nv50_graph_takedown(struct drm_device *); | 1094 | extern void nv50_graph_takedown(struct drm_device *); |
1095 | extern void nv50_graph_fifo_access(struct drm_device *, bool); | 1095 | extern void nv50_graph_fifo_access(struct drm_device *, bool); |
diff --git a/drivers/gpu/drm/nouveau/nouveau_irq.c b/drivers/gpu/drm/nouveau/nouveau_irq.c
index c5e37bc17192..f09151d17297 100644
--- a/drivers/gpu/drm/nouveau/nouveau_irq.c
+++ b/drivers/gpu/drm/nouveau/nouveau_irq.c
@@ -80,33 +80,6 @@ nouveau_irq_uninstall(struct drm_device *dev) | |||
80 | nv_wr32(dev, NV03_PMC_INTR_EN_0, 0); | 80 | nv_wr32(dev, NV03_PMC_INTR_EN_0, 0); |
81 | } | 81 | } |
82 | 82 | ||
83 | static int | ||
84 | nouveau_call_method(struct nouveau_channel *chan, int class, int mthd, int data) | ||
85 | { | ||
86 | struct drm_nouveau_private *dev_priv = chan->dev->dev_private; | ||
87 | struct nouveau_pgraph_object_method *grm; | ||
88 | struct nouveau_pgraph_object_class *grc; | ||
89 | |||
90 | grc = dev_priv->engine.graph.grclass; | ||
91 | while (grc->id) { | ||
92 | if (grc->id == class) | ||
93 | break; | ||
94 | grc++; | ||
95 | } | ||
96 | |||
97 | if (grc->id != class || !grc->methods) | ||
98 | return -ENOENT; | ||
99 | |||
100 | grm = grc->methods; | ||
101 | while (grm->id) { | ||
102 | if (grm->id == mthd) | ||
103 | return grm->exec(chan, class, mthd, data); | ||
104 | grm++; | ||
105 | } | ||
106 | |||
107 | return -ENOENT; | ||
108 | } | ||
109 | |||
110 | static bool | 83 | static bool |
111 | nouveau_fifo_swmthd(struct drm_device *dev, u32 chid, u32 addr, u32 data) | 84 | nouveau_fifo_swmthd(struct drm_device *dev, u32 chid, u32 addr, u32 data) |
112 | { | 85 | { |
@@ -142,8 +115,8 @@ nouveau_fifo_swmthd(struct drm_device *dev, u32 chid, u32 addr, u32 data) | |||
142 | if (unlikely(((engine >> (subc * 4)) & 0xf) != 0)) | 115 | if (unlikely(((engine >> (subc * 4)) & 0xf) != 0)) |
143 | break; | 116 | break; |
144 | 117 | ||
145 | if (!nouveau_call_method(chan, chan->sw_subchannel[subc], | 118 | if (!nouveau_gpuobj_mthd_call(chan, chan->sw_subchannel[subc], |
146 | mthd, data)) | 119 | mthd, data)) |
147 | handled = true; | 120 | handled = true; |
148 | break; | 121 | break; |
149 | } | 122 | } |
@@ -541,6 +514,7 @@ nouveau_pgraph_intr_swmthd(struct drm_device *dev, | |||
541 | struct nouveau_pgraph_trap *trap) | 514 | struct nouveau_pgraph_trap *trap) |
542 | { | 515 | { |
543 | struct drm_nouveau_private *dev_priv = dev->dev_private; | 516 | struct drm_nouveau_private *dev_priv = dev->dev_private; |
517 | struct nouveau_channel *chan; | ||
544 | unsigned long flags; | 518 | unsigned long flags; |
545 | int ret = -EINVAL; | 519 | int ret = -EINVAL; |
546 | 520 | ||
@@ -548,8 +522,8 @@ nouveau_pgraph_intr_swmthd(struct drm_device *dev, | |||
548 | if (trap->channel > 0 && | 522 | if (trap->channel > 0 && |
549 | trap->channel < dev_priv->engine.fifo.channels && | 523 | trap->channel < dev_priv->engine.fifo.channels && |
550 | dev_priv->channels.ptr[trap->channel]) { | 524 | dev_priv->channels.ptr[trap->channel]) { |
551 | ret = nouveau_call_method(dev_priv->channels.ptr[trap->channel], | 525 | chan = dev_priv->channels.ptr[trap->channel]; |
552 | trap->class, trap->mthd, trap->data); | 526 | ret = nouveau_gpuobj_mthd_call(chan, trap->class, trap->mthd, trap->data); |
553 | } | 527 | } |
554 | spin_unlock_irqrestore(&dev_priv->channels.lock, flags); | 528 | spin_unlock_irqrestore(&dev_priv->channels.lock, flags); |
555 | 529 | ||
diff --git a/drivers/gpu/drm/nouveau/nouveau_object.c b/drivers/gpu/drm/nouveau/nouveau_object.c
index 9c26da4cdc00..6226beb3613d 100644
--- a/drivers/gpu/drm/nouveau/nouveau_object.c
+++ b/drivers/gpu/drm/nouveau/nouveau_object.c
@@ -36,6 +36,83 @@ | |||
36 | #include "nouveau_drm.h" | 36 | #include "nouveau_drm.h" |
37 | #include "nouveau_ramht.h" | 37 | #include "nouveau_ramht.h" |
38 | 38 | ||
39 | struct nouveau_gpuobj_method { | ||
40 | struct list_head head; | ||
41 | u32 mthd; | ||
42 | int (*exec)(struct nouveau_channel *, u32 class, u32 mthd, u32 data); | ||
43 | }; | ||
44 | |||
45 | struct nouveau_gpuobj_class { | ||
46 | struct list_head head; | ||
47 | struct list_head methods; | ||
48 | u32 id; | ||
49 | u32 engine; | ||
50 | }; | ||
51 | |||
52 | int | ||
53 | nouveau_gpuobj_class_new(struct drm_device *dev, u32 class, u32 engine) | ||
54 | { | ||
55 | struct drm_nouveau_private *dev_priv = dev->dev_private; | ||
56 | struct nouveau_gpuobj_class *oc; | ||
57 | |||
58 | oc = kzalloc(sizeof(*oc), GFP_KERNEL); | ||
59 | if (!oc) | ||
60 | return -ENOMEM; | ||
61 | |||
62 | INIT_LIST_HEAD(&oc->methods); | ||
63 | oc->id = class; | ||
64 | oc->engine = engine; | ||
65 | list_add(&oc->head, &dev_priv->classes); | ||
66 | return 0; | ||
67 | } | ||
68 | |||
69 | int | ||
70 | nouveau_gpuobj_mthd_new(struct drm_device *dev, u32 class, u32 mthd, | ||
71 | int (*exec)(struct nouveau_channel *, u32, u32, u32)) | ||
72 | { | ||
73 | struct drm_nouveau_private *dev_priv = dev->dev_private; | ||
74 | struct nouveau_gpuobj_method *om; | ||
75 | struct nouveau_gpuobj_class *oc; | ||
76 | |||
77 | list_for_each_entry(oc, &dev_priv->classes, head) { | ||
78 | if (oc->id == class) | ||
79 | goto found; | ||
80 | } | ||
81 | |||
82 | return -EINVAL; | ||
83 | |||
84 | found: | ||
85 | om = kzalloc(sizeof(*om), GFP_KERNEL); | ||
86 | if (!om) | ||
87 | return -ENOMEM; | ||
88 | |||
89 | om->mthd = mthd; | ||
90 | om->exec = exec; | ||
91 | list_add(&om->head, &oc->methods); | ||
92 | return 0; | ||
93 | } | ||
94 | |||
95 | int | ||
96 | nouveau_gpuobj_mthd_call(struct nouveau_channel *chan, | ||
97 | u32 class, u32 mthd, u32 data) | ||
98 | { | ||
99 | struct drm_nouveau_private *dev_priv = chan->dev->dev_private; | ||
100 | struct nouveau_gpuobj_method *om; | ||
101 | struct nouveau_gpuobj_class *oc; | ||
102 | |||
103 | list_for_each_entry(oc, &dev_priv->classes, head) { | ||
104 | if (oc->id != class) | ||
105 | continue; | ||
106 | |||
107 | list_for_each_entry(om, &oc->methods, head) { | ||
108 | if (om->mthd == mthd) | ||
109 | return om->exec(chan, class, mthd, data); | ||
110 | } | ||
111 | } | ||
112 | |||
113 | return -ENOENT; | ||
114 | } | ||
115 | |||
39 | /* NVidia uses context objects to drive drawing operations. | 116 | /* NVidia uses context objects to drive drawing operations. |
40 | 117 | ||
41 | Context objects can be selected into 8 subchannels in the FIFO, | 118 | Context objects can be selected into 8 subchannels in the FIFO, |
@@ -205,9 +282,20 @@ void | |||
205 | nouveau_gpuobj_takedown(struct drm_device *dev) | 282 | nouveau_gpuobj_takedown(struct drm_device *dev) |
206 | { | 283 | { |
207 | struct drm_nouveau_private *dev_priv = dev->dev_private; | 284 | struct drm_nouveau_private *dev_priv = dev->dev_private; |
285 | struct nouveau_gpuobj_method *om, *tm; | ||
286 | struct nouveau_gpuobj_class *oc, *tc; | ||
208 | 287 | ||
209 | NV_DEBUG(dev, "\n"); | 288 | NV_DEBUG(dev, "\n"); |
210 | 289 | ||
290 | list_for_each_entry_safe(oc, tc, &dev_priv->classes, head) { | ||
291 | list_for_each_entry_safe(om, tm, &oc->methods, head) { | ||
292 | list_del(&om->head); | ||
293 | kfree(om); | ||
294 | } | ||
295 | list_del(&oc->head); | ||
296 | kfree(oc); | ||
297 | } | ||
298 | |||
211 | BUG_ON(!list_empty(&dev_priv->gpuobj_list)); | 299 | BUG_ON(!list_empty(&dev_priv->gpuobj_list)); |
212 | } | 300 | } |
213 | 301 | ||
@@ -527,26 +615,22 @@ nouveau_gpuobj_gr_new(struct nouveau_channel *chan, int class, | |||
527 | struct nouveau_gpuobj **gpuobj) | 615 | struct nouveau_gpuobj **gpuobj) |
528 | { | 616 | { |
529 | struct drm_nouveau_private *dev_priv = chan->dev->dev_private; | 617 | struct drm_nouveau_private *dev_priv = chan->dev->dev_private; |
530 | struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph; | ||
531 | struct nouveau_pgraph_object_class *grc; | ||
532 | struct drm_device *dev = chan->dev; | 618 | struct drm_device *dev = chan->dev; |
619 | struct nouveau_gpuobj_class *oc; | ||
533 | int ret; | 620 | int ret; |
534 | 621 | ||
535 | NV_DEBUG(dev, "ch%d class=0x%04x\n", chan->id, class); | 622 | NV_DEBUG(dev, "ch%d class=0x%04x\n", chan->id, class); |
536 | 623 | ||
537 | grc = pgraph->grclass; | 624 | list_for_each_entry(oc, &dev_priv->classes, head) { |
538 | while (grc->id) { | 625 | if (oc->id == class) |
539 | if (grc->id == class) | 626 | goto found; |
540 | break; | ||
541 | grc++; | ||
542 | } | 627 | } |
543 | 628 | ||
544 | if (!grc->id) { | 629 | NV_ERROR(dev, "illegal object class: 0x%x\n", class); |
545 | NV_ERROR(dev, "illegal object class: 0x%x\n", class); | 630 | return -EINVAL; |
546 | return -EINVAL; | ||
547 | } | ||
548 | 631 | ||
549 | if (grc->engine == NVOBJ_ENGINE_SW) | 632 | found: |
633 | if (oc->engine == NVOBJ_ENGINE_SW) | ||
550 | return nouveau_gpuobj_sw_new(chan, class, gpuobj); | 634 | return nouveau_gpuobj_sw_new(chan, class, gpuobj); |
551 | 635 | ||
552 | ret = nouveau_gpuobj_new(dev, chan, | 636 | ret = nouveau_gpuobj_new(dev, chan, |
@@ -585,8 +669,8 @@ nouveau_gpuobj_gr_new(struct nouveau_channel *chan, int class, | |||
585 | } | 669 | } |
586 | dev_priv->engine.instmem.flush(dev); | 670 | dev_priv->engine.instmem.flush(dev); |
587 | 671 | ||
588 | (*gpuobj)->engine = grc->engine; | 672 | (*gpuobj)->engine = oc->engine; |
589 | (*gpuobj)->class = class; | 673 | (*gpuobj)->class = oc->id; |
590 | return 0; | 674 | return 0; |
591 | } | 675 | } |
592 | 676 | ||
diff --git a/drivers/gpu/drm/nouveau/nouveau_state.c b/drivers/gpu/drm/nouveau/nouveau_state.c
index 82b58188398b..be28754ffd50 100644
--- a/drivers/gpu/drm/nouveau/nouveau_state.c
+++ b/drivers/gpu/drm/nouveau/nouveau_state.c
@@ -65,7 +65,6 @@ static int nouveau_init_engine_ptrs(struct drm_device *dev) | |||
65 | engine->timer.takedown = nv04_timer_takedown; | 65 | engine->timer.takedown = nv04_timer_takedown; |
66 | engine->fb.init = nv04_fb_init; | 66 | engine->fb.init = nv04_fb_init; |
67 | engine->fb.takedown = nv04_fb_takedown; | 67 | engine->fb.takedown = nv04_fb_takedown; |
68 | engine->graph.grclass = nv04_graph_grclass; | ||
69 | engine->graph.init = nv04_graph_init; | 68 | engine->graph.init = nv04_graph_init; |
70 | engine->graph.takedown = nv04_graph_takedown; | 69 | engine->graph.takedown = nv04_graph_takedown; |
71 | engine->graph.fifo_access = nv04_graph_fifo_access; | 70 | engine->graph.fifo_access = nv04_graph_fifo_access; |
@@ -118,7 +117,6 @@ static int nouveau_init_engine_ptrs(struct drm_device *dev) | |||
118 | engine->fb.init = nv10_fb_init; | 117 | engine->fb.init = nv10_fb_init; |
119 | engine->fb.takedown = nv10_fb_takedown; | 118 | engine->fb.takedown = nv10_fb_takedown; |
120 | engine->fb.set_region_tiling = nv10_fb_set_region_tiling; | 119 | engine->fb.set_region_tiling = nv10_fb_set_region_tiling; |
121 | engine->graph.grclass = nv10_graph_grclass; | ||
122 | engine->graph.init = nv10_graph_init; | 120 | engine->graph.init = nv10_graph_init; |
123 | engine->graph.takedown = nv10_graph_takedown; | 121 | engine->graph.takedown = nv10_graph_takedown; |
124 | engine->graph.channel = nv10_graph_channel; | 122 | engine->graph.channel = nv10_graph_channel; |
@@ -172,7 +170,6 @@ static int nouveau_init_engine_ptrs(struct drm_device *dev) | |||
172 | engine->fb.init = nv10_fb_init; | 170 | engine->fb.init = nv10_fb_init; |
173 | engine->fb.takedown = nv10_fb_takedown; | 171 | engine->fb.takedown = nv10_fb_takedown; |
174 | engine->fb.set_region_tiling = nv10_fb_set_region_tiling; | 172 | engine->fb.set_region_tiling = nv10_fb_set_region_tiling; |
175 | engine->graph.grclass = nv20_graph_grclass; | ||
176 | engine->graph.init = nv20_graph_init; | 173 | engine->graph.init = nv20_graph_init; |
177 | engine->graph.takedown = nv20_graph_takedown; | 174 | engine->graph.takedown = nv20_graph_takedown; |
178 | engine->graph.channel = nv10_graph_channel; | 175 | engine->graph.channel = nv10_graph_channel; |
@@ -226,7 +223,6 @@ static int nouveau_init_engine_ptrs(struct drm_device *dev) | |||
226 | engine->fb.init = nv30_fb_init; | 223 | engine->fb.init = nv30_fb_init; |
227 | engine->fb.takedown = nv30_fb_takedown; | 224 | engine->fb.takedown = nv30_fb_takedown; |
228 | engine->fb.set_region_tiling = nv10_fb_set_region_tiling; | 225 | engine->fb.set_region_tiling = nv10_fb_set_region_tiling; |
229 | engine->graph.grclass = nv30_graph_grclass; | ||
230 | engine->graph.init = nv30_graph_init; | 226 | engine->graph.init = nv30_graph_init; |
231 | engine->graph.takedown = nv20_graph_takedown; | 227 | engine->graph.takedown = nv20_graph_takedown; |
232 | engine->graph.fifo_access = nv04_graph_fifo_access; | 228 | engine->graph.fifo_access = nv04_graph_fifo_access; |
@@ -283,7 +279,6 @@ static int nouveau_init_engine_ptrs(struct drm_device *dev) | |||
283 | engine->fb.init = nv40_fb_init; | 279 | engine->fb.init = nv40_fb_init; |
284 | engine->fb.takedown = nv40_fb_takedown; | 280 | engine->fb.takedown = nv40_fb_takedown; |
285 | engine->fb.set_region_tiling = nv40_fb_set_region_tiling; | 281 | engine->fb.set_region_tiling = nv40_fb_set_region_tiling; |
286 | engine->graph.grclass = nv40_graph_grclass; | ||
287 | engine->graph.init = nv40_graph_init; | 282 | engine->graph.init = nv40_graph_init; |
288 | engine->graph.takedown = nv40_graph_takedown; | 283 | engine->graph.takedown = nv40_graph_takedown; |
289 | engine->graph.fifo_access = nv04_graph_fifo_access; | 284 | engine->graph.fifo_access = nv04_graph_fifo_access; |
@@ -345,7 +340,6 @@ static int nouveau_init_engine_ptrs(struct drm_device *dev) | |||
345 | engine->timer.takedown = nv04_timer_takedown; | 340 | engine->timer.takedown = nv04_timer_takedown; |
346 | engine->fb.init = nv50_fb_init; | 341 | engine->fb.init = nv50_fb_init; |
347 | engine->fb.takedown = nv50_fb_takedown; | 342 | engine->fb.takedown = nv50_fb_takedown; |
348 | engine->graph.grclass = nv50_graph_grclass; | ||
349 | engine->graph.init = nv50_graph_init; | 343 | engine->graph.init = nv50_graph_init; |
350 | engine->graph.takedown = nv50_graph_takedown; | 344 | engine->graph.takedown = nv50_graph_takedown; |
351 | engine->graph.fifo_access = nv50_graph_fifo_access; | 345 | engine->graph.fifo_access = nv50_graph_fifo_access; |
@@ -424,7 +418,6 @@ static int nouveau_init_engine_ptrs(struct drm_device *dev) | |||
424 | engine->timer.takedown = nv04_timer_takedown; | 418 | engine->timer.takedown = nv04_timer_takedown; |
425 | engine->fb.init = nvc0_fb_init; | 419 | engine->fb.init = nvc0_fb_init; |
426 | engine->fb.takedown = nvc0_fb_takedown; | 420 | engine->fb.takedown = nvc0_fb_takedown; |
427 | engine->graph.grclass = NULL; //nvc0_graph_grclass; | ||
428 | engine->graph.init = nvc0_graph_init; | 421 | engine->graph.init = nvc0_graph_init; |
429 | engine->graph.takedown = nvc0_graph_takedown; | 422 | engine->graph.takedown = nvc0_graph_takedown; |
430 | engine->graph.fifo_access = nvc0_graph_fifo_access; | 423 | engine->graph.fifo_access = nvc0_graph_fifo_access; |
diff --git a/drivers/gpu/drm/nouveau/nv04_graph.c b/drivers/gpu/drm/nouveau/nv04_graph.c
index 5d8ab1b6b04a..81aba097ef95 100644
--- a/drivers/gpu/drm/nouveau/nv04_graph.c
+++ b/drivers/gpu/drm/nouveau/nv04_graph.c
@@ -27,6 +27,8 @@ | |||
27 | #include "nouveau_drm.h" | 27 | #include "nouveau_drm.h" |
28 | #include "nouveau_drv.h" | 28 | #include "nouveau_drv.h" |
29 | 29 | ||
30 | static int nv04_graph_register(struct drm_device *dev); | ||
31 | |||
30 | static uint32_t nv04_graph_ctx_regs[] = { | 32 | static uint32_t nv04_graph_ctx_regs[] = { |
31 | 0x0040053c, | 33 | 0x0040053c, |
32 | 0x00400544, | 34 | 0x00400544, |
@@ -483,12 +485,17 @@ int nv04_graph_init(struct drm_device *dev) | |||
483 | { | 485 | { |
484 | struct drm_nouveau_private *dev_priv = dev->dev_private; | 486 | struct drm_nouveau_private *dev_priv = dev->dev_private; |
485 | uint32_t tmp; | 487 | uint32_t tmp; |
488 | int ret; | ||
486 | 489 | ||
487 | nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) & | 490 | nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) & |
488 | ~NV_PMC_ENABLE_PGRAPH); | 491 | ~NV_PMC_ENABLE_PGRAPH); |
489 | nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) | | 492 | nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) | |
490 | NV_PMC_ENABLE_PGRAPH); | 493 | NV_PMC_ENABLE_PGRAPH); |
491 | 494 | ||
495 | ret = nv04_graph_register(dev); | ||
496 | if (ret) | ||
497 | return ret; | ||
498 | |||
492 | /* Enable PGRAPH interrupts */ | 499 | /* Enable PGRAPH interrupts */ |
493 | nv_wr32(dev, NV03_PGRAPH_INTR, 0xFFFFFFFF); | 500 | nv_wr32(dev, NV03_PGRAPH_INTR, 0xFFFFFFFF); |
494 | nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF); | 501 | nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF); |
@@ -539,8 +546,8 @@ nv04_graph_fifo_access(struct drm_device *dev, bool enabled) | |||
539 | } | 546 | } |
540 | 547 | ||
541 | static int | 548 | static int |
542 | nv04_graph_mthd_set_ref(struct nouveau_channel *chan, int grclass, | 549 | nv04_graph_mthd_set_ref(struct nouveau_channel *chan, |
543 | int mthd, uint32_t data) | 550 | u32 class, u32 mthd, u32 data) |
544 | { | 551 | { |
545 | atomic_set(&chan->fence.last_sequence_irq, data); | 552 | atomic_set(&chan->fence.last_sequence_irq, data); |
546 | return 0; | 553 | return 0; |
@@ -621,12 +628,12 @@ nv04_graph_mthd_set_ref(struct nouveau_channel *chan, int grclass, | |||
621 | */ | 628 | */ |
622 | 629 | ||
623 | static void | 630 | static void |
624 | nv04_graph_set_ctx1(struct nouveau_channel *chan, uint32_t mask, uint32_t value) | 631 | nv04_graph_set_ctx1(struct nouveau_channel *chan, u32 mask, u32 value) |
625 | { | 632 | { |
626 | struct drm_device *dev = chan->dev; | 633 | struct drm_device *dev = chan->dev; |
627 | uint32_t instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4; | 634 | u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4; |
628 | int subc = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7; | 635 | int subc = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7; |
629 | uint32_t tmp; | 636 | u32 tmp; |
630 | 637 | ||
631 | tmp = nv_ri32(dev, instance); | 638 | tmp = nv_ri32(dev, instance); |
632 | tmp &= ~mask; | 639 | tmp &= ~mask; |
@@ -638,11 +645,11 @@ nv04_graph_set_ctx1(struct nouveau_channel *chan, uint32_t mask, uint32_t value) | |||
638 | } | 645 | } |
639 | 646 | ||
640 | static void | 647 | static void |
641 | nv04_graph_set_ctx_val(struct nouveau_channel *chan, uint32_t mask, uint32_t value) | 648 | nv04_graph_set_ctx_val(struct nouveau_channel *chan, u32 mask, u32 value) |
642 | { | 649 | { |
643 | struct drm_device *dev = chan->dev; | 650 | struct drm_device *dev = chan->dev; |
644 | uint32_t instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4; | 651 | u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4; |
645 | uint32_t tmp, ctx1; | 652 | u32 tmp, ctx1; |
646 | int class, op, valid = 1; | 653 | int class, op, valid = 1; |
647 | 654 | ||
648 | ctx1 = nv_ri32(dev, instance); | 655 | ctx1 = nv_ri32(dev, instance); |
@@ -687,13 +694,13 @@ nv04_graph_set_ctx_val(struct nouveau_channel *chan, uint32_t mask, uint32_t val | |||
687 | } | 694 | } |
688 | 695 | ||
689 | static int | 696 | static int |
690 | nv04_graph_mthd_set_operation(struct nouveau_channel *chan, int grclass, | 697 | nv04_graph_mthd_set_operation(struct nouveau_channel *chan, |
691 | int mthd, uint32_t data) | 698 | u32 class, u32 mthd, u32 data) |
692 | { | 699 | { |
693 | if (data > 5) | 700 | if (data > 5) |
694 | return 1; | 701 | return 1; |
695 | /* Old versions of the objects only accept first three operations. */ | 702 | /* Old versions of the objects only accept first three operations. */ |
696 | if (data > 2 && grclass < 0x40) | 703 | if (data > 2 && class < 0x40) |
697 | return 1; | 704 | return 1; |
698 | nv04_graph_set_ctx1(chan, 0x00038000, data << 15); | 705 | nv04_graph_set_ctx1(chan, 0x00038000, data << 15); |
699 | /* changing operation changes set of objects needed for validation */ | 706 | /* changing operation changes set of objects needed for validation */ |
@@ -702,8 +709,8 @@ nv04_graph_mthd_set_operation(struct nouveau_channel *chan, int grclass, | |||
702 | } | 709 | } |
703 | 710 | ||
704 | static int | 711 | static int |
705 | nv04_graph_mthd_surf3d_clip_h(struct nouveau_channel *chan, int grclass, | 712 | nv04_graph_mthd_surf3d_clip_h(struct nouveau_channel *chan, |
706 | int mthd, uint32_t data) | 713 | u32 class, u32 mthd, u32 data) |
707 | { | 714 | { |
708 | uint32_t min = data & 0xffff, max; | 715 | uint32_t min = data & 0xffff, max; |
709 | uint32_t w = data >> 16; | 716 | uint32_t w = data >> 16; |
@@ -721,8 +728,8 @@ nv04_graph_mthd_surf3d_clip_h(struct nouveau_channel *chan, int grclass, | |||
721 | } | 728 | } |
722 | 729 | ||
723 | static int | 730 | static int |
724 | nv04_graph_mthd_surf3d_clip_v(struct nouveau_channel *chan, int grclass, | 731 | nv04_graph_mthd_surf3d_clip_v(struct nouveau_channel *chan, |
725 | int mthd, uint32_t data) | 732 | u32 class, u32 mthd, u32 data) |
726 | { | 733 | { |
727 | uint32_t min = data & 0xffff, max; | 734 | uint32_t min = data & 0xffff, max; |
728 | uint32_t w = data >> 16; | 735 | uint32_t w = data >> 16; |
@@ -740,8 +747,8 @@ nv04_graph_mthd_surf3d_clip_v(struct nouveau_channel *chan, int grclass, | |||
740 | } | 747 | } |
741 | 748 | ||
742 | static int | 749 | static int |
743 | nv04_graph_mthd_bind_surf2d(struct nouveau_channel *chan, int grclass, | 750 | nv04_graph_mthd_bind_surf2d(struct nouveau_channel *chan, |
744 | int mthd, uint32_t data) | 751 | u32 class, u32 mthd, u32 data) |
745 | { | 752 | { |
746 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 753 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
747 | case 0x30: | 754 | case 0x30: |
@@ -757,8 +764,8 @@ nv04_graph_mthd_bind_surf2d(struct nouveau_channel *chan, int grclass, | |||
757 | } | 764 | } |
758 | 765 | ||
759 | static int | 766 | static int |
760 | nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_channel *chan, int grclass, | 767 | nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_channel *chan, |
761 | int mthd, uint32_t data) | 768 | u32 class, u32 mthd, u32 data) |
762 | { | 769 | { |
763 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 770 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
764 | case 0x30: | 771 | case 0x30: |
@@ -778,8 +785,8 @@ nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_channel *chan, int grclass, | |||
778 | } | 785 | } |
779 | 786 | ||
780 | static int | 787 | static int |
781 | nv04_graph_mthd_bind_nv01_patt(struct nouveau_channel *chan, int grclass, | 788 | nv04_graph_mthd_bind_nv01_patt(struct nouveau_channel *chan, |
782 | int mthd, uint32_t data) | 789 | u32 class, u32 mthd, u32 data) |
783 | { | 790 | { |
784 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 791 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
785 | case 0x30: | 792 | case 0x30: |
@@ -793,8 +800,8 @@ nv04_graph_mthd_bind_nv01_patt(struct nouveau_channel *chan, int grclass, | |||
793 | } | 800 | } |
794 | 801 | ||
795 | static int | 802 | static int |
796 | nv04_graph_mthd_bind_nv04_patt(struct nouveau_channel *chan, int grclass, | 803 | nv04_graph_mthd_bind_nv04_patt(struct nouveau_channel *chan, |
797 | int mthd, uint32_t data) | 804 | u32 class, u32 mthd, u32 data) |
798 | { | 805 | { |
799 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 806 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
800 | case 0x30: | 807 | case 0x30: |
@@ -808,8 +815,8 @@ nv04_graph_mthd_bind_nv04_patt(struct nouveau_channel *chan, int grclass, | |||
808 | } | 815 | } |
809 | 816 | ||
810 | static int | 817 | static int |
811 | nv04_graph_mthd_bind_rop(struct nouveau_channel *chan, int grclass, | 818 | nv04_graph_mthd_bind_rop(struct nouveau_channel *chan, |
812 | int mthd, uint32_t data) | 819 | u32 class, u32 mthd, u32 data) |
813 | { | 820 | { |
814 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 821 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
815 | case 0x30: | 822 | case 0x30: |
@@ -823,8 +830,8 @@ nv04_graph_mthd_bind_rop(struct nouveau_channel *chan, int grclass, | |||
823 | } | 830 | } |
824 | 831 | ||
825 | static int | 832 | static int |
826 | nv04_graph_mthd_bind_beta1(struct nouveau_channel *chan, int grclass, | 833 | nv04_graph_mthd_bind_beta1(struct nouveau_channel *chan, |
827 | int mthd, uint32_t data) | 834 | u32 class, u32 mthd, u32 data) |
828 | { | 835 | { |
829 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 836 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
830 | case 0x30: | 837 | case 0x30: |
@@ -838,8 +845,8 @@ nv04_graph_mthd_bind_beta1(struct nouveau_channel *chan, int grclass, | |||
838 | } | 845 | } |
839 | 846 | ||
840 | static int | 847 | static int |
841 | nv04_graph_mthd_bind_beta4(struct nouveau_channel *chan, int grclass, | 848 | nv04_graph_mthd_bind_beta4(struct nouveau_channel *chan, |
842 | int mthd, uint32_t data) | 849 | u32 class, u32 mthd, u32 data) |
843 | { | 850 | { |
844 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 851 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
845 | case 0x30: | 852 | case 0x30: |
@@ -853,8 +860,8 @@ nv04_graph_mthd_bind_beta4(struct nouveau_channel *chan, int grclass, | |||
853 | } | 860 | } |
854 | 861 | ||
855 | static int | 862 | static int |
856 | nv04_graph_mthd_bind_surf_dst(struct nouveau_channel *chan, int grclass, | 863 | nv04_graph_mthd_bind_surf_dst(struct nouveau_channel *chan, |
857 | int mthd, uint32_t data) | 864 | u32 class, u32 mthd, u32 data) |
858 | { | 865 | { |
859 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 866 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
860 | case 0x30: | 867 | case 0x30: |
@@ -868,8 +875,8 @@ nv04_graph_mthd_bind_surf_dst(struct nouveau_channel *chan, int grclass, | |||
868 | } | 875 | } |
869 | 876 | ||
870 | static int | 877 | static int |
871 | nv04_graph_mthd_bind_surf_src(struct nouveau_channel *chan, int grclass, | 878 | nv04_graph_mthd_bind_surf_src(struct nouveau_channel *chan, |
872 | int mthd, uint32_t data) | 879 | u32 class, u32 mthd, u32 data) |
873 | { | 880 | { |
874 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 881 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
875 | case 0x30: | 882 | case 0x30: |
@@ -883,8 +890,8 @@ nv04_graph_mthd_bind_surf_src(struct nouveau_channel *chan, int grclass, | |||
883 | } | 890 | } |
884 | 891 | ||
885 | static int | 892 | static int |
886 | nv04_graph_mthd_bind_surf_color(struct nouveau_channel *chan, int grclass, | 893 | nv04_graph_mthd_bind_surf_color(struct nouveau_channel *chan, |
887 | int mthd, uint32_t data) | 894 | u32 class, u32 mthd, u32 data) |
888 | { | 895 | { |
889 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 896 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
890 | case 0x30: | 897 | case 0x30: |
@@ -898,8 +905,8 @@ nv04_graph_mthd_bind_surf_color(struct nouveau_channel *chan, int grclass, | |||
898 | } | 905 | } |
899 | 906 | ||
900 | static int | 907 | static int |
901 | nv04_graph_mthd_bind_surf_zeta(struct nouveau_channel *chan, int grclass, | 908 | nv04_graph_mthd_bind_surf_zeta(struct nouveau_channel *chan, |
902 | int mthd, uint32_t data) | 909 | u32 class, u32 mthd, u32 data) |
903 | { | 910 | { |
904 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 911 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
905 | case 0x30: | 912 | case 0x30: |
@@ -913,8 +920,8 @@ nv04_graph_mthd_bind_surf_zeta(struct nouveau_channel *chan, int grclass, | |||
913 | } | 920 | } |
914 | 921 | ||
915 | static int | 922 | static int |
916 | nv04_graph_mthd_bind_clip(struct nouveau_channel *chan, int grclass, | 923 | nv04_graph_mthd_bind_clip(struct nouveau_channel *chan, |
917 | int mthd, uint32_t data) | 924 | u32 class, u32 mthd, u32 data) |
918 | { | 925 | { |
919 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 926 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
920 | case 0x30: | 927 | case 0x30: |
@@ -928,8 +935,8 @@ nv04_graph_mthd_bind_clip(struct nouveau_channel *chan, int grclass, | |||
928 | } | 935 | } |
929 | 936 | ||
930 | static int | 937 | static int |
931 | nv04_graph_mthd_bind_chroma(struct nouveau_channel *chan, int grclass, | 938 | nv04_graph_mthd_bind_chroma(struct nouveau_channel *chan, |
932 | int mthd, uint32_t data) | 939 | u32 class, u32 mthd, u32 data) |
933 | { | 940 | { |
934 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { | 941 | switch (nv_ri32(chan->dev, data << 4) & 0xff) { |
935 | case 0x30: | 942 | case 0x30: |
@@ -945,194 +952,259 @@ nv04_graph_mthd_bind_chroma(struct nouveau_channel *chan, int grclass, | |||
945 | return 1; | 952 | return 1; |
946 | } | 953 | } |
947 | 954 | ||
948 | static struct nouveau_pgraph_object_method nv04_graph_mthds_sw[] = { | 955 | static int |
949 | { 0x0150, nv04_graph_mthd_set_ref }, | 956 | nv04_graph_register(struct drm_device *dev) |
950 | {} | 957 | { |
951 | }; | 958 | struct drm_nouveau_private *dev_priv = dev->dev_private; |
952 | |||
953 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv03_gdirect[] = { | ||
954 | { 0x0184, nv04_graph_mthd_bind_nv01_patt }, | ||
955 | { 0x0188, nv04_graph_mthd_bind_rop }, | ||
956 | { 0x018c, nv04_graph_mthd_bind_beta1 }, | ||
957 | { 0x0190, nv04_graph_mthd_bind_surf_dst }, | ||
958 | { 0x02fc, nv04_graph_mthd_set_operation }, | ||
959 | {}, | ||
960 | }; | ||
961 | |||
962 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv04_gdirect[] = { | ||
963 | { 0x0188, nv04_graph_mthd_bind_nv04_patt }, | ||
964 | { 0x018c, nv04_graph_mthd_bind_rop }, | ||
965 | { 0x0190, nv04_graph_mthd_bind_beta1 }, | ||
966 | { 0x0194, nv04_graph_mthd_bind_beta4 }, | ||
967 | { 0x0198, nv04_graph_mthd_bind_surf2d }, | ||
968 | { 0x02fc, nv04_graph_mthd_set_operation }, | ||
969 | {}, | ||
970 | }; | ||
971 | |||
972 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv01_imageblit[] = { | ||
973 | { 0x0184, nv04_graph_mthd_bind_chroma }, | ||
974 | { 0x0188, nv04_graph_mthd_bind_clip }, | ||
975 | { 0x018c, nv04_graph_mthd_bind_nv01_patt }, | ||
976 | { 0x0190, nv04_graph_mthd_bind_rop }, | ||
977 | { 0x0194, nv04_graph_mthd_bind_beta1 }, | ||
978 | { 0x0198, nv04_graph_mthd_bind_surf_dst }, | ||
979 | { 0x019c, nv04_graph_mthd_bind_surf_src }, | ||
980 | { 0x02fc, nv04_graph_mthd_set_operation }, | ||
981 | {}, | ||
982 | }; | ||
983 | |||
984 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv04_imageblit_ifc[] = { | ||
985 | { 0x0184, nv04_graph_mthd_bind_chroma }, | ||
986 | { 0x0188, nv04_graph_mthd_bind_clip }, | ||
987 | { 0x018c, nv04_graph_mthd_bind_nv04_patt }, | ||
988 | { 0x0190, nv04_graph_mthd_bind_rop }, | ||
989 | { 0x0194, nv04_graph_mthd_bind_beta1 }, | ||
990 | { 0x0198, nv04_graph_mthd_bind_beta4 }, | ||
991 | { 0x019c, nv04_graph_mthd_bind_surf2d }, | ||
992 | { 0x02fc, nv04_graph_mthd_set_operation }, | ||
993 | {}, | ||
994 | }; | ||
995 | |||
996 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv04_iifc[] = { | ||
997 | { 0x0188, nv04_graph_mthd_bind_chroma }, | ||
998 | { 0x018c, nv04_graph_mthd_bind_clip }, | ||
999 | { 0x0190, nv04_graph_mthd_bind_nv04_patt }, | ||
1000 | { 0x0194, nv04_graph_mthd_bind_rop }, | ||
1001 | { 0x0198, nv04_graph_mthd_bind_beta1 }, | ||
1002 | { 0x019c, nv04_graph_mthd_bind_beta4 }, | ||
1003 | { 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf }, | ||
1004 | { 0x03e4, nv04_graph_mthd_set_operation }, | ||
1005 | {}, | ||
1006 | }; | ||
1007 | |||
1008 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv01_ifc[] = { | ||
1009 | { 0x0184, nv04_graph_mthd_bind_chroma }, | ||
1010 | { 0x0188, nv04_graph_mthd_bind_clip }, | ||
1011 | { 0x018c, nv04_graph_mthd_bind_nv01_patt }, | ||
1012 | { 0x0190, nv04_graph_mthd_bind_rop }, | ||
1013 | { 0x0194, nv04_graph_mthd_bind_beta1 }, | ||
1014 | { 0x0198, nv04_graph_mthd_bind_surf_dst }, | ||
1015 | { 0x02fc, nv04_graph_mthd_set_operation }, | ||
1016 | {}, | ||
1017 | }; | ||
1018 | |||
1019 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv03_sifc[] = { | ||
1020 | { 0x0184, nv04_graph_mthd_bind_chroma }, | ||
1021 | { 0x0188, nv04_graph_mthd_bind_nv01_patt }, | ||
1022 | { 0x018c, nv04_graph_mthd_bind_rop }, | ||
1023 | { 0x0190, nv04_graph_mthd_bind_beta1 }, | ||
1024 | { 0x0194, nv04_graph_mthd_bind_surf_dst }, | ||
1025 | { 0x02fc, nv04_graph_mthd_set_operation }, | ||
1026 | {}, | ||
1027 | }; | ||
1028 | |||
1029 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv04_sifc[] = { | ||
1030 | { 0x0184, nv04_graph_mthd_bind_chroma }, | ||
1031 | { 0x0188, nv04_graph_mthd_bind_nv04_patt }, | ||
1032 | { 0x018c, nv04_graph_mthd_bind_rop }, | ||
1033 | { 0x0190, nv04_graph_mthd_bind_beta1 }, | ||
1034 | { 0x0194, nv04_graph_mthd_bind_beta4 }, | ||
1035 | { 0x0198, nv04_graph_mthd_bind_surf2d }, | ||
1036 | { 0x02fc, nv04_graph_mthd_set_operation }, | ||
1037 | {}, | ||
1038 | }; | ||
1039 | |||
1040 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv03_sifm[] = { | ||
1041 | { 0x0188, nv04_graph_mthd_bind_nv01_patt }, | ||
1042 | { 0x018c, nv04_graph_mthd_bind_rop }, | ||
1043 | { 0x0190, nv04_graph_mthd_bind_beta1 }, | ||
1044 | { 0x0194, nv04_graph_mthd_bind_surf_dst }, | ||
1045 | { 0x0304, nv04_graph_mthd_set_operation }, | ||
1046 | {}, | ||
1047 | }; | ||
1048 | |||
1049 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv04_sifm[] = { | ||
1050 | { 0x0188, nv04_graph_mthd_bind_nv04_patt }, | ||
1051 | { 0x018c, nv04_graph_mthd_bind_rop }, | ||
1052 | { 0x0190, nv04_graph_mthd_bind_beta1 }, | ||
1053 | { 0x0194, nv04_graph_mthd_bind_beta4 }, | ||
1054 | { 0x0198, nv04_graph_mthd_bind_surf2d_swzsurf }, | ||
1055 | { 0x0304, nv04_graph_mthd_set_operation }, | ||
1056 | {}, | ||
1057 | }; | ||
1058 | |||
1059 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv01_shape[] = { | ||
1060 | { 0x0184, nv04_graph_mthd_bind_clip }, | ||
1061 | { 0x0188, nv04_graph_mthd_bind_nv01_patt }, | ||
1062 | { 0x018c, nv04_graph_mthd_bind_rop }, | ||
1063 | { 0x0190, nv04_graph_mthd_bind_beta1 }, | ||
1064 | { 0x0194, nv04_graph_mthd_bind_surf_dst }, | ||
1065 | { 0x02fc, nv04_graph_mthd_set_operation }, | ||
1066 | {}, | ||
1067 | }; | ||
1068 | |||
1069 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv04_shape[] = { | ||
1070 | { 0x0184, nv04_graph_mthd_bind_clip }, | ||
1071 | { 0x0188, nv04_graph_mthd_bind_nv04_patt }, | ||
1072 | { 0x018c, nv04_graph_mthd_bind_rop }, | ||
1073 | { 0x0190, nv04_graph_mthd_bind_beta1 }, | ||
1074 | { 0x0194, nv04_graph_mthd_bind_beta4 }, | ||
1075 | { 0x0198, nv04_graph_mthd_bind_surf2d }, | ||
1076 | { 0x02fc, nv04_graph_mthd_set_operation }, | ||
1077 | {}, | ||
1078 | }; | ||
1079 | |||
1080 | static struct nouveau_pgraph_object_method nv04_graph_mthds_nv03_tex_tri[] = { | ||
1081 | { 0x0188, nv04_graph_mthd_bind_clip }, | ||
1082 | { 0x018c, nv04_graph_mthd_bind_surf_color }, | ||
1083 | { 0x0190, nv04_graph_mthd_bind_surf_zeta }, | ||
1084 | {}, | ||
1085 | }; | ||
1086 | 959 | ||
1087 | static struct nouveau_pgraph_object_method nv04_graph_mthds_surf3d[] = { | 960 | if (dev_priv->engine.graph.registered) |
1088 | { 0x02f8, nv04_graph_mthd_surf3d_clip_h }, | 961 | return 0; |
1089 | { 0x02fc, nv04_graph_mthd_surf3d_clip_v }, | ||
1090 | {}, | ||
1091 | }; | ||
1092 | 962 | ||
1093 | struct nouveau_pgraph_object_class nv04_graph_grclass[] = { | 963 | /* dvd subpicture */ |
1094 | { 0x0038, NVOBJ_ENGINE_GR, NULL }, /* dvd subpicture */ | 964 | NVOBJ_CLASS(dev, 0x0038, GR); |
1095 | { 0x0039, NVOBJ_ENGINE_GR, NULL }, /* m2mf */ | 965 | |
1096 | { 0x004b, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv03_gdirect }, /* nv03 gdirect */ | 966 | /* m2mf */ |
1097 | { 0x004a, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv04_gdirect }, /* nv04 gdirect */ | 967 | NVOBJ_CLASS(dev, 0x0039, GR); |
1098 | { 0x001f, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv01_imageblit }, /* nv01 imageblit */ | 968 | |
1099 | { 0x005f, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv04_imageblit_ifc }, /* nv04 imageblit */ | 969 | /* nv03 gdirect */ |
1100 | { 0x0060, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv04_iifc }, /* nv04 iifc */ | 970 | NVOBJ_CLASS(dev, 0x004b, GR); |
1101 | { 0x0064, NVOBJ_ENGINE_GR, NULL }, /* nv05 iifc */ | 971 | NVOBJ_MTHD (dev, 0x004b, 0x0184, nv04_graph_mthd_bind_nv01_patt); |
1102 | { 0x0021, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv01_ifc }, /* nv01 ifc */ | 972 | NVOBJ_MTHD (dev, 0x004b, 0x0188, nv04_graph_mthd_bind_rop); |
1103 | { 0x0061, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv04_imageblit_ifc }, /* nv04 ifc */ | 973 | NVOBJ_MTHD (dev, 0x004b, 0x018c, nv04_graph_mthd_bind_beta1); |
1104 | { 0x0065, NVOBJ_ENGINE_GR, NULL }, /* nv05 ifc */ | 974 | NVOBJ_MTHD (dev, 0x004b, 0x0190, nv04_graph_mthd_bind_surf_dst); |
1105 | { 0x0036, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv03_sifc }, /* nv03 sifc */ | 975 | NVOBJ_MTHD (dev, 0x004b, 0x02fc, nv04_graph_mthd_set_operation); |
1106 | { 0x0076, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv04_sifc }, /* nv04 sifc */ | 976 | |
1107 | { 0x0066, NVOBJ_ENGINE_GR, NULL }, /* nv05 sifc */ | 977 | /* nv04 gdirect */ |
1108 | { 0x0037, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv03_sifm }, /* nv03 sifm */ | 978 | NVOBJ_CLASS(dev, 0x004a, GR); |
1109 | { 0x0077, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv04_sifm }, /* nv04 sifm */ | 979 | NVOBJ_MTHD (dev, 0x004a, 0x0188, nv04_graph_mthd_bind_nv04_patt); |
1110 | { 0x0030, NVOBJ_ENGINE_GR, NULL }, /* null */ | 980 | NVOBJ_MTHD (dev, 0x004a, 0x018c, nv04_graph_mthd_bind_rop); |
1111 | { 0x0042, NVOBJ_ENGINE_GR, NULL }, /* surf2d */ | 981 | NVOBJ_MTHD (dev, 0x004a, 0x0190, nv04_graph_mthd_bind_beta1); |
1112 | { 0x0043, NVOBJ_ENGINE_GR, NULL }, /* rop */ | 982 | NVOBJ_MTHD (dev, 0x004a, 0x0194, nv04_graph_mthd_bind_beta4); |
1113 | { 0x0012, NVOBJ_ENGINE_GR, NULL }, /* beta1 */ | 983 | NVOBJ_MTHD (dev, 0x004a, 0x0198, nv04_graph_mthd_bind_surf2d); |
1114 | { 0x0072, NVOBJ_ENGINE_GR, NULL }, /* beta4 */ | 984 | NVOBJ_MTHD (dev, 0x004a, 0x02fc, nv04_graph_mthd_set_operation); |
1115 | { 0x0019, NVOBJ_ENGINE_GR, NULL }, /* cliprect */ | 985 | |
1116 | { 0x0018, NVOBJ_ENGINE_GR, NULL }, /* nv01 pattern */ | 986 | /* nv01 imageblit */ |
1117 | { 0x0044, NVOBJ_ENGINE_GR, NULL }, /* nv04 pattern */ | 987 | NVOBJ_CLASS(dev, 0x001f, GR); |
1118 | { 0x0052, NVOBJ_ENGINE_GR, NULL }, /* swzsurf */ | 988 | NVOBJ_MTHD (dev, 0x001f, 0x0184, nv04_graph_mthd_bind_chroma); |
1119 | { 0x0053, NVOBJ_ENGINE_GR, nv04_graph_mthds_surf3d }, /* surf3d */ | 989 | NVOBJ_MTHD (dev, 0x001f, 0x0188, nv04_graph_mthd_bind_clip); |
1120 | { 0x0048, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv03_tex_tri }, /* nv03 tex_tri */ | 990 | NVOBJ_MTHD (dev, 0x001f, 0x018c, nv04_graph_mthd_bind_nv01_patt); |
1121 | { 0x0054, NVOBJ_ENGINE_GR, NULL }, /* tex_tri */ | 991 | NVOBJ_MTHD (dev, 0x001f, 0x0190, nv04_graph_mthd_bind_rop); |
1122 | { 0x0055, NVOBJ_ENGINE_GR, NULL }, /* multitex_tri */ | 992 | NVOBJ_MTHD (dev, 0x001f, 0x0194, nv04_graph_mthd_bind_beta1); |
1123 | { 0x0017, NVOBJ_ENGINE_GR, NULL }, /* nv01 chroma */ | 993 | NVOBJ_MTHD (dev, 0x001f, 0x0198, nv04_graph_mthd_bind_surf_dst); |
1124 | { 0x0057, NVOBJ_ENGINE_GR, NULL }, /* nv04 chroma */ | 994 | NVOBJ_MTHD (dev, 0x001f, 0x019c, nv04_graph_mthd_bind_surf_src); |
1125 | { 0x0058, NVOBJ_ENGINE_GR, NULL }, /* surf_dst */ | 995 | NVOBJ_MTHD (dev, 0x001f, 0x02fc, nv04_graph_mthd_set_operation); |
1126 | { 0x0059, NVOBJ_ENGINE_GR, NULL }, /* surf_src */ | 996 | |
1127 | { 0x005a, NVOBJ_ENGINE_GR, NULL }, /* surf_color */ | 997 | /* nv04 imageblit */ |
1128 | { 0x005b, NVOBJ_ENGINE_GR, NULL }, /* surf_zeta */ | 998 | NVOBJ_CLASS(dev, 0x005f, GR); |
1129 | { 0x001c, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv01_shape }, /* nv01 line */ | 999 | NVOBJ_MTHD (dev, 0x005f, 0x0184, nv04_graph_mthd_bind_chroma); |
1130 | { 0x005c, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv04_shape }, /* nv04 line */ | 1000 | NVOBJ_MTHD (dev, 0x005f, 0x0188, nv04_graph_mthd_bind_clip); |
1131 | { 0x001d, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv01_shape }, /* nv01 tri */ | 1001 | NVOBJ_MTHD (dev, 0x005f, 0x018c, nv04_graph_mthd_bind_nv04_patt); |
1132 | { 0x005d, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv04_shape }, /* nv04 tri */ | 1002 | NVOBJ_MTHD (dev, 0x005f, 0x0190, nv04_graph_mthd_bind_rop); |
1133 | { 0x001e, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv01_shape }, /* nv01 rect */ | 1003 | NVOBJ_MTHD (dev, 0x005f, 0x0194, nv04_graph_mthd_bind_beta1); |
1134 | { 0x005e, NVOBJ_ENGINE_GR, nv04_graph_mthds_nv04_shape }, /* nv04 rect */ | 1004 | NVOBJ_MTHD (dev, 0x005f, 0x0198, nv04_graph_mthd_bind_beta4); |
1135 | { 0x506e, NVOBJ_ENGINE_SW, nv04_graph_mthds_sw }, | 1005 | NVOBJ_MTHD (dev, 0x005f, 0x019c, nv04_graph_mthd_bind_surf2d); |
1136 | {} | 1006 | NVOBJ_MTHD (dev, 0x005f, 0x02fc, nv04_graph_mthd_set_operation); |
1007 | |||
1008 | /* nv04 iifc */ | ||
1009 | NVOBJ_CLASS(dev, 0x0060, GR); | ||
1010 | NVOBJ_MTHD (dev, 0x0060, 0x0188, nv04_graph_mthd_bind_chroma); | ||
1011 | NVOBJ_MTHD (dev, 0x0060, 0x018c, nv04_graph_mthd_bind_clip); | ||
1012 | NVOBJ_MTHD (dev, 0x0060, 0x0190, nv04_graph_mthd_bind_nv04_patt); | ||
1013 | NVOBJ_MTHD (dev, 0x0060, 0x0194, nv04_graph_mthd_bind_rop); | ||
1014 | NVOBJ_MTHD (dev, 0x0060, 0x0198, nv04_graph_mthd_bind_beta1); | ||
1015 | NVOBJ_MTHD (dev, 0x0060, 0x019c, nv04_graph_mthd_bind_beta4); | ||
1016 | NVOBJ_MTHD (dev, 0x0060, 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf); | ||
1017 | NVOBJ_MTHD (dev, 0x0060, 0x03e4, nv04_graph_mthd_set_operation); | ||
1018 | |||
1019 | /* nv05 iifc */ | ||
1020 | NVOBJ_CLASS(dev, 0x0064, GR); | ||
1021 | |||
1022 | /* nv01 ifc */ | ||
1023 | NVOBJ_CLASS(dev, 0x0021, GR); | ||
1024 | NVOBJ_MTHD (dev, 0x0021, 0x0184, nv04_graph_mthd_bind_chroma); | ||
1025 | NVOBJ_MTHD (dev, 0x0021, 0x0188, nv04_graph_mthd_bind_clip); | ||
1026 | NVOBJ_MTHD (dev, 0x0021, 0x018c, nv04_graph_mthd_bind_nv01_patt); | ||
1027 | NVOBJ_MTHD (dev, 0x0021, 0x0190, nv04_graph_mthd_bind_rop); | ||
1028 | NVOBJ_MTHD (dev, 0x0021, 0x0194, nv04_graph_mthd_bind_beta1); | ||
1029 | NVOBJ_MTHD (dev, 0x0021, 0x0198, nv04_graph_mthd_bind_surf_dst); | ||
1030 | NVOBJ_MTHD (dev, 0x0021, 0x02fc, nv04_graph_mthd_set_operation); | ||
1031 | |||
1032 | /* nv04 ifc */ | ||
1033 | NVOBJ_CLASS(dev, 0x0061, GR); | ||
1034 | NVOBJ_MTHD (dev, 0x0061, 0x0184, nv04_graph_mthd_bind_chroma); | ||
1035 | NVOBJ_MTHD (dev, 0x0061, 0x0188, nv04_graph_mthd_bind_clip); | ||
1036 | NVOBJ_MTHD (dev, 0x0061, 0x018c, nv04_graph_mthd_bind_nv04_patt); | ||
1037 | NVOBJ_MTHD (dev, 0x0061, 0x0190, nv04_graph_mthd_bind_rop); | ||
1038 | NVOBJ_MTHD (dev, 0x0061, 0x0194, nv04_graph_mthd_bind_beta1); | ||
1039 | NVOBJ_MTHD (dev, 0x0061, 0x0198, nv04_graph_mthd_bind_beta4); | ||
1040 | NVOBJ_MTHD (dev, 0x0061, 0x019c, nv04_graph_mthd_bind_surf2d); | ||
1041 | NVOBJ_MTHD (dev, 0x0061, 0x02fc, nv04_graph_mthd_set_operation); | ||
1042 | |||
1043 | /* nv05 ifc */ | ||
1044 | NVOBJ_CLASS(dev, 0x0065, GR); | ||
1045 | |||
1046 | /* nv03 sifc */ | ||
1047 | NVOBJ_CLASS(dev, 0x0036, GR); | ||
1048 | NVOBJ_MTHD (dev, 0x0036, 0x0184, nv04_graph_mthd_bind_chroma); | ||
1049 | NVOBJ_MTHD (dev, 0x0036, 0x0188, nv04_graph_mthd_bind_nv01_patt); | ||
1050 | NVOBJ_MTHD (dev, 0x0036, 0x018c, nv04_graph_mthd_bind_rop); | ||
1051 | NVOBJ_MTHD (dev, 0x0036, 0x0190, nv04_graph_mthd_bind_beta1); | ||
1052 | NVOBJ_MTHD (dev, 0x0036, 0x0194, nv04_graph_mthd_bind_surf_dst); | ||
1053 | NVOBJ_MTHD (dev, 0x0036, 0x02fc, nv04_graph_mthd_set_operation); | ||
1054 | |||
1055 | /* nv04 sifc */ | ||
1056 | NVOBJ_CLASS(dev, 0x0076, GR); | ||
1057 | NVOBJ_MTHD (dev, 0x0076, 0x0184, nv04_graph_mthd_bind_chroma); | ||
1058 | NVOBJ_MTHD (dev, 0x0076, 0x0188, nv04_graph_mthd_bind_nv04_patt); | ||
1059 | NVOBJ_MTHD (dev, 0x0076, 0x018c, nv04_graph_mthd_bind_rop); | ||
1060 | NVOBJ_MTHD (dev, 0x0076, 0x0190, nv04_graph_mthd_bind_beta1); | ||
1061 | NVOBJ_MTHD (dev, 0x0076, 0x0194, nv04_graph_mthd_bind_beta4); | ||
1062 | NVOBJ_MTHD (dev, 0x0076, 0x0198, nv04_graph_mthd_bind_surf2d); | ||
1063 | NVOBJ_MTHD (dev, 0x0076, 0x02fc, nv04_graph_mthd_set_operation); | ||
1064 | |||
1065 | /* nv05 sifc */ | ||
1066 | NVOBJ_CLASS(dev, 0x0066, GR); | ||
1067 | |||
1068 | /* nv03 sifm */ | ||
1069 | NVOBJ_CLASS(dev, 0x0037, GR); | ||
1070 | NVOBJ_MTHD (dev, 0x0037, 0x0188, nv04_graph_mthd_bind_nv01_patt); | ||
1071 | NVOBJ_MTHD (dev, 0x0037, 0x018c, nv04_graph_mthd_bind_rop); | ||
1072 | NVOBJ_MTHD (dev, 0x0037, 0x0190, nv04_graph_mthd_bind_beta1); | ||
1073 | NVOBJ_MTHD (dev, 0x0037, 0x0194, nv04_graph_mthd_bind_surf_dst); | ||
1074 | NVOBJ_MTHD (dev, 0x0037, 0x0304, nv04_graph_mthd_set_operation); | ||
1075 | |||
1076 | /* nv04 sifm */ | ||
1077 | NVOBJ_CLASS(dev, 0x0077, GR); | ||
1078 | NVOBJ_MTHD (dev, 0x0077, 0x0188, nv04_graph_mthd_bind_nv04_patt); | ||
1079 | NVOBJ_MTHD (dev, 0x0077, 0x018c, nv04_graph_mthd_bind_rop); | ||
1080 | NVOBJ_MTHD (dev, 0x0077, 0x0190, nv04_graph_mthd_bind_beta1); | ||
1081 | NVOBJ_MTHD (dev, 0x0077, 0x0194, nv04_graph_mthd_bind_beta4); | ||
1082 | NVOBJ_MTHD (dev, 0x0077, 0x0198, nv04_graph_mthd_bind_surf2d_swzsurf); | ||
1083 | NVOBJ_MTHD (dev, 0x0077, 0x0304, nv04_graph_mthd_set_operation); | ||
1084 | |||
1085 | /* null */ | ||
1086 | NVOBJ_CLASS(dev, 0x0030, GR); | ||
1087 | |||
1088 | /* surf2d */ | ||
1089 | NVOBJ_CLASS(dev, 0x0042, GR); | ||
1090 | |||
1091 | /* rop */ | ||
1092 | NVOBJ_CLASS(dev, 0x0043, GR); | ||
1093 | |||
1094 | /* beta1 */ | ||
1095 | NVOBJ_CLASS(dev, 0x0012, GR); | ||
1096 | |||
1097 | /* beta4 */ | ||
1098 | NVOBJ_CLASS(dev, 0x0072, GR); | ||
1099 | |||
1100 | /* cliprect */ | ||
1101 | NVOBJ_CLASS(dev, 0x0019, GR); | ||
1102 | |||
1103 | /* nv01 pattern */ | ||
1104 | NVOBJ_CLASS(dev, 0x0018, GR); | ||
1105 | |||
1106 | /* nv04 pattern */ | ||
1107 | NVOBJ_CLASS(dev, 0x0044, GR); | ||
1108 | |||
1109 | /* swzsurf */ | ||
1110 | NVOBJ_CLASS(dev, 0x0052, GR); | ||
1111 | |||
1112 | /* surf3d */ | ||
1113 | NVOBJ_CLASS(dev, 0x0053, GR); | ||
1114 | NVOBJ_MTHD (dev, 0x0053, 0x02f8, nv04_graph_mthd_surf3d_clip_h); | ||
1115 | NVOBJ_MTHD (dev, 0x0053, 0x02fc, nv04_graph_mthd_surf3d_clip_v); | ||
1116 | |||
1117 | /* nv03 tex_tri */ | ||
1118 | NVOBJ_CLASS(dev, 0x0048, GR); | ||
1119 | NVOBJ_MTHD (dev, 0x0048, 0x0188, nv04_graph_mthd_bind_clip); | ||
1120 | NVOBJ_MTHD (dev, 0x0048, 0x018c, nv04_graph_mthd_bind_surf_color); | ||
1121 | NVOBJ_MTHD (dev, 0x0048, 0x0190, nv04_graph_mthd_bind_surf_zeta); | ||
1122 | |||
1123 | /* tex_tri */ | ||
1124 | NVOBJ_CLASS(dev, 0x0054, GR); | ||
1125 | |||
1126 | /* multitex_tri */ | ||
1127 | NVOBJ_CLASS(dev, 0x0055, GR); | ||
1128 | |||
1129 | /* nv01 chroma */ | ||
1130 | NVOBJ_CLASS(dev, 0x0017, GR); | ||
1131 | |||
1132 | /* nv04 chroma */ | ||
1133 | NVOBJ_CLASS(dev, 0x0057, GR); | ||
1134 | |||
1135 | /* surf_dst */ | ||
1136 | NVOBJ_CLASS(dev, 0x0058, GR); | ||
1137 | |||
1138 | /* surf_src */ | ||
1139 | NVOBJ_CLASS(dev, 0x0059, GR); | ||
1140 | |||
1141 | /* surf_color */ | ||
1142 | NVOBJ_CLASS(dev, 0x005a, GR); | ||
1143 | |||
1144 | /* surf_zeta */ | ||
1145 | NVOBJ_CLASS(dev, 0x005b, GR); | ||
1146 | |||
1147 | /* nv01 line */ | ||
1148 | NVOBJ_CLASS(dev, 0x001c, GR); | ||
1149 | NVOBJ_MTHD (dev, 0x001c, 0x0184, nv04_graph_mthd_bind_clip); | ||
1150 | NVOBJ_MTHD (dev, 0x001c, 0x0188, nv04_graph_mthd_bind_nv01_patt); | ||
1151 | NVOBJ_MTHD (dev, 0x001c, 0x018c, nv04_graph_mthd_bind_rop); | ||
1152 | NVOBJ_MTHD (dev, 0x001c, 0x0190, nv04_graph_mthd_bind_beta1); | ||
1153 | NVOBJ_MTHD (dev, 0x001c, 0x0194, nv04_graph_mthd_bind_surf_dst); | ||
1154 | NVOBJ_MTHD (dev, 0x001c, 0x02fc, nv04_graph_mthd_set_operation); | ||
1155 | |||
1156 | /* nv04 line */ | ||
1157 | NVOBJ_CLASS(dev, 0x005c, GR); | ||
1158 | NVOBJ_MTHD (dev, 0x005c, 0x0184, nv04_graph_mthd_bind_clip); | ||
1159 | NVOBJ_MTHD (dev, 0x005c, 0x0188, nv04_graph_mthd_bind_nv04_patt); | ||
1160 | NVOBJ_MTHD (dev, 0x005c, 0x018c, nv04_graph_mthd_bind_rop); | ||
1161 | NVOBJ_MTHD (dev, 0x005c, 0x0190, nv04_graph_mthd_bind_beta1); | ||
1162 | NVOBJ_MTHD (dev, 0x005c, 0x0194, nv04_graph_mthd_bind_beta4); | ||
1163 | NVOBJ_MTHD (dev, 0x005c, 0x0198, nv04_graph_mthd_bind_surf2d); | ||
1164 | NVOBJ_MTHD (dev, 0x005c, 0x02fc, nv04_graph_mthd_set_operation); | ||
1165 | |||
1166 | /* nv01 tri */ | ||
1167 | NVOBJ_CLASS(dev, 0x001d, GR); | ||
1168 | NVOBJ_MTHD (dev, 0x001d, 0x0184, nv04_graph_mthd_bind_clip); | ||
1169 | NVOBJ_MTHD (dev, 0x001d, 0x0188, nv04_graph_mthd_bind_nv01_patt); | ||
1170 | NVOBJ_MTHD (dev, 0x001d, 0x018c, nv04_graph_mthd_bind_rop); | ||
1171 | NVOBJ_MTHD (dev, 0x001d, 0x0190, nv04_graph_mthd_bind_beta1); | ||
1172 | NVOBJ_MTHD (dev, 0x001d, 0x0194, nv04_graph_mthd_bind_surf_dst); | ||
1173 | NVOBJ_MTHD (dev, 0x001d, 0x02fc, nv04_graph_mthd_set_operation); | ||
1174 | |||
1175 | /* nv04 tri */ | ||
1176 | NVOBJ_CLASS(dev, 0x005d, GR); | ||
1177 | NVOBJ_MTHD (dev, 0x005d, 0x0184, nv04_graph_mthd_bind_clip); | ||
1178 | NVOBJ_MTHD (dev, 0x005d, 0x0188, nv04_graph_mthd_bind_nv04_patt); | ||
1179 | NVOBJ_MTHD (dev, 0x005d, 0x018c, nv04_graph_mthd_bind_rop); | ||
1180 | NVOBJ_MTHD (dev, 0x005d, 0x0190, nv04_graph_mthd_bind_beta1); | ||
1181 | NVOBJ_MTHD (dev, 0x005d, 0x0194, nv04_graph_mthd_bind_beta4); | ||
1182 | NVOBJ_MTHD (dev, 0x005d, 0x0198, nv04_graph_mthd_bind_surf2d); | ||
1183 | NVOBJ_MTHD (dev, 0x005d, 0x02fc, nv04_graph_mthd_set_operation); | ||
1184 | |||
1185 | /* nv01 rect */ | ||
1186 | NVOBJ_CLASS(dev, 0x001e, GR); | ||
1187 | NVOBJ_MTHD (dev, 0x001e, 0x0184, nv04_graph_mthd_bind_clip); | ||
1188 | NVOBJ_MTHD (dev, 0x001e, 0x0188, nv04_graph_mthd_bind_nv01_patt); | ||
1189 | NVOBJ_MTHD (dev, 0x001e, 0x018c, nv04_graph_mthd_bind_rop); | ||
1190 | NVOBJ_MTHD (dev, 0x001e, 0x0190, nv04_graph_mthd_bind_beta1); | ||
1191 | NVOBJ_MTHD (dev, 0x001e, 0x0194, nv04_graph_mthd_bind_surf_dst); | ||
1192 | NVOBJ_MTHD (dev, 0x001e, 0x02fc, nv04_graph_mthd_set_operation); | ||
1193 | |||
1194 | /* nv04 rect */ | ||
1195 | NVOBJ_CLASS(dev, 0x005e, GR); | ||
1196 | NVOBJ_MTHD (dev, 0x005e, 0x0184, nv04_graph_mthd_bind_clip); | ||
1197 | NVOBJ_MTHD (dev, 0x005e, 0x0188, nv04_graph_mthd_bind_nv04_patt); | ||
1198 | NVOBJ_MTHD (dev, 0x005e, 0x018c, nv04_graph_mthd_bind_rop); | ||
1199 | NVOBJ_MTHD (dev, 0x005e, 0x0190, nv04_graph_mthd_bind_beta1); | ||
1200 | NVOBJ_MTHD (dev, 0x005e, 0x0194, nv04_graph_mthd_bind_beta4); | ||
1201 | NVOBJ_MTHD (dev, 0x005e, 0x0198, nv04_graph_mthd_bind_surf2d); | ||
1202 | NVOBJ_MTHD (dev, 0x005e, 0x02fc, nv04_graph_mthd_set_operation); | ||
1203 | |||
1204 | /* nvsw */ | ||
1205 | NVOBJ_CLASS(dev, 0x506e, SW); | ||
1206 | NVOBJ_MTHD (dev, 0x506e, 0x0150, nv04_graph_mthd_set_ref); | ||
1207 | |||
1208 | dev_priv->engine.graph.registered = true; | ||
1209 | return 0; | ||
1137 | }; | 1210 | }; |
1138 | |||
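The nv04 hunk above replaces the old static nv04_graph_grclass[] table with a run of NVOBJ_CLASS()/NVOBJ_MTHD() calls, which record each class and its software methods on a per-device list at init time. Below is a standalone userspace sketch of that pattern, not the driver's code: the real list handling sits behind nouveau_gpuobj_class_new()/nouveau_gpuobj_mthd_new() in nouveau_object.c, which this page does not show, so every structure and function name in the sketch (struct obj_class, struct obj_mthd, class_new(), mthd_new(), mthd_call()) is an illustrative assumption.

/*
 * Standalone sketch (not driver code) of the registration pattern the
 * NVOBJ_CLASS()/NVOBJ_MTHD() macros rely on: a per-device list of object
 * classes, each carrying its own list of software methods.
 */
#include <stdio.h>
#include <stdlib.h>

struct obj_mthd {
        unsigned mthd;
        int (*exec)(unsigned grclass, unsigned mthd, unsigned data);
        struct obj_mthd *next;
};

struct obj_class {
        unsigned id;
        int engine;                    /* e.g. GR or SW */
        struct obj_mthd *mthds;
        struct obj_class *next;
};

static struct obj_class *classes;      /* stands in for dev_priv->classes */

static int class_new(unsigned id, int engine)
{
        struct obj_class *c = calloc(1, sizeof(*c));

        if (!c)
                return -1;             /* -ENOMEM in the kernel */
        c->id = id;
        c->engine = engine;
        c->next = classes;
        classes = c;
        return 0;
}

static int mthd_new(unsigned id, unsigned mthd,
                    int (*exec)(unsigned, unsigned, unsigned))
{
        struct obj_class *c;
        struct obj_mthd *m;

        for (c = classes; c; c = c->next) {
                if (c->id != id)
                        continue;
                m = calloc(1, sizeof(*m));
                if (!m)
                        return -1;
                m->mthd = mthd;
                m->exec = exec;
                m->next = c->mthds;
                c->mthds = m;
                return 0;
        }
        return -1;                     /* class was never registered */
}

/* what a trap handler would do when it sees a software method */
static int mthd_call(unsigned id, unsigned mthd, unsigned data)
{
        struct obj_class *c;
        struct obj_mthd *m;

        for (c = classes; c; c = c->next) {
                if (c->id != id)
                        continue;
                for (m = c->mthds; m; m = m->next)
                        if (m->mthd == mthd)
                                return m->exec(id, mthd, data);
        }
        return -1;
}

static int set_operation(unsigned grclass, unsigned mthd, unsigned data)
{
        printf("class %04x mthd %04x data %08x\n", grclass, mthd, data);
        return 0;
}

int main(void)
{
        /* mirrors NVOBJ_CLASS(dev, 0x005e, GR) and one NVOBJ_MTHD() above */
        class_new(0x005e, 1);
        mthd_new(0x005e, 0x02fc, set_operation);
        return mthd_call(0x005e, 0x02fc, 0x3);
}

The payoff of trading the array for calls is that registration can now branch on dev_priv->chipset, which is exactly what the nv10/nv20/nv40/nv50 hunks below do.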
diff --git a/drivers/gpu/drm/nouveau/nv10_graph.c b/drivers/gpu/drm/nouveau/nv10_graph.c index ed31a622889e..17c20dbff232 100644 --- a/drivers/gpu/drm/nouveau/nv10_graph.c +++ b/drivers/gpu/drm/nouveau/nv10_graph.c | |||
@@ -27,6 +27,8 @@ | |||
27 | #include "nouveau_drm.h" | 27 | #include "nouveau_drm.h" |
28 | #include "nouveau_drv.h" | 28 | #include "nouveau_drv.h" |
29 | 29 | ||
30 | static int nv10_graph_register(struct drm_device *); | ||
31 | |||
30 | #define NV10_FIFO_NUMBER 32 | 32 | #define NV10_FIFO_NUMBER 32 |
31 | 33 | ||
32 | struct pipe_state { | 34 | struct pipe_state { |
@@ -914,13 +916,17 @@ int nv10_graph_init(struct drm_device *dev) | |||
914 | { | 916 | { |
915 | struct drm_nouveau_private *dev_priv = dev->dev_private; | 917 | struct drm_nouveau_private *dev_priv = dev->dev_private; |
916 | uint32_t tmp; | 918 | uint32_t tmp; |
917 | int i; | 919 | int ret, i; |
918 | 920 | ||
919 | nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) & | 921 | nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) & |
920 | ~NV_PMC_ENABLE_PGRAPH); | 922 | ~NV_PMC_ENABLE_PGRAPH); |
921 | nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) | | 923 | nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) | |
922 | NV_PMC_ENABLE_PGRAPH); | 924 | NV_PMC_ENABLE_PGRAPH); |
923 | 925 | ||
926 | ret = nv10_graph_register(dev); | ||
927 | if (ret) | ||
928 | return ret; | ||
929 | |||
924 | nv_wr32(dev, NV03_PGRAPH_INTR , 0xFFFFFFFF); | 930 | nv_wr32(dev, NV03_PGRAPH_INTR , 0xFFFFFFFF); |
925 | nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF); | 931 | nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF); |
926 | 932 | ||
@@ -966,8 +972,8 @@ void nv10_graph_takedown(struct drm_device *dev) | |||
966 | } | 972 | } |
967 | 973 | ||
968 | static int | 974 | static int |
969 | nv17_graph_mthd_lma_window(struct nouveau_channel *chan, int grclass, | 975 | nv17_graph_mthd_lma_window(struct nouveau_channel *chan, |
970 | int mthd, uint32_t data) | 976 | u32 class, u32 mthd, u32 data) |
971 | { | 977 | { |
972 | struct drm_device *dev = chan->dev; | 978 | struct drm_device *dev = chan->dev; |
973 | struct graph_state *ctx = chan->pgraph_ctx; | 979 | struct graph_state *ctx = chan->pgraph_ctx; |
@@ -1046,8 +1052,8 @@ nv17_graph_mthd_lma_window(struct nouveau_channel *chan, int grclass, | |||
1046 | } | 1052 | } |
1047 | 1053 | ||
1048 | static int | 1054 | static int |
1049 | nv17_graph_mthd_lma_enable(struct nouveau_channel *chan, int grclass, | 1055 | nv17_graph_mthd_lma_enable(struct nouveau_channel *chan, |
1050 | int mthd, uint32_t data) | 1056 | u32 class, u32 mthd, u32 data) |
1051 | { | 1057 | { |
1052 | struct drm_device *dev = chan->dev; | 1058 | struct drm_device *dev = chan->dev; |
1053 | struct drm_nouveau_private *dev_priv = dev->dev_private; | 1059 | struct drm_nouveau_private *dev_priv = dev->dev_private; |
@@ -1065,36 +1071,48 @@ nv17_graph_mthd_lma_enable(struct nouveau_channel *chan, int grclass, | |||
1065 | return 0; | 1071 | return 0; |
1066 | } | 1072 | } |
1067 | 1073 | ||
1068 | static struct nouveau_pgraph_object_method nv17_graph_celsius_mthds[] = { | 1074 | static int |
1069 | { 0x1638, nv17_graph_mthd_lma_window }, | 1075 | nv10_graph_register(struct drm_device *dev) |
1070 | { 0x163c, nv17_graph_mthd_lma_window }, | 1076 | { |
1071 | { 0x1640, nv17_graph_mthd_lma_window }, | 1077 | struct drm_nouveau_private *dev_priv = dev->dev_private; |
1072 | { 0x1644, nv17_graph_mthd_lma_window }, | ||
1073 | { 0x1658, nv17_graph_mthd_lma_enable }, | ||
1074 | {} | ||
1075 | }; | ||
1076 | 1078 | ||
1077 | struct nouveau_pgraph_object_class nv10_graph_grclass[] = { | 1079 | if (dev_priv->engine.graph.registered) |
1078 | { 0x506e, NVOBJ_ENGINE_SW, NULL }, /* nvsw */ | 1080 | return 0; |
1079 | { 0x0030, NVOBJ_ENGINE_GR, NULL }, /* null */ | 1081 | |
1080 | { 0x0039, NVOBJ_ENGINE_GR, NULL }, /* m2mf */ | 1082 | NVOBJ_CLASS(dev, 0x506e, SW); /* nvsw */ |
1081 | { 0x004a, NVOBJ_ENGINE_GR, NULL }, /* gdirect */ | 1083 | NVOBJ_CLASS(dev, 0x0030, GR); /* null */ |
1082 | { 0x005f, NVOBJ_ENGINE_GR, NULL }, /* imageblit */ | 1084 | NVOBJ_CLASS(dev, 0x0039, GR); /* m2mf */ |
1083 | { 0x009f, NVOBJ_ENGINE_GR, NULL }, /* imageblit (nv12) */ | 1085 | NVOBJ_CLASS(dev, 0x004a, GR); /* gdirect */ |
1084 | { 0x008a, NVOBJ_ENGINE_GR, NULL }, /* ifc */ | 1086 | NVOBJ_CLASS(dev, 0x005f, GR); /* imageblit */ |
1085 | { 0x0089, NVOBJ_ENGINE_GR, NULL }, /* sifm */ | 1087 | NVOBJ_CLASS(dev, 0x009f, GR); /* imageblit (nv12) */ |
1086 | { 0x0062, NVOBJ_ENGINE_GR, NULL }, /* surf2d */ | 1088 | NVOBJ_CLASS(dev, 0x008a, GR); /* ifc */ |
1087 | { 0x0043, NVOBJ_ENGINE_GR, NULL }, /* rop */ | 1089 | NVOBJ_CLASS(dev, 0x0089, GR); /* sifm */ |
1088 | { 0x0012, NVOBJ_ENGINE_GR, NULL }, /* beta1 */ | 1090 | NVOBJ_CLASS(dev, 0x0062, GR); /* surf2d */ |
1089 | { 0x0072, NVOBJ_ENGINE_GR, NULL }, /* beta4 */ | 1091 | NVOBJ_CLASS(dev, 0x0043, GR); /* rop */ |
1090 | { 0x0019, NVOBJ_ENGINE_GR, NULL }, /* cliprect */ | 1092 | NVOBJ_CLASS(dev, 0x0012, GR); /* beta1 */ |
1091 | { 0x0044, NVOBJ_ENGINE_GR, NULL }, /* pattern */ | 1093 | NVOBJ_CLASS(dev, 0x0072, GR); /* beta4 */ |
1092 | { 0x0052, NVOBJ_ENGINE_GR, NULL }, /* swzsurf */ | 1094 | NVOBJ_CLASS(dev, 0x0019, GR); /* cliprect */ |
1093 | { 0x0093, NVOBJ_ENGINE_GR, NULL }, /* surf3d */ | 1095 | NVOBJ_CLASS(dev, 0x0044, GR); /* pattern */ |
1094 | { 0x0094, NVOBJ_ENGINE_GR, NULL }, /* tex_tri */ | 1096 | NVOBJ_CLASS(dev, 0x0052, GR); /* swzsurf */ |
1095 | { 0x0095, NVOBJ_ENGINE_GR, NULL }, /* multitex_tri */ | 1097 | NVOBJ_CLASS(dev, 0x0093, GR); /* surf3d */ |
1096 | { 0x0056, NVOBJ_ENGINE_GR, NULL }, /* celcius (nv10) */ | 1098 | NVOBJ_CLASS(dev, 0x0094, GR); /* tex_tri */ |
1097 | { 0x0096, NVOBJ_ENGINE_GR, NULL }, /* celcius (nv11) */ | 1099 | NVOBJ_CLASS(dev, 0x0095, GR); /* multitex_tri */ |
1098 | { 0x0099, NVOBJ_ENGINE_GR, nv17_graph_celsius_mthds }, /* celcius (nv17) */ | 1100 | |
1099 | {} | 1101 | /* celcius */ |
1100 | }; | 1102 | if (dev_priv->chipset <= 0x10) { |
1103 | NVOBJ_CLASS(dev, 0x0056, GR); | ||
1104 | } else | ||
1105 | if (dev_priv->chipset <= 0x17 || dev_priv->chipset == 0x1a) { | ||
1106 | NVOBJ_CLASS(dev, 0x0096, GR); | ||
1107 | } else { | ||
1108 | NVOBJ_CLASS(dev, 0x0099, GR); | ||
1109 | NVOBJ_MTHD (dev, 0x0099, 0x1638, nv17_graph_mthd_lma_window); | ||
1110 | NVOBJ_MTHD (dev, 0x0099, 0x163c, nv17_graph_mthd_lma_window); | ||
1111 | NVOBJ_MTHD (dev, 0x0099, 0x1640, nv17_graph_mthd_lma_window); | ||
1112 | NVOBJ_MTHD (dev, 0x0099, 0x1644, nv17_graph_mthd_lma_window); | ||
1113 | NVOBJ_MTHD (dev, 0x0099, 0x1658, nv17_graph_mthd_lma_enable); | ||
1114 | } | ||
1115 | |||
1116 | dev_priv->engine.graph.registered = true; | ||
1117 | return 0; | ||
1118 | } | ||
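nv10_graph_register() above is the first place the commit's point shows: only one celsius class is exposed, picked by chipset, instead of listing 0x0056, 0x0096 and 0x0099 unconditionally. The following standalone sketch mirrors just that branch; nv10_celsius_class() and the chipset list in main() are illustrative names, while the thresholds are copied from the hunk.

/* Standalone decode of the celsius selection in nv10_graph_register(). */
#include <stdio.h>

static unsigned nv10_celsius_class(unsigned chipset)
{
        if (chipset <= 0x10)
                return 0x0056;
        if (chipset <= 0x17 || chipset == 0x1a)
                return 0x0096;
        return 0x0099;  /* the only variant that gets the LMA methods */
}

int main(void)
{
        unsigned chipsets[] = { 0x10, 0x11, 0x15, 0x17, 0x1a, 0x18, 0x1f };

        for (unsigned i = 0; i < sizeof(chipsets) / sizeof(chipsets[0]); i++)
                printf("nv%02x -> celsius class 0x%04x\n",
                       chipsets[i], nv10_celsius_class(chipsets[i]));
        return 0;
}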
diff --git a/drivers/gpu/drm/nouveau/nv20_graph.c b/drivers/gpu/drm/nouveau/nv20_graph.c index 872f8d059694..7720bccb3c98 100644 --- a/drivers/gpu/drm/nouveau/nv20_graph.c +++ b/drivers/gpu/drm/nouveau/nv20_graph.c | |||
@@ -32,6 +32,9 @@ | |||
32 | #define NV34_GRCTX_SIZE (18140) | 32 | #define NV34_GRCTX_SIZE (18140) |
33 | #define NV35_36_GRCTX_SIZE (22396) | 33 | #define NV35_36_GRCTX_SIZE (22396) |
34 | 34 | ||
35 | static int nv20_graph_register(struct drm_device *); | ||
36 | static int nv30_graph_register(struct drm_device *); | ||
37 | |||
35 | static void | 38 | static void |
36 | nv20_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx) | 39 | nv20_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx) |
37 | { | 40 | { |
@@ -572,6 +575,12 @@ nv20_graph_init(struct drm_device *dev) | |||
572 | 575 | ||
573 | nv20_graph_rdi(dev); | 576 | nv20_graph_rdi(dev); |
574 | 577 | ||
578 | ret = nv20_graph_register(dev); | ||
579 | if (ret) { | ||
580 | nouveau_gpuobj_ref(NULL, &pgraph->ctx_table); | ||
581 | return ret; | ||
582 | } | ||
583 | |||
575 | nv_wr32(dev, NV03_PGRAPH_INTR , 0xFFFFFFFF); | 584 | nv_wr32(dev, NV03_PGRAPH_INTR , 0xFFFFFFFF); |
576 | nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF); | 585 | nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF); |
577 | 586 | ||
@@ -696,6 +705,12 @@ nv30_graph_init(struct drm_device *dev) | |||
696 | return ret; | 705 | return ret; |
697 | } | 706 | } |
698 | 707 | ||
708 | ret = nv30_graph_register(dev); | ||
709 | if (ret) { | ||
710 | nouveau_gpuobj_ref(NULL, &pgraph->ctx_table); | ||
711 | return ret; | ||
712 | } | ||
713 | |||
699 | nv_wr32(dev, NV20_PGRAPH_CHANNEL_CTX_TABLE, | 714 | nv_wr32(dev, NV20_PGRAPH_CHANNEL_CTX_TABLE, |
700 | pgraph->ctx_table->pinst >> 4); | 715 | pgraph->ctx_table->pinst >> 4); |
701 | 716 | ||
@@ -756,48 +771,76 @@ nv30_graph_init(struct drm_device *dev) | |||
756 | return 0; | 771 | return 0; |
757 | } | 772 | } |
758 | 773 | ||
759 | struct nouveau_pgraph_object_class nv20_graph_grclass[] = { | 774 | static int |
760 | { 0x506e, NVOBJ_ENGINE_SW, NULL }, /* nvsw */ | 775 | nv20_graph_register(struct drm_device *dev) |
761 | { 0x0030, NVOBJ_ENGINE_GR, NULL }, /* null */ | 776 | { |
762 | { 0x0039, NVOBJ_ENGINE_GR, NULL }, /* m2mf */ | 777 | struct drm_nouveau_private *dev_priv = dev->dev_private; |
763 | { 0x004a, NVOBJ_ENGINE_GR, NULL }, /* gdirect */ | 778 | |
764 | { 0x009f, NVOBJ_ENGINE_GR, NULL }, /* imageblit (nv12) */ | 779 | if (dev_priv->engine.graph.registered) |
765 | { 0x008a, NVOBJ_ENGINE_GR, NULL }, /* ifc */ | 780 | return 0; |
766 | { 0x0089, NVOBJ_ENGINE_GR, NULL }, /* sifm */ | ||
767 | { 0x0062, NVOBJ_ENGINE_GR, NULL }, /* surf2d */ | ||
768 | { 0x0043, NVOBJ_ENGINE_GR, NULL }, /* rop */ | ||
769 | { 0x0012, NVOBJ_ENGINE_GR, NULL }, /* beta1 */ | ||
770 | { 0x0072, NVOBJ_ENGINE_GR, NULL }, /* beta4 */ | ||
771 | { 0x0019, NVOBJ_ENGINE_GR, NULL }, /* cliprect */ | ||
772 | { 0x0044, NVOBJ_ENGINE_GR, NULL }, /* pattern */ | ||
773 | { 0x009e, NVOBJ_ENGINE_GR, NULL }, /* swzsurf */ | ||
774 | { 0x0096, NVOBJ_ENGINE_GR, NULL }, /* celcius */ | ||
775 | { 0x0097, NVOBJ_ENGINE_GR, NULL }, /* kelvin (nv20) */ | ||
776 | { 0x0597, NVOBJ_ENGINE_GR, NULL }, /* kelvin (nv25) */ | ||
777 | {} | ||
778 | }; | ||
779 | |||
780 | struct nouveau_pgraph_object_class nv30_graph_grclass[] = { | ||
781 | { 0x506e, NVOBJ_ENGINE_SW, NULL }, /* nvsw */ | ||
782 | { 0x0030, NVOBJ_ENGINE_GR, NULL }, /* null */ | ||
783 | { 0x0039, NVOBJ_ENGINE_GR, NULL }, /* m2mf */ | ||
784 | { 0x004a, NVOBJ_ENGINE_GR, NULL }, /* gdirect */ | ||
785 | { 0x009f, NVOBJ_ENGINE_GR, NULL }, /* imageblit (nv12) */ | ||
786 | { 0x008a, NVOBJ_ENGINE_GR, NULL }, /* ifc */ | ||
787 | { 0x038a, NVOBJ_ENGINE_GR, NULL }, /* ifc (nv30) */ | ||
788 | { 0x0089, NVOBJ_ENGINE_GR, NULL }, /* sifm */ | ||
789 | { 0x0389, NVOBJ_ENGINE_GR, NULL }, /* sifm (nv30) */ | ||
790 | { 0x0062, NVOBJ_ENGINE_GR, NULL }, /* surf2d */ | ||
791 | { 0x0362, NVOBJ_ENGINE_GR, NULL }, /* surf2d (nv30) */ | ||
792 | { 0x0043, NVOBJ_ENGINE_GR, NULL }, /* rop */ | ||
793 | { 0x0012, NVOBJ_ENGINE_GR, NULL }, /* beta1 */ | ||
794 | { 0x0072, NVOBJ_ENGINE_GR, NULL }, /* beta4 */ | ||
795 | { 0x0019, NVOBJ_ENGINE_GR, NULL }, /* cliprect */ | ||
796 | { 0x0044, NVOBJ_ENGINE_GR, NULL }, /* pattern */ | ||
797 | { 0x039e, NVOBJ_ENGINE_GR, NULL }, /* swzsurf */ | ||
798 | { 0x0397, NVOBJ_ENGINE_GR, NULL }, /* rankine (nv30) */ | ||
799 | { 0x0497, NVOBJ_ENGINE_GR, NULL }, /* rankine (nv35) */ | ||
800 | { 0x0697, NVOBJ_ENGINE_GR, NULL }, /* rankine (nv34) */ | ||
801 | {} | ||
802 | }; | ||
803 | 781 | ||
782 | NVOBJ_CLASS(dev, 0x506e, SW); /* nvsw */ | ||
783 | NVOBJ_CLASS(dev, 0x0030, GR); /* null */ | ||
784 | NVOBJ_CLASS(dev, 0x0039, GR); /* m2mf */ | ||
785 | NVOBJ_CLASS(dev, 0x004a, GR); /* gdirect */ | ||
786 | NVOBJ_CLASS(dev, 0x009f, GR); /* imageblit (nv12) */ | ||
787 | NVOBJ_CLASS(dev, 0x008a, GR); /* ifc */ | ||
788 | NVOBJ_CLASS(dev, 0x0089, GR); /* sifm */ | ||
789 | NVOBJ_CLASS(dev, 0x0062, GR); /* surf2d */ | ||
790 | NVOBJ_CLASS(dev, 0x0043, GR); /* rop */ | ||
791 | NVOBJ_CLASS(dev, 0x0012, GR); /* beta1 */ | ||
792 | NVOBJ_CLASS(dev, 0x0072, GR); /* beta4 */ | ||
793 | NVOBJ_CLASS(dev, 0x0019, GR); /* cliprect */ | ||
794 | NVOBJ_CLASS(dev, 0x0044, GR); /* pattern */ | ||
795 | NVOBJ_CLASS(dev, 0x009e, GR); /* swzsurf */ | ||
796 | NVOBJ_CLASS(dev, 0x0096, GR); /* celcius */ | ||
797 | |||
798 | /* kelvin */ | ||
799 | if (dev_priv->chipset < 0x25) | ||
800 | NVOBJ_CLASS(dev, 0x0097, GR); | ||
801 | else | ||
802 | NVOBJ_CLASS(dev, 0x0597, GR); | ||
803 | |||
804 | dev_priv->engine.graph.registered = true; | ||
805 | return 0; | ||
806 | } | ||
807 | |||
808 | static int | ||
809 | nv30_graph_register(struct drm_device *dev) | ||
810 | { | ||
811 | struct drm_nouveau_private *dev_priv = dev->dev_private; | ||
812 | |||
813 | if (dev_priv->engine.graph.registered) | ||
814 | return 0; | ||
815 | |||
816 | NVOBJ_CLASS(dev, 0x506e, SW); /* nvsw */ | ||
817 | NVOBJ_CLASS(dev, 0x0030, GR); /* null */ | ||
818 | NVOBJ_CLASS(dev, 0x0039, GR); /* m2mf */ | ||
819 | NVOBJ_CLASS(dev, 0x004a, GR); /* gdirect */ | ||
820 | NVOBJ_CLASS(dev, 0x009f, GR); /* imageblit (nv12) */ | ||
821 | NVOBJ_CLASS(dev, 0x008a, GR); /* ifc */ | ||
822 | NVOBJ_CLASS(dev, 0x038a, GR); /* ifc (nv30) */ | ||
823 | NVOBJ_CLASS(dev, 0x0089, GR); /* sifm */ | ||
824 | NVOBJ_CLASS(dev, 0x0389, GR); /* sifm (nv30) */ | ||
825 | NVOBJ_CLASS(dev, 0x0062, GR); /* surf2d */ | ||
826 | NVOBJ_CLASS(dev, 0x0362, GR); /* surf2d (nv30) */ | ||
827 | NVOBJ_CLASS(dev, 0x0043, GR); /* rop */ | ||
828 | NVOBJ_CLASS(dev, 0x0012, GR); /* beta1 */ | ||
829 | NVOBJ_CLASS(dev, 0x0072, GR); /* beta4 */ | ||
830 | NVOBJ_CLASS(dev, 0x0019, GR); /* cliprect */ | ||
831 | NVOBJ_CLASS(dev, 0x0044, GR); /* pattern */ | ||
832 | NVOBJ_CLASS(dev, 0x039e, GR); /* swzsurf */ | ||
833 | |||
834 | /* rankine */ | ||
835 | if (0x00000003 & (1 << (dev_priv->chipset & 0x0f))) | ||
836 | NVOBJ_CLASS(dev, 0x0397, GR); | ||
837 | else | ||
838 | if (0x00000010 & (1 << (dev_priv->chipset & 0x0f))) | ||
839 | NVOBJ_CLASS(dev, 0x0697, GR); | ||
840 | else | ||
841 | if (0x000001e0 & (1 << (dev_priv->chipset & 0x0f))) | ||
842 | NVOBJ_CLASS(dev, 0x0497, GR); | ||
843 | |||
844 | dev_priv->engine.graph.registered = true; | ||
845 | return 0; | ||
846 | } | ||
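nv30_graph_register() selects the rankine class with a bitmap over the low nibble of the chipset id: mask & (1 << (chipset & 0x0f)) is a set-membership test, with 0x00000003, 0x00000010 and 0x000001e0 encoding which NV3x parts get 0x0397, 0x0697 and 0x0497 respectively. The standalone sketch below just decodes those three masks; in_set() and the loop bounds are illustrative, the constants come from the hunk.

/* Standalone decode of the rankine selection in nv30_graph_register(). */
#include <stdio.h>

static int in_set(unsigned mask, unsigned chipset)
{
        return !!(mask & (1u << (chipset & 0x0f)));
}

int main(void)
{
        for (unsigned chipset = 0x30; chipset <= 0x3f; chipset++) {
                unsigned cls = 0;

                if (in_set(0x00000003, chipset))        /* nv30, nv31 */
                        cls = 0x0397;
                else if (in_set(0x00000010, chipset))   /* nv34 */
                        cls = 0x0697;
                else if (in_set(0x000001e0, chipset))   /* nv35/nv36 range */
                        cls = 0x0497;

                /* chipsets outside all three sets get no rankine class,
                 * matching the lack of a final else in the hunk */
                if (cls)
                        printf("nv%02x -> rankine 0x%04x\n", chipset, cls);
        }
        return 0;
}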
diff --git a/drivers/gpu/drm/nouveau/nv40_graph.c b/drivers/gpu/drm/nouveau/nv40_graph.c index 70d957895cea..b9361e28687c 100644 --- a/drivers/gpu/drm/nouveau/nv40_graph.c +++ b/drivers/gpu/drm/nouveau/nv40_graph.c | |||
@@ -29,6 +29,8 @@ | |||
29 | #include "nouveau_drv.h" | 29 | #include "nouveau_drv.h" |
30 | #include "nouveau_grctx.h" | 30 | #include "nouveau_grctx.h" |
31 | 31 | ||
32 | static int nv40_graph_register(struct drm_device *); | ||
33 | |||
32 | struct nouveau_channel * | 34 | struct nouveau_channel * |
33 | nv40_graph_channel(struct drm_device *dev) | 35 | nv40_graph_channel(struct drm_device *dev) |
34 | { | 36 | { |
@@ -248,7 +250,7 @@ nv40_graph_init(struct drm_device *dev) | |||
248 | struct nouveau_fb_engine *pfb = &dev_priv->engine.fb; | 250 | struct nouveau_fb_engine *pfb = &dev_priv->engine.fb; |
249 | struct nouveau_grctx ctx = {}; | 251 | struct nouveau_grctx ctx = {}; |
250 | uint32_t vramsz, *cp; | 252 | uint32_t vramsz, *cp; |
251 | int i, j; | 253 | int ret, i, j; |
252 | 254 | ||
253 | nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) & | 255 | nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) & |
254 | ~NV_PMC_ENABLE_PGRAPH); | 256 | ~NV_PMC_ENABLE_PGRAPH); |
@@ -272,6 +274,10 @@ nv40_graph_init(struct drm_device *dev) | |||
272 | 274 | ||
273 | kfree(cp); | 275 | kfree(cp); |
274 | 276 | ||
277 | ret = nv40_graph_register(dev); | ||
278 | if (ret) | ||
279 | return ret; | ||
280 | |||
275 | /* No context present currently */ | 281 | /* No context present currently */ |
276 | nv_wr32(dev, NV40_PGRAPH_CTXCTL_CUR, 0x00000000); | 282 | nv_wr32(dev, NV40_PGRAPH_CTXCTL_CUR, 0x00000000); |
277 | 283 | ||
@@ -408,25 +414,38 @@ void nv40_graph_takedown(struct drm_device *dev) | |||
408 | { | 414 | { |
409 | } | 415 | } |
410 | 416 | ||
411 | struct nouveau_pgraph_object_class nv40_graph_grclass[] = { | 417 | static int |
412 | { 0x506e, NVOBJ_ENGINE_SW, NULL }, /* nvsw */ | 418 | nv40_graph_register(struct drm_device *dev) |
413 | { 0x0030, NVOBJ_ENGINE_GR, NULL }, /* null */ | 419 | { |
414 | { 0x0039, NVOBJ_ENGINE_GR, NULL }, /* m2mf */ | 420 | struct drm_nouveau_private *dev_priv = dev->dev_private; |
415 | { 0x004a, NVOBJ_ENGINE_GR, NULL }, /* gdirect */ | 421 | |
416 | { 0x009f, NVOBJ_ENGINE_GR, NULL }, /* imageblit (nv12) */ | 422 | if (dev_priv->engine.graph.registered) |
417 | { 0x008a, NVOBJ_ENGINE_GR, NULL }, /* ifc */ | 423 | return 0; |
418 | { 0x0089, NVOBJ_ENGINE_GR, NULL }, /* sifm */ | ||
419 | { 0x3089, NVOBJ_ENGINE_GR, NULL }, /* sifm (nv40) */ | ||
420 | { 0x0062, NVOBJ_ENGINE_GR, NULL }, /* surf2d */ | ||
421 | { 0x3062, NVOBJ_ENGINE_GR, NULL }, /* surf2d (nv40) */ | ||
422 | { 0x0043, NVOBJ_ENGINE_GR, NULL }, /* rop */ | ||
423 | { 0x0012, NVOBJ_ENGINE_GR, NULL }, /* beta1 */ | ||
424 | { 0x0072, NVOBJ_ENGINE_GR, NULL }, /* beta4 */ | ||
425 | { 0x0019, NVOBJ_ENGINE_GR, NULL }, /* cliprect */ | ||
426 | { 0x0044, NVOBJ_ENGINE_GR, NULL }, /* pattern */ | ||
427 | { 0x309e, NVOBJ_ENGINE_GR, NULL }, /* swzsurf */ | ||
428 | { 0x4097, NVOBJ_ENGINE_GR, NULL }, /* curie (nv40) */ | ||
429 | { 0x4497, NVOBJ_ENGINE_GR, NULL }, /* curie (nv44) */ | ||
430 | {} | ||
431 | }; | ||
432 | 424 | ||
425 | NVOBJ_CLASS(dev, 0x506e, SW); /* nvsw */ | ||
426 | NVOBJ_CLASS(dev, 0x0030, GR); /* null */ | ||
427 | NVOBJ_CLASS(dev, 0x0039, GR); /* m2mf */ | ||
428 | NVOBJ_CLASS(dev, 0x004a, GR); /* gdirect */ | ||
429 | NVOBJ_CLASS(dev, 0x009f, GR); /* imageblit (nv12) */ | ||
430 | NVOBJ_CLASS(dev, 0x008a, GR); /* ifc */ | ||
431 | NVOBJ_CLASS(dev, 0x0089, GR); /* sifm */ | ||
432 | NVOBJ_CLASS(dev, 0x3089, GR); /* sifm (nv40) */ | ||
433 | NVOBJ_CLASS(dev, 0x0062, GR); /* surf2d */ | ||
434 | NVOBJ_CLASS(dev, 0x3062, GR); /* surf2d (nv40) */ | ||
435 | NVOBJ_CLASS(dev, 0x0043, GR); /* rop */ | ||
436 | NVOBJ_CLASS(dev, 0x0012, GR); /* beta1 */ | ||
437 | NVOBJ_CLASS(dev, 0x0072, GR); /* beta4 */ | ||
438 | NVOBJ_CLASS(dev, 0x0019, GR); /* cliprect */ | ||
439 | NVOBJ_CLASS(dev, 0x0044, GR); /* pattern */ | ||
440 | NVOBJ_CLASS(dev, 0x309e, GR); /* swzsurf */ | ||
441 | |||
442 | /* curie */ | ||
443 | if (dev_priv->chipset >= 0x60 || | ||
444 | 0x00005450 & (1 << (dev_priv->chipset & 0x0f))) | ||
445 | NVOBJ_CLASS(dev, 0x4497, GR); | ||
446 | else | ||
447 | NVOBJ_CLASS(dev, 0x4097, GR); | ||
448 | |||
449 | dev_priv->engine.graph.registered = true; | ||
450 | return 0; | ||
451 | } | ||
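The same nibble-bitmap idiom picks between the two curie classes in nv40_graph_register() above: 0x00005450 has bits 4, 6, 10, 12 and 14 set, so chipsets whose low nibble is 4, 6, 0xa, 0xc or 0xe (plus everything from 0x60 up) expose 0x4497 and the rest expose 0x4097. A standalone decode of that test follows; nv40_curie_class() is an illustrative name, not driver code.

/* Standalone decode of the curie selection in nv40_graph_register(). */
#include <stdio.h>

static unsigned nv40_curie_class(unsigned chipset)
{
        if (chipset >= 0x60 ||
            (0x00005450 & (1u << (chipset & 0x0f))))
                return 0x4497;
        return 0x4097;
}

int main(void)
{
        for (unsigned chipset = 0x40; chipset <= 0x4f; chipset++)
                printf("nv%02x -> curie 0x%04x\n",
                       chipset, nv40_curie_class(chipset));
        printf("nv63 -> curie 0x%04x\n", nv40_curie_class(0x63));
        return 0;
}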
diff --git a/drivers/gpu/drm/nouveau/nv50_graph.c b/drivers/gpu/drm/nouveau/nv50_graph.c index 01a598917e3c..84ca90e91811 100644 --- a/drivers/gpu/drm/nouveau/nv50_graph.c +++ b/drivers/gpu/drm/nouveau/nv50_graph.c | |||
@@ -30,6 +30,8 @@ | |||
30 | #include "nouveau_ramht.h" | 30 | #include "nouveau_ramht.h" |
31 | #include "nouveau_grctx.h" | 31 | #include "nouveau_grctx.h" |
32 | 32 | ||
33 | static int nv50_graph_register(struct drm_device *); | ||
34 | |||
33 | static void | 35 | static void |
34 | nv50_graph_init_reset(struct drm_device *dev) | 36 | nv50_graph_init_reset(struct drm_device *dev) |
35 | { | 37 | { |
@@ -145,12 +147,15 @@ nv50_graph_init(struct drm_device *dev) | |||
145 | nv50_graph_init_reset(dev); | 147 | nv50_graph_init_reset(dev); |
146 | nv50_graph_init_regs__nv(dev); | 148 | nv50_graph_init_regs__nv(dev); |
147 | nv50_graph_init_regs(dev); | 149 | nv50_graph_init_regs(dev); |
148 | nv50_graph_init_intr(dev); | ||
149 | 150 | ||
150 | ret = nv50_graph_init_ctxctl(dev); | 151 | ret = nv50_graph_init_ctxctl(dev); |
151 | if (ret) | 152 | if (ret) |
152 | return ret; | 153 | return ret; |
153 | 154 | ||
155 | ret = nv50_graph_register(dev); | ||
156 | if (ret) | ||
157 | return ret; | ||
158 | nv50_graph_init_intr(dev); | ||
154 | return 0; | 159 | return 0; |
155 | } | 160 | } |
156 | 161 | ||
@@ -333,8 +338,8 @@ nv50_graph_context_switch(struct drm_device *dev) | |||
333 | } | 338 | } |
334 | 339 | ||
335 | static int | 340 | static int |
336 | nv50_graph_nvsw_dma_vblsem(struct nouveau_channel *chan, int grclass, | 341 | nv50_graph_nvsw_dma_vblsem(struct nouveau_channel *chan, |
337 | int mthd, uint32_t data) | 342 | u32 class, u32 mthd, u32 data) |
338 | { | 343 | { |
339 | struct nouveau_gpuobj *gpuobj; | 344 | struct nouveau_gpuobj *gpuobj; |
340 | 345 | ||
@@ -351,8 +356,8 @@ nv50_graph_nvsw_dma_vblsem(struct nouveau_channel *chan, int grclass, | |||
351 | } | 356 | } |
352 | 357 | ||
353 | static int | 358 | static int |
354 | nv50_graph_nvsw_vblsem_offset(struct nouveau_channel *chan, int grclass, | 359 | nv50_graph_nvsw_vblsem_offset(struct nouveau_channel *chan, |
355 | int mthd, uint32_t data) | 360 | u32 class, u32 mthd, u32 data) |
356 | { | 361 | { |
357 | if (nouveau_notifier_offset(chan->nvsw.vblsem, &data)) | 362 | if (nouveau_notifier_offset(chan->nvsw.vblsem, &data)) |
358 | return -ERANGE; | 363 | return -ERANGE; |
@@ -362,16 +367,16 @@ nv50_graph_nvsw_vblsem_offset(struct nouveau_channel *chan, int grclass, | |||
362 | } | 367 | } |
363 | 368 | ||
364 | static int | 369 | static int |
365 | nv50_graph_nvsw_vblsem_release_val(struct nouveau_channel *chan, int grclass, | 370 | nv50_graph_nvsw_vblsem_release_val(struct nouveau_channel *chan, |
366 | int mthd, uint32_t data) | 371 | u32 class, u32 mthd, u32 data) |
367 | { | 372 | { |
368 | chan->nvsw.vblsem_rval = data; | 373 | chan->nvsw.vblsem_rval = data; |
369 | return 0; | 374 | return 0; |
370 | } | 375 | } |
371 | 376 | ||
372 | static int | 377 | static int |
373 | nv50_graph_nvsw_vblsem_release(struct nouveau_channel *chan, int grclass, | 378 | nv50_graph_nvsw_vblsem_release(struct nouveau_channel *chan, |
374 | int mthd, uint32_t data) | 379 | u32 class, u32 mthd, u32 data) |
375 | { | 380 | { |
376 | struct drm_device *dev = chan->dev; | 381 | struct drm_device *dev = chan->dev; |
377 | struct drm_nouveau_private *dev_priv = dev->dev_private; | 382 | struct drm_nouveau_private *dev_priv = dev->dev_private; |
@@ -392,27 +397,53 @@ nv50_graph_nvsw_vblsem_release(struct nouveau_channel *chan, int grclass, | |||
392 | return 0; | 397 | return 0; |
393 | } | 398 | } |
394 | 399 | ||
395 | static struct nouveau_pgraph_object_method nv50_graph_nvsw_methods[] = { | 400 | static int |
396 | { 0x018c, nv50_graph_nvsw_dma_vblsem }, | 401 | nv50_graph_register(struct drm_device *dev) |
397 | { 0x0400, nv50_graph_nvsw_vblsem_offset }, | 402 | { |
398 | { 0x0404, nv50_graph_nvsw_vblsem_release_val }, | 403 | struct drm_nouveau_private *dev_priv = dev->dev_private; |
399 | { 0x0408, nv50_graph_nvsw_vblsem_release }, | 404 | |
400 | {} | 405 | if (dev_priv->engine.graph.registered) |
401 | }; | 406 | return 0; |
402 | 407 | ||
403 | struct nouveau_pgraph_object_class nv50_graph_grclass[] = { | 408 | NVOBJ_CLASS(dev, 0x506e, SW); /* nvsw */ |
404 | { 0x506e, NVOBJ_ENGINE_SW, nv50_graph_nvsw_methods }, /* nvsw */ | 409 | NVOBJ_MTHD (dev, 0x506e, 0x018c, nv50_graph_nvsw_dma_vblsem); |
405 | { 0x0030, NVOBJ_ENGINE_GR, NULL }, /* null */ | 410 | NVOBJ_MTHD (dev, 0x506e, 0x0400, nv50_graph_nvsw_vblsem_offset); |
406 | { 0x5039, NVOBJ_ENGINE_GR, NULL }, /* m2mf */ | 411 | NVOBJ_MTHD (dev, 0x506e, 0x0404, nv50_graph_nvsw_vblsem_release_val); |
407 | { 0x502d, NVOBJ_ENGINE_GR, NULL }, /* 2d */ | 412 | NVOBJ_MTHD (dev, 0x506e, 0x0408, nv50_graph_nvsw_vblsem_release); |
408 | { 0x50c0, NVOBJ_ENGINE_GR, NULL }, /* compute */ | 413 | |
409 | { 0x85c0, NVOBJ_ENGINE_GR, NULL }, /* compute (nva3, nva5, nva8) */ | 414 | NVOBJ_CLASS(dev, 0x0030, GR); /* null */ |
410 | { 0x5097, NVOBJ_ENGINE_GR, NULL }, /* tesla (nv50) */ | 415 | NVOBJ_CLASS(dev, 0x5039, GR); /* m2mf */ |
411 | { 0x8297, NVOBJ_ENGINE_GR, NULL }, /* tesla (nv8x/nv9x) */ | 416 | NVOBJ_CLASS(dev, 0x502d, GR); /* 2d */ |
412 | { 0x8397, NVOBJ_ENGINE_GR, NULL }, /* tesla (nva0, nvaa, nvac) */ | 417 | NVOBJ_CLASS(dev, 0x50c0, GR); /* compute */ |
413 | { 0x8597, NVOBJ_ENGINE_GR, NULL }, /* tesla (nva3, nva5, nva8) */ | 418 | NVOBJ_CLASS(dev, 0x85c0, GR); /* compute (nva3, nva5, nva8) */ |
414 | {} | 419 | |
415 | }; | 420 | /* tesla */ |
421 | if (dev_priv->chipset == 0x50) | ||
422 | NVOBJ_CLASS(dev, 0x5097, GR); /* tesla (nv50) */ | ||
423 | else | ||
424 | if (dev_priv->chipset < 0xa0) | ||
425 | NVOBJ_CLASS(dev, 0x8297, GR); /* tesla (nv8x/nv9x) */ | ||
426 | else { | ||
427 | switch (dev_priv->chipset) { | ||
428 | case 0xa0: | ||
429 | case 0xaa: | ||
430 | case 0xac: | ||
431 | NVOBJ_CLASS(dev, 0x8397, GR); | ||
432 | break; | ||
433 | case 0xa3: | ||
434 | case 0xa5: | ||
435 | case 0xa8: | ||
436 | NVOBJ_CLASS(dev, 0x8597, GR); | ||
437 | break; | ||
438 | case 0xaf: | ||
439 | NVOBJ_CLASS(dev, 0x8697, GR); | ||
440 | break; | ||
441 | } | ||
442 | } | ||
443 | |||
444 | dev_priv->engine.graph.registered = true; | ||
445 | return 0; | ||
446 | } | ||
416 | 447 | ||
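nv50_graph_register() above keys the tesla class directly off the chipset: 0x5097 on nv50, 0x8297 below 0xa0, then a switch for the nva0+ parts (note the 0x8697 case for 0xaf, which the old static table did not carry at all). The standalone sketch below folds that if/switch into one lookup; nv50_tesla_class() and the chipset list in main() are illustrative only.

/* Standalone decode of the tesla selection in nv50_graph_register(). */
#include <stdio.h>

static unsigned nv50_tesla_class(unsigned chipset)
{
        if (chipset == 0x50)
                return 0x5097;
        if (chipset < 0xa0)
                return 0x8297;

        switch (chipset) {
        case 0xa0:
        case 0xaa:
        case 0xac:
                return 0x8397;
        case 0xa3:
        case 0xa5:
        case 0xa8:
                return 0x8597;
        case 0xaf:
                return 0x8697;
        default:
                return 0;       /* unhandled: no tesla class registered */
        }
}

int main(void)
{
        unsigned chipsets[] = { 0x50, 0x84, 0x92, 0x96,
                                0xa0, 0xa3, 0xa5, 0xaa, 0xaf };

        for (unsigned i = 0; i < sizeof(chipsets) / sizeof(chipsets[0]); i++)
                printf("nv%02x -> tesla 0x%04x\n",
                       chipsets[i], nv50_tesla_class(chipsets[i]));
        return 0;
}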
417 | void | 448 | void |
418 | nv50_graph_tlb_flush(struct drm_device *dev) | 449 | nv50_graph_tlb_flush(struct drm_device *dev) |