author	Ben Skeggs <bskeggs@redhat.com>	2010-10-19 00:18:06 -0400
committer	Ben Skeggs <bskeggs@redhat.com>	2010-12-03 00:10:54 -0500
commit	1e96268aca1bb40f42bdbc9d2293b123b072f1de (patch)
tree	e17f06ea78f5a38e2a14478872cc67a05be86fa9 /drivers/gpu
parent	b7bc613a4cc08d867b43189c2af0bb83b1fa1dc6 (diff)
drm/nv50: initial work to allow multiple evo channels
This doesn't work yet for unknown reasons.
Signed-off-by: Ben Skeggs <bskeggs@redhat.com>
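
The core of the change is a small id allocator: drm_nouveau_private gains an evo_alloc bitmask, nv50_evo_channel_new() claims the first free bit as the channel's id, nv50_evo_channel_del() releases it, and the per-channel PDISPLAY registers (NV50_PDISPLAY_EVO_CTRL(id) and friends) are indexed by that id instead of the hardcoded channel 0. A plain u32 bitmask is enough because the patch caps the search at five channels. Below is a minimal, self-contained sketch of that allocation scheme; it is illustrative only, and NR_EVO_CHANNELS, evo_id_get() and evo_id_put() are invented names, not part of the driver.

#include <stdio.h>

#define NR_EVO_CHANNELS 5	/* the patch searches ids 0..4 */

static unsigned int evo_alloc;	/* bit n set => channel id n is in use */

/* claim the lowest free id, as nv50_evo_channel_new() does with dev_priv->evo_alloc */
static int evo_id_get(void)
{
	int id;

	for (id = 0; id < NR_EVO_CHANNELS; id++) {
		if (evo_alloc & (1u << id))
			continue;		/* id already taken */
		evo_alloc |= (1u << id);
		return id;
	}
	return -1;				/* no free channel (-ENODEV in the patch) */
}

/* release an id, as nv50_evo_channel_del() does */
static void evo_id_put(int id)
{
	evo_alloc &= ~(1u << id);
}

int main(void)
{
	int primary = evo_id_get();	/* id 0: the primary modesetting channel */
	int second  = evo_id_get();	/* id 1: a further EVO channel */

	printf("allocated ids %d and %d\n", primary, second);
	evo_id_put(second);
	evo_id_put(primary);
	return 0;
}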
Diffstat (limited to 'drivers/gpu')
-rw-r--r--	drivers/gpu/drm/nouveau/nouveau_drv.h	1
-rw-r--r--	drivers/gpu/drm/nouveau/nouveau_ramht.c	2
-rw-r--r--	drivers/gpu/drm/nouveau/nv50_evo.c	246
3 files changed, 147 insertions, 102 deletions
diff --git a/drivers/gpu/drm/nouveau/nouveau_drv.h b/drivers/gpu/drm/nouveau/nouveau_drv.h
index d15bfd427267..bf34f25c7a54 100644
--- a/drivers/gpu/drm/nouveau/nouveau_drv.h
+++ b/drivers/gpu/drm/nouveau/nouveau_drv.h
@@ -689,6 +689,7 @@ struct drm_nouveau_private {
 	struct backlight_device *backlight;
 
 	struct nouveau_channel *evo;
+	u32 evo_alloc;
 	struct {
 		struct dcb_entry *dcb;
 		u16 script;
diff --git a/drivers/gpu/drm/nouveau/nouveau_ramht.c b/drivers/gpu/drm/nouveau/nouveau_ramht.c
index b4c63c058888..d4a2adc927d8 100644
--- a/drivers/gpu/drm/nouveau/nouveau_ramht.c
+++ b/drivers/gpu/drm/nouveau/nouveau_ramht.c
@@ -114,7 +114,7 @@ nouveau_ramht_insert(struct nouveau_channel *chan, u32 handle,
 		      (gpuobj->engine << NV40_RAMHT_CONTEXT_ENGINE_SHIFT);
 	} else {
 		if (gpuobj->engine == NVOBJ_ENGINE_DISPLAY) {
-			ctx = (gpuobj->cinst << 10) | 2;
+			ctx = (gpuobj->cinst << 10) | chan->id;
 		} else {
 			ctx = (gpuobj->cinst >> 4) |
 			      ((gpuobj->engine <<
diff --git a/drivers/gpu/drm/nouveau/nv50_evo.c b/drivers/gpu/drm/nouveau/nv50_evo.c
index 211f5fb1dc73..dbad233e750f 100644
--- a/drivers/gpu/drm/nouveau/nv50_evo.c
+++ b/drivers/gpu/drm/nouveau/nv50_evo.c
@@ -29,22 +29,26 @@
 #include "nouveau_ramht.h"
 
 static void
-nv50_evo_channel_del(struct nouveau_channel **pchan)
+nv50_evo_channel_del(struct nouveau_channel **pevo)
 {
-	struct nouveau_channel *chan = *pchan;
+	struct drm_nouveau_private *dev_priv;
+	struct nouveau_channel *evo = *pevo;
 
-	if (!chan)
+	if (!evo)
 		return;
-	*pchan = NULL;
+	*pevo = NULL;
 
-	nouveau_gpuobj_channel_takedown(chan);
-	nouveau_bo_unmap(chan->pushbuf_bo);
-	nouveau_bo_ref(NULL, &chan->pushbuf_bo);
+	dev_priv = evo->dev->dev_private;
+	dev_priv->evo_alloc &= ~(1 << evo->id);
 
-	if (chan->user)
-		iounmap(chan->user);
+	nouveau_gpuobj_channel_takedown(evo);
+	nouveau_bo_unmap(evo->pushbuf_bo);
+	nouveau_bo_ref(NULL, &evo->pushbuf_bo);
 
-	kfree(chan);
+	if (evo->user)
+		iounmap(evo->user);
+
+	kfree(evo);
 }
 
 int
@@ -56,7 +60,7 @@ nv50_evo_dmaobj_new(struct nouveau_channel *evo, u32 class, u32 name,
 	struct nouveau_gpuobj *obj = NULL;
 	int ret;
 
-	ret = nouveau_gpuobj_new(dev, evo, 6*4, 32, 0, &obj);
+	ret = nouveau_gpuobj_new(dev, dev_priv->evo, 6*4, 32, 0, &obj);
 	if (ret)
 		return ret;
 	obj->engine = NVOBJ_ENGINE_DISPLAY;
@@ -82,101 +86,63 @@ nv50_evo_dmaobj_new(struct nouveau_channel *evo, u32 class, u32 name,
 }
 
 static int
-nv50_evo_channel_new(struct drm_device *dev, struct nouveau_channel **pchan)
+nv50_evo_channel_new(struct drm_device *dev, struct nouveau_channel **pevo)
 {
 	struct drm_nouveau_private *dev_priv = dev->dev_private;
-	struct nouveau_gpuobj *ramht = NULL;
-	struct nouveau_channel *chan;
+	struct nouveau_channel *evo;
 	int ret;
 
-	chan = kzalloc(sizeof(struct nouveau_channel), GFP_KERNEL);
-	if (!chan)
+	evo = kzalloc(sizeof(struct nouveau_channel), GFP_KERNEL);
+	if (!evo)
 		return -ENOMEM;
-	*pchan = chan;
-
-	chan->id = -1;
-	chan->dev = dev;
-	chan->user_get = 4;
-	chan->user_put = 0;
-
-	ret = nouveau_gpuobj_new(dev, NULL, 32768, 0x1000,
-				 NVOBJ_FLAG_ZERO_ALLOC, &chan->ramin);
-	if (ret) {
-		NV_ERROR(dev, "Error allocating EVO channel memory: %d\n", ret);
-		nv50_evo_channel_del(pchan);
-		return ret;
-	}
-
-	ret = drm_mm_init(&chan->ramin_heap, 0, 32768);
-	if (ret) {
-		NV_ERROR(dev, "Error initialising EVO PRAMIN heap: %d\n", ret);
-		nv50_evo_channel_del(pchan);
-		return ret;
-	}
+	*pevo = evo;
 
-	ret = nouveau_gpuobj_new(dev, chan, 4096, 16, 0, &ramht);
-	if (ret) {
-		NV_ERROR(dev, "Unable to allocate EVO RAMHT: %d\n", ret);
-		nv50_evo_channel_del(pchan);
-		return ret;
-	}
+	for (evo->id = 0; evo->id < 5; evo->id++) {
+		if (dev_priv->evo_alloc & (1 << evo->id))
+			continue;
 
-	ret = nouveau_ramht_new(dev, ramht, &chan->ramht);
-	nouveau_gpuobj_ref(NULL, &ramht);
-	if (ret) {
-		nv50_evo_channel_del(pchan);
-		return ret;
+		dev_priv->evo_alloc |= (1 << evo->id);
+		break;
 	}
 
-	if (dev_priv->chipset != 0x50) {
-		ret = nv50_evo_dmaobj_new(chan, 0x3d, NvEvoFB16, 0x70, 0x19,
-					  0, 0xffffffff);
-		if (ret) {
-			nv50_evo_channel_del(pchan);
-			return ret;
-		}
-
-
-		ret = nv50_evo_dmaobj_new(chan, 0x3d, NvEvoFB32, 0x7a, 0x19,
-					  0, 0xffffffff);
-		if (ret) {
-			nv50_evo_channel_del(pchan);
-			return ret;
-		}
+	if (evo->id == 5) {
+		kfree(evo);
+		return -ENODEV;
 	}
 
-	ret = nv50_evo_dmaobj_new(chan, 0x3d, NvEvoVRAM, 0, 0x19,
-				  0, dev_priv->vram_size);
-	if (ret) {
-		nv50_evo_channel_del(pchan);
-		return ret;
-	}
+	evo->dev = dev;
+	evo->user_get = 4;
+	evo->user_put = 0;
 
 	ret = nouveau_bo_new(dev, NULL, 4096, 0, TTM_PL_FLAG_VRAM, 0, 0,
-			     false, true, &chan->pushbuf_bo);
+			     false, true, &evo->pushbuf_bo);
 	if (ret == 0)
-		ret = nouveau_bo_pin(chan->pushbuf_bo, TTM_PL_FLAG_VRAM);
+		ret = nouveau_bo_pin(evo->pushbuf_bo, TTM_PL_FLAG_VRAM);
 	if (ret) {
 		NV_ERROR(dev, "Error creating EVO DMA push buffer: %d\n", ret);
-		nv50_evo_channel_del(pchan);
+		nv50_evo_channel_del(pevo);
 		return ret;
 	}
 
-	ret = nouveau_bo_map(chan->pushbuf_bo);
+	ret = nouveau_bo_map(evo->pushbuf_bo);
 	if (ret) {
 		NV_ERROR(dev, "Error mapping EVO DMA push buffer: %d\n", ret);
-		nv50_evo_channel_del(pchan);
+		nv50_evo_channel_del(pevo);
 		return ret;
 	}
 
-	chan->user = ioremap(pci_resource_start(dev->pdev, 0) +
-			     NV50_PDISPLAY_USER(0), PAGE_SIZE);
-	if (!chan->user) {
+	evo->user = ioremap(pci_resource_start(dev->pdev, 0) +
+			    NV50_PDISPLAY_USER(evo->id), PAGE_SIZE);
+	if (!evo->user) {
 		NV_ERROR(dev, "Error mapping EVO control regs.\n");
-		nv50_evo_channel_del(pchan);
+		nv50_evo_channel_del(pevo);
 		return -ENOMEM;
 	}
 
+	/* bind primary evo channel's ramht to the channel */
+	if (dev_priv->evo && evo != dev_priv->evo)
+		nouveau_ramht_ref(dev_priv->evo->ramht, &evo->ramht, NULL);
+
 	return 0;
 }
 
@@ -186,7 +152,7 @@ nv50_evo_channel_init(struct nouveau_channel *evo)
 	struct drm_nouveau_private *dev_priv = evo->dev->dev_private;
 	struct nouveau_timer_engine *ptimer = &dev_priv->engine.timer;
 	struct drm_device *dev = evo->dev;
-	int ret, i;
+	int id = evo->id, ret, i;
 	u64 start;
 	u32 tmp;
 
@@ -194,13 +160,13 @@ nv50_evo_channel_init(struct nouveau_channel *evo)
 	 * stuck in some unspecified state
 	 */
 	start = ptimer->read(dev);
-	nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(0), 0x2b00);
-	while ((tmp = nv_rd32(dev, NV50_PDISPLAY_EVO_CTRL(0))) & 0x1e0000) {
+	nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(id), 0x2b00);
+	while ((tmp = nv_rd32(dev, NV50_PDISPLAY_EVO_CTRL(id))) & 0x1e0000) {
 		if ((tmp & 0x9f0000) == 0x20000)
-			nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(0), tmp | 0x800000);
+			nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(id), tmp | 0x800000);
 
 		if ((tmp & 0x3f0000) == 0x30000)
-			nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(0), tmp | 0x200000);
+			nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(id), tmp | 0x200000);
 
 		if (ptimer->read(dev) - start > 1000000000ULL) {
 			NV_ERROR(dev, "timeout: (0x610200 & 0x1e0000) != 0\n");
@@ -209,36 +175,37 @@ nv50_evo_channel_init(struct nouveau_channel *evo)
 		}
 	}
 
-	nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(0), 0x1000b03);
-	if (!nv_wait(dev, NV50_PDISPLAY_EVO_CTRL(0),
+	nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(id), 0x1000b03);
+	if (!nv_wait(dev, NV50_PDISPLAY_EVO_CTRL(id),
 		     0x40000000, 0x40000000)) {
 		NV_ERROR(dev, "timeout: (0x610200 & 0x40000000) == 0x40000000\n");
 		NV_ERROR(dev, "0x610200 = 0x%08x\n",
-			 nv_rd32(dev, NV50_PDISPLAY_EVO_CTRL(0)));
+			 nv_rd32(dev, NV50_PDISPLAY_EVO_CTRL(id)));
 		return -EBUSY;
 	}
 
 	/* initialise fifo */
-	nv_wr32(dev, NV50_PDISPLAY_EVO_DMA_CB(0),
+	nv_wr32(dev, NV50_PDISPLAY_EVO_DMA_CB(id),
 		((evo->pushbuf_bo->bo.mem.start << PAGE_SHIFT) >> 8) |
 		NV50_PDISPLAY_EVO_DMA_CB_LOCATION_VRAM |
 		NV50_PDISPLAY_EVO_DMA_CB_VALID);
-	nv_wr32(dev, NV50_PDISPLAY_EVO_UNK2(0), 0x00010000);
-	nv_wr32(dev, NV50_PDISPLAY_EVO_HASH_TAG(0), 0x00000002);
-	if (!nv_wait(dev, 0x610200, 0x80000000, 0x00000000)) {
+	nv_wr32(dev, NV50_PDISPLAY_EVO_UNK2(id), 0x00010000);
+	nv_wr32(dev, NV50_PDISPLAY_EVO_HASH_TAG(id), id);
+	if (!nv_wait(dev, NV50_PDISPLAY_EVO_CTRL(id), 0x80000000, 0x00000000)) {
 		NV_ERROR(dev, "timeout: (0x610200 & 0x80000000) == 0\n");
-		NV_ERROR(dev, "0x610200 = 0x%08x\n", nv_rd32(dev, 0x610200));
+		NV_ERROR(dev, "0x610200 = 0x%08x\n",
+			 nv_rd32(dev, NV50_PDISPLAY_EVO_CTRL(id)));
 		return -EBUSY;
 	}
-	nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(0),
-		(nv_rd32(dev, NV50_PDISPLAY_EVO_CTRL(0)) & ~0x00000003) |
+	nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(id),
+		(nv_rd32(dev, NV50_PDISPLAY_EVO_CTRL(id)) & ~0x00000003) |
 		NV50_PDISPLAY_EVO_CTRL_DMA_ENABLED);
-	nv_wr32(dev, NV50_PDISPLAY_USER_PUT(0), 0);
-	nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(0), 0x01000003 |
+	nv_wr32(dev, NV50_PDISPLAY_USER_PUT(id), 0);
+	nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(id), 0x01000003 |
 		NV50_PDISPLAY_EVO_CTRL_DMA_ENABLED);
 
 	/* enable error reporting on the channel */
-	nv_mask(dev, 0x610028, 0x00000000, 0x00010001 << 0);
+	nv_mask(dev, 0x610028, 0x00000000, 0x00010001 << id);
 
 	evo->dma.max = (4096/4) - 2;
 	evo->dma.put = 0;
@@ -260,12 +227,89 @@ nv50_evo_channel_fini(struct nouveau_channel *evo)
 {
 	struct drm_device *dev = evo->dev;
 
-	nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(0), 0);
-	if (!nv_wait(dev, NV50_PDISPLAY_EVO_CTRL(0), 0x1e0000, 0)) {
+	nv_wr32(dev, NV50_PDISPLAY_EVO_CTRL(evo->id), 0);
+	if (!nv_wait(dev, NV50_PDISPLAY_EVO_CTRL(evo->id), 0x1e0000, 0)) {
 		NV_ERROR(dev, "timeout: (0x610200 & 0x1e0000) == 0\n");
 		NV_ERROR(dev, "0x610200 = 0x%08x\n",
-			 nv_rd32(dev, NV50_PDISPLAY_EVO_CTRL(0)));
+			 nv_rd32(dev, NV50_PDISPLAY_EVO_CTRL(evo->id)));
+	}
+}
+
+static int
+nv50_evo_create(struct drm_device *dev)
+{
+	struct drm_nouveau_private *dev_priv = dev->dev_private;
+	struct nouveau_gpuobj *ramht = NULL;
+	struct nouveau_channel *evo;
+	int ret;
+
+	/* create primary evo channel, the one we use for modesetting
+	 * purporses
+	 */
+	ret = nv50_evo_channel_new(dev, &dev_priv->evo);
+	if (ret)
+		return ret;
+	evo = dev_priv->evo;
+
+	/* setup object management on it, any other evo channel will
+	 * use this also as there's no per-channel support on the
+	 * hardware
+	 */
+	ret = nouveau_gpuobj_new(dev, NULL, 32768, 0x1000,
+				 NVOBJ_FLAG_ZERO_ALLOC, &evo->ramin);
+	if (ret) {
+		NV_ERROR(dev, "Error allocating EVO channel memory: %d\n", ret);
+		nv50_evo_channel_del(&dev_priv->evo);
+		return ret;
+	}
+
+	ret = drm_mm_init(&evo->ramin_heap, 0, 32768);
+	if (ret) {
+		NV_ERROR(dev, "Error initialising EVO PRAMIN heap: %d\n", ret);
+		nv50_evo_channel_del(&dev_priv->evo);
+		return ret;
+	}
+
+	ret = nouveau_gpuobj_new(dev, evo, 4096, 16, 0, &ramht);
+	if (ret) {
+		NV_ERROR(dev, "Unable to allocate EVO RAMHT: %d\n", ret);
+		nv50_evo_channel_del(&dev_priv->evo);
+		return ret;
+	}
+
+	ret = nouveau_ramht_new(dev, ramht, &evo->ramht);
+	nouveau_gpuobj_ref(NULL, &ramht);
+	if (ret) {
+		nv50_evo_channel_del(&dev_priv->evo);
+		return ret;
+	}
+
+	/* create some default objects for the scanout memtypes we support */
+	if (dev_priv->chipset != 0x50) {
+		ret = nv50_evo_dmaobj_new(evo, 0x3d, NvEvoFB16, 0x70, 0x19,
+					  0, 0xffffffff);
+		if (ret) {
+			nv50_evo_channel_del(&dev_priv->evo);
+			return ret;
+		}
+
+
+		ret = nv50_evo_dmaobj_new(evo, 0x3d, NvEvoFB32, 0x7a, 0x19,
+					  0, 0xffffffff);
+		if (ret) {
+			nv50_evo_channel_del(&dev_priv->evo);
+			return ret;
+		}
+	}
+
+	ret = nv50_evo_dmaobj_new(evo, 0x3d, NvEvoVRAM, 0, 0x19,
+				  0, dev_priv->vram_size);
+	if (ret) {
+		nv50_evo_channel_del(&dev_priv->evo);
+		return ret;
 	}
+
+	return 0;
 }
 
 int
@@ -275,7 +319,7 @@ nv50_evo_init(struct drm_device *dev)
 	int ret;
 
 	if (!dev_priv->evo) {
-		ret = nv50_evo_channel_new(dev, &dev_priv->evo);
+		ret = nv50_evo_create(dev);
 		if (ret)
 			return ret;
 	}