author    Dave Airlie <airlied@redhat.com>    2008-05-28 20:09:59 -0400
committer Dave Airlie <airlied@redhat.com>    2008-07-13 20:45:01 -0400
commit    c0e09200dc0813972442e550a5905a132768e56c (patch)
tree      d38e635a30ff8b0a2b98b9d7f97cab1501f8209e /drivers/gpu/drm/mga/mga_state.c
parent    bce7f793daec3e65ec5c5705d2457b81fe7b5725 (diff)
drm: reorganise drm tree to be more future proof.
With the coming of kernel based modesetting and the memory manager stuff, the everything in one directory approach was getting very ugly and starting to be unmanageable. This restructures the drm along the lines of other kernel components. It creates a drivers/gpu/drm directory and moves the hw drivers into subdirectores. It moves the includes into an include/drm, and sets up the unifdef for the userspace headers we should be exporting. Signed-off-by: Dave Airlie <airlied@redhat.com>
Diffstat (limited to 'drivers/gpu/drm/mga/mga_state.c')
-rw-r--r--  drivers/gpu/drm/mga/mga_state.c | 1104
1 file changed, 1104 insertions(+), 0 deletions(-)
diff --git a/drivers/gpu/drm/mga/mga_state.c b/drivers/gpu/drm/mga/mga_state.c
new file mode 100644
index 00000000000..d3f8aade07b
--- /dev/null
+++ b/drivers/gpu/drm/mga/mga_state.c
@@ -0,0 +1,1104 @@
1/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
2 * Created: Thu Jan 27 02:53:43 2000 by jhartmann@precisioninsight.com
3 *
4 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
5 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
6 * All Rights Reserved.
7 *
8 * Permission is hereby granted, free of charge, to any person obtaining a
9 * copy of this software and associated documentation files (the "Software"),
10 * to deal in the Software without restriction, including without limitation
11 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
12 * and/or sell copies of the Software, and to permit persons to whom the
13 * Software is furnished to do so, subject to the following conditions:
14 *
15 * The above copyright notice and this permission notice (including the next
16 * paragraph) shall be included in all copies or substantial portions of the
17 * Software.
18 *
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
22 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
23 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
24 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
25 * OTHER DEALINGS IN THE SOFTWARE.
26 *
27 * Authors:
28 * Jeff Hartmann <jhartmann@valinux.com>
29 * Keith Whitwell <keith@tungstengraphics.com>
30 *
31 * Rewritten by:
32 * Gareth Hughes <gareth@valinux.com>
33 */
34
35#include "drmP.h"
36#include "drm.h"
37#include "mga_drm.h"
38#include "mga_drv.h"
39
40/* ================================================================
41 * DMA hardware state programming functions
42 */
43
44static void mga_emit_clip_rect(drm_mga_private_t * dev_priv,
45 struct drm_clip_rect * box)
46{
47 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
48 drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
49 unsigned int pitch = dev_priv->front_pitch;
50 DMA_LOCALS;
51
52 BEGIN_DMA(2);
53
54 /* Force reset of DWGCTL on G400 (eliminates clip disable bit).
55 */
56 if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
57 DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
58 MGA_LEN + MGA_EXEC, 0x80000000,
59 MGA_DWGCTL, ctx->dwgctl,
60 MGA_LEN + MGA_EXEC, 0x80000000);
61 }
62 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
63 MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
64 MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);
65
66 ADVANCE_DMA();
67}
68
69static __inline__ void mga_g200_emit_context(drm_mga_private_t * dev_priv)
70{
71 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
72 drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
73 DMA_LOCALS;
74
75 BEGIN_DMA(3);
76
77 DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
78 MGA_MACCESS, ctx->maccess,
79 MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);
80
81 DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
82 MGA_FOGCOL, ctx->fogcolor,
83 MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);
84
85 DMA_BLOCK(MGA_FCOL, ctx->fcol,
86 MGA_DMAPAD, 0x00000000,
87 MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);
88
89 ADVANCE_DMA();
90}
91
92static __inline__ void mga_g400_emit_context(drm_mga_private_t * dev_priv)
93{
94 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
95 drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
96 DMA_LOCALS;
97
98 BEGIN_DMA(4);
99
100 DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
101 MGA_MACCESS, ctx->maccess,
102 MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);
103
104 DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
105 MGA_FOGCOL, ctx->fogcolor,
106 MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);
107
108 DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
109 MGA_TDUALSTAGE0, ctx->tdualstage0,
110 MGA_TDUALSTAGE1, ctx->tdualstage1, MGA_FCOL, ctx->fcol);
111
112 DMA_BLOCK(MGA_STENCIL, ctx->stencil,
113 MGA_STENCILCTL, ctx->stencilctl,
114 MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);
115
116 ADVANCE_DMA();
117}
118
119static __inline__ void mga_g200_emit_tex0(drm_mga_private_t * dev_priv)
120{
121 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
122 drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
123 DMA_LOCALS;
124
125 BEGIN_DMA(4);
126
127 DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
128 MGA_TEXCTL, tex->texctl,
129 MGA_TEXFILTER, tex->texfilter,
130 MGA_TEXBORDERCOL, tex->texbordercol);
131
132 DMA_BLOCK(MGA_TEXORG, tex->texorg,
133 MGA_TEXORG1, tex->texorg1,
134 MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);
135
136 DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
137 MGA_TEXWIDTH, tex->texwidth,
138 MGA_TEXHEIGHT, tex->texheight, MGA_WR24, tex->texwidth);
139
140 DMA_BLOCK(MGA_WR34, tex->texheight,
141 MGA_TEXTRANS, 0x0000ffff,
142 MGA_TEXTRANSHIGH, 0x0000ffff, MGA_DMAPAD, 0x00000000);
143
144 ADVANCE_DMA();
145}
146
147static __inline__ void mga_g400_emit_tex0(drm_mga_private_t * dev_priv)
148{
149 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
150 drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
151 DMA_LOCALS;
152
153/* printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
154/* tex->texctl, tex->texctl2); */
155
156 BEGIN_DMA(6);
157
158 DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
159 MGA_TEXCTL, tex->texctl,
160 MGA_TEXFILTER, tex->texfilter,
161 MGA_TEXBORDERCOL, tex->texbordercol);
162
163 DMA_BLOCK(MGA_TEXORG, tex->texorg,
164 MGA_TEXORG1, tex->texorg1,
165 MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);
166
167 DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
168 MGA_TEXWIDTH, tex->texwidth,
169 MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);
170
171 DMA_BLOCK(MGA_WR57, 0x00000000,
172 MGA_WR53, 0x00000000,
173 MGA_WR61, 0x00000000, MGA_WR52, MGA_G400_WR_MAGIC);
174
175 DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
176 MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
177 MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
178 MGA_DMAPAD, 0x00000000);
179
180 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
181 MGA_DMAPAD, 0x00000000,
182 MGA_TEXTRANS, 0x0000ffff, MGA_TEXTRANSHIGH, 0x0000ffff);
183
184 ADVANCE_DMA();
185}
186
187static __inline__ void mga_g400_emit_tex1(drm_mga_private_t * dev_priv)
188{
189 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
190 drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];
191 DMA_LOCALS;
192
193/* printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg, */
194/* tex->texctl, tex->texctl2); */
195
196 BEGIN_DMA(5);
197
198 DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
199 MGA_MAP1_ENABLE |
200 MGA_G400_TC2_MAGIC),
201 MGA_TEXCTL, tex->texctl,
202 MGA_TEXFILTER, tex->texfilter,
203 MGA_TEXBORDERCOL, tex->texbordercol);
204
205 DMA_BLOCK(MGA_TEXORG, tex->texorg,
206 MGA_TEXORG1, tex->texorg1,
207 MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);
208
209 DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
210 MGA_TEXWIDTH, tex->texwidth,
211 MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);
212
213 DMA_BLOCK(MGA_WR57, 0x00000000,
214 MGA_WR53, 0x00000000,
215 MGA_WR61, 0x00000000,
216 MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);
217
218 DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
219 MGA_TEXTRANS, 0x0000ffff,
220 MGA_TEXTRANSHIGH, 0x0000ffff,
221 MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);
222
223 ADVANCE_DMA();
224}
225
226static __inline__ void mga_g200_emit_pipe(drm_mga_private_t * dev_priv)
227{
228 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
229 unsigned int pipe = sarea_priv->warp_pipe;
230 DMA_LOCALS;
231
232 BEGIN_DMA(3);
233
234 DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
235 MGA_WVRTXSZ, 0x00000007,
236 MGA_WFLAG, 0x00000000, MGA_WR24, 0x00000000);
237
238 DMA_BLOCK(MGA_WR25, 0x00000100,
239 MGA_WR34, 0x00000000,
240 MGA_WR42, 0x0000ffff, MGA_WR60, 0x0000ffff);
241
242 /* Padding required due to hardware bug.
243 */
244 DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
245 MGA_DMAPAD, 0xffffffff,
246 MGA_DMAPAD, 0xffffffff,
247 MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
248 MGA_WMODE_START | dev_priv->wagp_enable));
249
250 ADVANCE_DMA();
251}
252
253static __inline__ void mga_g400_emit_pipe(drm_mga_private_t * dev_priv)
254{
255 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
256 unsigned int pipe = sarea_priv->warp_pipe;
257 DMA_LOCALS;
258
259/* printk("mga_g400_emit_pipe %x\n", pipe); */
260
261 BEGIN_DMA(10);
262
263 DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
264 MGA_DMAPAD, 0x00000000,
265 MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);
266
267 if (pipe & MGA_T2) {
268 DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
269 MGA_DMAPAD, 0x00000000,
270 MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);
271
272 DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
273 MGA_WACCEPTSEQ, 0x00000000,
274 MGA_WACCEPTSEQ, 0x00000000,
275 MGA_WACCEPTSEQ, 0x1e000000);
276 } else {
277 if (dev_priv->warp_pipe & MGA_T2) {
278 /* Flush the WARP pipe */
279 DMA_BLOCK(MGA_YDST, 0x00000000,
280 MGA_FXLEFT, 0x00000000,
281 MGA_FXRIGHT, 0x00000001,
282 MGA_DWGCTL, MGA_DWGCTL_FLUSH);
283
284 DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
285 MGA_DWGSYNC, 0x00007000,
286 MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
287 MGA_LEN + MGA_EXEC, 0x00000000);
288
289 DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
290 MGA_G400_TC2_MAGIC),
291 MGA_LEN + MGA_EXEC, 0x00000000,
292 MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
293 MGA_DMAPAD, 0x00000000);
294 }
295
296 DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
297 MGA_DMAPAD, 0x00000000,
298 MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);
299
300 DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
301 MGA_WACCEPTSEQ, 0x00000000,
302 MGA_WACCEPTSEQ, 0x00000000,
303 MGA_WACCEPTSEQ, 0x18000000);
304 }
305
306 DMA_BLOCK(MGA_WFLAG, 0x00000000,
307 MGA_WFLAG1, 0x00000000,
308 MGA_WR56, MGA_G400_WR56_MAGIC, MGA_DMAPAD, 0x00000000);
309
310 DMA_BLOCK(MGA_WR49, 0x00000000, /* tex0 */
311 MGA_WR57, 0x00000000, /* tex0 */
312 MGA_WR53, 0x00000000, /* tex1 */
313 MGA_WR61, 0x00000000); /* tex1 */
314
315 DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC, /* tex0 width */
316 MGA_WR62, MGA_G400_WR_MAGIC, /* tex0 height */
317 MGA_WR52, MGA_G400_WR_MAGIC, /* tex1 width */
318 MGA_WR60, MGA_G400_WR_MAGIC); /* tex1 height */
319
320 /* Padding required due to hardware bug */
321 DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
322 MGA_DMAPAD, 0xffffffff,
323 MGA_DMAPAD, 0xffffffff,
324 MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
325 MGA_WMODE_START | dev_priv->wagp_enable));
326
327 ADVANCE_DMA();
328}
329
330static void mga_g200_emit_state(drm_mga_private_t * dev_priv)
331{
332 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
333 unsigned int dirty = sarea_priv->dirty;
334
335 if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
336 mga_g200_emit_pipe(dev_priv);
337 dev_priv->warp_pipe = sarea_priv->warp_pipe;
338 }
339
340 if (dirty & MGA_UPLOAD_CONTEXT) {
341 mga_g200_emit_context(dev_priv);
342 sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
343 }
344
345 if (dirty & MGA_UPLOAD_TEX0) {
346 mga_g200_emit_tex0(dev_priv);
347 sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
348 }
349}
350
351static void mga_g400_emit_state(drm_mga_private_t * dev_priv)
352{
353 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
354 unsigned int dirty = sarea_priv->dirty;
355 int multitex = sarea_priv->warp_pipe & MGA_T2;
356
357 if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
358 mga_g400_emit_pipe(dev_priv);
359 dev_priv->warp_pipe = sarea_priv->warp_pipe;
360 }
361
362 if (dirty & MGA_UPLOAD_CONTEXT) {
363 mga_g400_emit_context(dev_priv);
364 sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
365 }
366
367 if (dirty & MGA_UPLOAD_TEX0) {
368 mga_g400_emit_tex0(dev_priv);
369 sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
370 }
371
372 if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
373 mga_g400_emit_tex1(dev_priv);
374 sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
375 }
376}
377
378/* ================================================================
379 * SAREA state verification
380 */
381
382/* Disallow all write destinations except the front and back buffers.
383 */
384static int mga_verify_context(drm_mga_private_t * dev_priv)
385{
386 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
387 drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
388
389 if (ctx->dstorg != dev_priv->front_offset &&
390 ctx->dstorg != dev_priv->back_offset) {
391 DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
392 ctx->dstorg, dev_priv->front_offset,
393 dev_priv->back_offset);
394 ctx->dstorg = 0;
395 return -EINVAL;
396 }
397
398 return 0;
399}
400
401/* Disallow texture reads from PCI space.
402 */
403static int mga_verify_tex(drm_mga_private_t * dev_priv, int unit)
404{
405 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
406 drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
407 unsigned int org;
408
409 org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);
410
411 if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
412 DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
413 tex->texorg = 0;
414 return -EINVAL;
415 }
416
417 return 0;
418}
419
420static int mga_verify_state(drm_mga_private_t * dev_priv)
421{
422 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
423 unsigned int dirty = sarea_priv->dirty;
424 int ret = 0;
425
426 if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
427 sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;
428
429 if (dirty & MGA_UPLOAD_CONTEXT)
430 ret |= mga_verify_context(dev_priv);
431
432 if (dirty & MGA_UPLOAD_TEX0)
433 ret |= mga_verify_tex(dev_priv, 0);
434
435 if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
436 if (dirty & MGA_UPLOAD_TEX1)
437 ret |= mga_verify_tex(dev_priv, 1);
438
439 if (dirty & MGA_UPLOAD_PIPE)
440 ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
441 } else {
442 if (dirty & MGA_UPLOAD_PIPE)
443 ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
444 }
445
446 return (ret == 0);
447}
448
449static int mga_verify_iload(drm_mga_private_t * dev_priv,
450 unsigned int dstorg, unsigned int length)
451{
452 if (dstorg < dev_priv->texture_offset ||
453 dstorg + length > (dev_priv->texture_offset +
454 dev_priv->texture_size)) {
455 DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
456 return -EINVAL;
457 }
458
459 if (length & MGA_ILOAD_MASK) {
460 DRM_ERROR("*** bad iload length: 0x%x\n",
461 length & MGA_ILOAD_MASK);
462 return -EINVAL;
463 }
464
465 return 0;
466}
467
468static int mga_verify_blit(drm_mga_private_t * dev_priv,
469 unsigned int srcorg, unsigned int dstorg)
470{
471 if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
472 (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
473 DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
474 return -EINVAL;
475 }
476 return 0;
477}
478
479/* ================================================================
480 *
481 */
482
483static void mga_dma_dispatch_clear(struct drm_device * dev, drm_mga_clear_t * clear)
484{
485 drm_mga_private_t *dev_priv = dev->dev_private;
486 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
487 drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
488 struct drm_clip_rect *pbox = sarea_priv->boxes;
489 int nbox = sarea_priv->nbox;
490 int i;
491 DMA_LOCALS;
492 DRM_DEBUG("\n");
493
494 BEGIN_DMA(1);
495
496 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
497 MGA_DMAPAD, 0x00000000,
498 MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);
499
500 ADVANCE_DMA();
501
502 for (i = 0; i < nbox; i++) {
503 struct drm_clip_rect *box = &pbox[i];
504 u32 height = box->y2 - box->y1;
505
506 DRM_DEBUG(" from=%d,%d to=%d,%d\n",
507 box->x1, box->y1, box->x2, box->y2);
508
509 if (clear->flags & MGA_FRONT) {
510 BEGIN_DMA(2);
511
512 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
513 MGA_PLNWT, clear->color_mask,
514 MGA_YDSTLEN, (box->y1 << 16) | height,
515 MGA_FXBNDRY, (box->x2 << 16) | box->x1);
516
517 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
518 MGA_FCOL, clear->clear_color,
519 MGA_DSTORG, dev_priv->front_offset,
520 MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);
521
522 ADVANCE_DMA();
523 }
524
525 if (clear->flags & MGA_BACK) {
526 BEGIN_DMA(2);
527
528 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
529 MGA_PLNWT, clear->color_mask,
530 MGA_YDSTLEN, (box->y1 << 16) | height,
531 MGA_FXBNDRY, (box->x2 << 16) | box->x1);
532
533 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
534 MGA_FCOL, clear->clear_color,
535 MGA_DSTORG, dev_priv->back_offset,
536 MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);
537
538 ADVANCE_DMA();
539 }
540
541 if (clear->flags & MGA_DEPTH) {
542 BEGIN_DMA(2);
543
544 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
545 MGA_PLNWT, clear->depth_mask,
546 MGA_YDSTLEN, (box->y1 << 16) | height,
547 MGA_FXBNDRY, (box->x2 << 16) | box->x1);
548
549 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
550 MGA_FCOL, clear->clear_depth,
551 MGA_DSTORG, dev_priv->depth_offset,
552 MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);
553
554 ADVANCE_DMA();
555 }
556
557 }
558
559 BEGIN_DMA(1);
560
561 /* Force reset of DWGCTL */
562 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
563 MGA_DMAPAD, 0x00000000,
564 MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);
565
566 ADVANCE_DMA();
567
568 FLUSH_DMA();
569}
570
571static void mga_dma_dispatch_swap(struct drm_device * dev)
572{
573 drm_mga_private_t *dev_priv = dev->dev_private;
574 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
575 drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
576 struct drm_clip_rect *pbox = sarea_priv->boxes;
577 int nbox = sarea_priv->nbox;
578 int i;
579 DMA_LOCALS;
580 DRM_DEBUG("\n");
581
582 sarea_priv->last_frame.head = dev_priv->prim.tail;
583 sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;
584
585 BEGIN_DMA(4 + nbox);
586
587 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
588 MGA_DMAPAD, 0x00000000,
589 MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);
590
591 DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
592 MGA_MACCESS, dev_priv->maccess,
593 MGA_SRCORG, dev_priv->back_offset,
594 MGA_AR5, dev_priv->front_pitch);
595
596 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
597 MGA_DMAPAD, 0x00000000,
598 MGA_PLNWT, 0xffffffff, MGA_DWGCTL, MGA_DWGCTL_COPY);
599
600 for (i = 0; i < nbox; i++) {
601 struct drm_clip_rect *box = &pbox[i];
602 u32 height = box->y2 - box->y1;
603 u32 start = box->y1 * dev_priv->front_pitch;
604
605 DRM_DEBUG(" from=%d,%d to=%d,%d\n",
606 box->x1, box->y1, box->x2, box->y2);
607
608 DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
609 MGA_AR3, start + box->x1,
610 MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
611 MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);
612 }
613
614 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
615 MGA_PLNWT, ctx->plnwt,
616 MGA_SRCORG, dev_priv->front_offset, MGA_DWGCTL, ctx->dwgctl);
617
618 ADVANCE_DMA();
619
620 FLUSH_DMA();
621
622 DRM_DEBUG("... done.\n");
623}
624
625static void mga_dma_dispatch_vertex(struct drm_device * dev, struct drm_buf * buf)
626{
627 drm_mga_private_t *dev_priv = dev->dev_private;
628 drm_mga_buf_priv_t *buf_priv = buf->dev_private;
629 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
630 u32 address = (u32) buf->bus_address;
631 u32 length = (u32) buf->used;
632 int i = 0;
633 DMA_LOCALS;
634 DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);
635
636 if (buf->used) {
637 buf_priv->dispatched = 1;
638
639 MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);
640
641 do {
642 if (i < sarea_priv->nbox) {
643 mga_emit_clip_rect(dev_priv,
644 &sarea_priv->boxes[i]);
645 }
646
647 BEGIN_DMA(1);
648
649 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
650 MGA_DMAPAD, 0x00000000,
651 MGA_SECADDRESS, (address |
652 MGA_DMA_VERTEX),
653 MGA_SECEND, ((address + length) |
654 dev_priv->dma_access));
655
656 ADVANCE_DMA();
657 } while (++i < sarea_priv->nbox);
658 }
659
660 if (buf_priv->discard) {
661 AGE_BUFFER(buf_priv);
662 buf->pending = 0;
663 buf->used = 0;
664 buf_priv->dispatched = 0;
665
666 mga_freelist_put(dev, buf);
667 }
668
669 FLUSH_DMA();
670}
671
672static void mga_dma_dispatch_indices(struct drm_device * dev, struct drm_buf * buf,
673 unsigned int start, unsigned int end)
674{
675 drm_mga_private_t *dev_priv = dev->dev_private;
676 drm_mga_buf_priv_t *buf_priv = buf->dev_private;
677 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
678 u32 address = (u32) buf->bus_address;
679 int i = 0;
680 DMA_LOCALS;
681 DRM_DEBUG("buf=%d start=%d end=%d\n", buf->idx, start, end);
682
683 if (start != end) {
684 buf_priv->dispatched = 1;
685
686 MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);
687
688 do {
689 if (i < sarea_priv->nbox) {
690 mga_emit_clip_rect(dev_priv,
691 &sarea_priv->boxes[i]);
692 }
693
694 BEGIN_DMA(1);
695
696 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
697 MGA_DMAPAD, 0x00000000,
698 MGA_SETUPADDRESS, address + start,
699 MGA_SETUPEND, ((address + end) |
700 dev_priv->dma_access));
701
702 ADVANCE_DMA();
703 } while (++i < sarea_priv->nbox);
704 }
705
706 if (buf_priv->discard) {
707 AGE_BUFFER(buf_priv);
708 buf->pending = 0;
709 buf->used = 0;
710 buf_priv->dispatched = 0;
711
712 mga_freelist_put(dev, buf);
713 }
714
715 FLUSH_DMA();
716}
717
718/* This copies a 64-byte-aligned AGP region to the framebuffer with a
719 * standard blit; the ioctl needs to do the checking.
720 */
721static void mga_dma_dispatch_iload(struct drm_device * dev, struct drm_buf * buf,
722 unsigned int dstorg, unsigned int length)
723{
724 drm_mga_private_t *dev_priv = dev->dev_private;
725 drm_mga_buf_priv_t *buf_priv = buf->dev_private;
726 drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
727 u32 srcorg =
728 buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;
729 u32 y2;
730 DMA_LOCALS;
731 DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);
732
733 y2 = length / 64;
734
735 BEGIN_DMA(5);
736
737 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
738 MGA_DMAPAD, 0x00000000,
739 MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);
740
741 DMA_BLOCK(MGA_DSTORG, dstorg,
742 MGA_MACCESS, 0x00000000, MGA_SRCORG, srcorg, MGA_AR5, 64);
743
744 DMA_BLOCK(MGA_PITCH, 64,
745 MGA_PLNWT, 0xffffffff,
746 MGA_DMAPAD, 0x00000000, MGA_DWGCTL, MGA_DWGCTL_COPY);
747
748 DMA_BLOCK(MGA_AR0, 63,
749 MGA_AR3, 0,
750 MGA_FXBNDRY, (63 << 16) | 0, MGA_YDSTLEN + MGA_EXEC, y2);
751
752 DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
753 MGA_SRCORG, dev_priv->front_offset,
754 MGA_PITCH, dev_priv->front_pitch, MGA_DWGSYNC, 0x00007000);
755
756 ADVANCE_DMA();
757
758 AGE_BUFFER(buf_priv);
759
760 buf->pending = 0;
761 buf->used = 0;
762 buf_priv->dispatched = 0;
763
764 mga_freelist_put(dev, buf);
765
766 FLUSH_DMA();
767}
768
769static void mga_dma_dispatch_blit(struct drm_device * dev, drm_mga_blit_t * blit)
770{
771 drm_mga_private_t *dev_priv = dev->dev_private;
772 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
773 drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
774 struct drm_clip_rect *pbox = sarea_priv->boxes;
775 int nbox = sarea_priv->nbox;
776 u32 scandir = 0, i;
777 DMA_LOCALS;
778 DRM_DEBUG("\n");
779
780 BEGIN_DMA(4 + nbox);
781
782 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
783 MGA_DMAPAD, 0x00000000,
784 MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);
785
786 DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
787 MGA_PLNWT, blit->planemask,
788 MGA_SRCORG, blit->srcorg, MGA_DSTORG, blit->dstorg);
789
790 DMA_BLOCK(MGA_SGN, scandir,
791 MGA_MACCESS, dev_priv->maccess,
792 MGA_AR5, blit->ydir * blit->src_pitch,
793 MGA_PITCH, blit->dst_pitch);
794
795 for (i = 0; i < nbox; i++) {
796 int srcx = pbox[i].x1 + blit->delta_sx;
797 int srcy = pbox[i].y1 + blit->delta_sy;
798 int dstx = pbox[i].x1 + blit->delta_dx;
799 int dsty = pbox[i].y1 + blit->delta_dy;
800 int h = pbox[i].y2 - pbox[i].y1;
801 int w = pbox[i].x2 - pbox[i].x1 - 1;
802 int start;
803
804 if (blit->ydir == -1) {
805 srcy = blit->height - srcy - 1;
806 }
807
808 start = srcy * blit->src_pitch + srcx;
809
810 DMA_BLOCK(MGA_AR0, start + w,
811 MGA_AR3, start,
812 MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
813 MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);
814 }
815
816 /* Do something to flush AGP?
817 */
818
819 /* Force reset of DWGCTL */
820 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
821 MGA_PLNWT, ctx->plnwt,
822 MGA_PITCH, dev_priv->front_pitch, MGA_DWGCTL, ctx->dwgctl);
823
824 ADVANCE_DMA();
825}
826
827/* ================================================================
828 *
829 */
830
831static int mga_dma_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
832{
833 drm_mga_private_t *dev_priv = dev->dev_private;
834 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
835 drm_mga_clear_t *clear = data;
836
837 LOCK_TEST_WITH_RETURN(dev, file_priv);
838
839 if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
840 sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;
841
842 WRAP_TEST_WITH_RETURN(dev_priv);
843
844 mga_dma_dispatch_clear(dev, clear);
845
846 /* Make sure we restore the 3D state next time.
847 */
848 dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
849
850 return 0;
851}
852
853static int mga_dma_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
854{
855 drm_mga_private_t *dev_priv = dev->dev_private;
856 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
857
858 LOCK_TEST_WITH_RETURN(dev, file_priv);
859
860 if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
861 sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;
862
863 WRAP_TEST_WITH_RETURN(dev_priv);
864
865 mga_dma_dispatch_swap(dev);
866
867 /* Make sure we restore the 3D state next time.
868 */
869 dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
870
871 return 0;
872}
873
874static int mga_dma_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
875{
876 drm_mga_private_t *dev_priv = dev->dev_private;
877 struct drm_device_dma *dma = dev->dma;
878 struct drm_buf *buf;
879 drm_mga_buf_priv_t *buf_priv;
880 drm_mga_vertex_t *vertex = data;
881
882 LOCK_TEST_WITH_RETURN(dev, file_priv);
883
884 if (vertex->idx < 0 || vertex->idx > dma->buf_count)
885 return -EINVAL;
886 buf = dma->buflist[vertex->idx];
887 buf_priv = buf->dev_private;
888
889 buf->used = vertex->used;
890 buf_priv->discard = vertex->discard;
891
892 if (!mga_verify_state(dev_priv)) {
893 if (vertex->discard) {
894 if (buf_priv->dispatched == 1)
895 AGE_BUFFER(buf_priv);
896 buf_priv->dispatched = 0;
897 mga_freelist_put(dev, buf);
898 }
899 return -EINVAL;
900 }
901
902 WRAP_TEST_WITH_RETURN(dev_priv);
903
904 mga_dma_dispatch_vertex(dev, buf);
905
906 return 0;
907}
908
909static int mga_dma_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
910{
911 drm_mga_private_t *dev_priv = dev->dev_private;
912 struct drm_device_dma *dma = dev->dma;
913 struct drm_buf *buf;
914 drm_mga_buf_priv_t *buf_priv;
915 drm_mga_indices_t *indices = data;
916
917 LOCK_TEST_WITH_RETURN(dev, file_priv);
918
919 if (indices->idx < 0 || indices->idx > dma->buf_count)
920 return -EINVAL;
921
922 buf = dma->buflist[indices->idx];
923 buf_priv = buf->dev_private;
924
925 buf_priv->discard = indices->discard;
926
927 if (!mga_verify_state(dev_priv)) {
928 if (indices->discard) {
929 if (buf_priv->dispatched == 1)
930 AGE_BUFFER(buf_priv);
931 buf_priv->dispatched = 0;
932 mga_freelist_put(dev, buf);
933 }
934 return -EINVAL;
935 }
936
937 WRAP_TEST_WITH_RETURN(dev_priv);
938
939 mga_dma_dispatch_indices(dev, buf, indices->start, indices->end);
940
941 return 0;
942}
943
944static int mga_dma_iload(struct drm_device *dev, void *data, struct drm_file *file_priv)
945{
946 struct drm_device_dma *dma = dev->dma;
947 drm_mga_private_t *dev_priv = dev->dev_private;
948 struct drm_buf *buf;
949 drm_mga_buf_priv_t *buf_priv;
950 drm_mga_iload_t *iload = data;
951 DRM_DEBUG("\n");
952
953 LOCK_TEST_WITH_RETURN(dev, file_priv);
954
955#if 0
956 if (mga_do_wait_for_idle(dev_priv) < 0) {
957 if (MGA_DMA_DEBUG)
958 DRM_INFO("-EBUSY\n");
959 return -EBUSY;
960 }
961#endif
962 if (iload->idx < 0 || iload->idx > dma->buf_count)
963 return -EINVAL;
964
965 buf = dma->buflist[iload->idx];
966 buf_priv = buf->dev_private;
967
968 if (mga_verify_iload(dev_priv, iload->dstorg, iload->length)) {
969 mga_freelist_put(dev, buf);
970 return -EINVAL;
971 }
972
973 WRAP_TEST_WITH_RETURN(dev_priv);
974
975 mga_dma_dispatch_iload(dev, buf, iload->dstorg, iload->length);
976
977 /* Make sure we restore the 3D state next time.
978 */
979 dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
980
981 return 0;
982}
983
984static int mga_dma_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
985{
986 drm_mga_private_t *dev_priv = dev->dev_private;
987 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
988 drm_mga_blit_t *blit = data;
989 DRM_DEBUG("\n");
990
991 LOCK_TEST_WITH_RETURN(dev, file_priv);
992
993 if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
994 sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;
995
996 if (mga_verify_blit(dev_priv, blit->srcorg, blit->dstorg))
997 return -EINVAL;
998
999 WRAP_TEST_WITH_RETURN(dev_priv);
1000
1001 mga_dma_dispatch_blit(dev, blit);
1002
1003 /* Make sure we restore the 3D state next time.
1004 */
1005 dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
1006
1007 return 0;
1008}
1009
1010static int mga_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
1011{
1012 drm_mga_private_t *dev_priv = dev->dev_private;
1013 drm_mga_getparam_t *param = data;
1014 int value;
1015
1016 if (!dev_priv) {
1017 DRM_ERROR("called with no initialization\n");
1018 return -EINVAL;
1019 }
1020
1021 DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);
1022
1023 switch (param->param) {
1024 case MGA_PARAM_IRQ_NR:
1025 value = dev->irq;
1026 break;
1027 case MGA_PARAM_CARD_TYPE:
1028 value = dev_priv->chipset;
1029 break;
1030 default:
1031 return -EINVAL;
1032 }
1033
1034 if (DRM_COPY_TO_USER(param->value, &value, sizeof(int))) {
1035 DRM_ERROR("copy_to_user\n");
1036 return -EFAULT;
1037 }
1038
1039 return 0;
1040}
1041
1042static int mga_set_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
1043{
1044 drm_mga_private_t *dev_priv = dev->dev_private;
1045 u32 *fence = data;
1046 DMA_LOCALS;
1047
1048 if (!dev_priv) {
1049 DRM_ERROR("called with no initialization\n");
1050 return -EINVAL;
1051 }
1052
1053 DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);
1054
1055 /* I would normally do this assignment in the declaration of fence,
1056 * but dev_priv may be NULL.
1057 */
1058
1059 *fence = dev_priv->next_fence_to_post;
1060 dev_priv->next_fence_to_post++;
1061
1062 BEGIN_DMA(1);
1063 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
1064 MGA_DMAPAD, 0x00000000,
1065 MGA_DMAPAD, 0x00000000, MGA_SOFTRAP, 0x00000000);
1066 ADVANCE_DMA();
1067
1068 return 0;
1069}
1070
1071static int mga_wait_fence(struct drm_device *dev, void *data, struct drm_file *
1072file_priv)
1073{
1074 drm_mga_private_t *dev_priv = dev->dev_private;
1075 u32 *fence = data;
1076
1077 if (!dev_priv) {
1078 DRM_ERROR("called with no initialization\n");
1079 return -EINVAL;
1080 }
1081
1082 DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);
1083
1084 mga_driver_fence_wait(dev, fence);
1085 return 0;
1086}
1087
1088struct drm_ioctl_desc mga_ioctls[] = {
1089 DRM_IOCTL_DEF(DRM_MGA_INIT, mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
1090 DRM_IOCTL_DEF(DRM_MGA_FLUSH, mga_dma_flush, DRM_AUTH),
1091 DRM_IOCTL_DEF(DRM_MGA_RESET, mga_dma_reset, DRM_AUTH),
1092 DRM_IOCTL_DEF(DRM_MGA_SWAP, mga_dma_swap, DRM_AUTH),
1093 DRM_IOCTL_DEF(DRM_MGA_CLEAR, mga_dma_clear, DRM_AUTH),
1094 DRM_IOCTL_DEF(DRM_MGA_VERTEX, mga_dma_vertex, DRM_AUTH),
1095 DRM_IOCTL_DEF(DRM_MGA_INDICES, mga_dma_indices, DRM_AUTH),
1096 DRM_IOCTL_DEF(DRM_MGA_ILOAD, mga_dma_iload, DRM_AUTH),
1097 DRM_IOCTL_DEF(DRM_MGA_BLIT, mga_dma_blit, DRM_AUTH),
1098 DRM_IOCTL_DEF(DRM_MGA_GETPARAM, mga_getparam, DRM_AUTH),
1099 DRM_IOCTL_DEF(DRM_MGA_SET_FENCE, mga_set_fence, DRM_AUTH),
1100 DRM_IOCTL_DEF(DRM_MGA_WAIT_FENCE, mga_wait_fence, DRM_AUTH),
1101 DRM_IOCTL_DEF(DRM_MGA_DMA_BOOTSTRAP, mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
1102};
1103
1104int mga_max_ioctl = DRM_ARRAY_SIZE(mga_ioctls);