/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
 * Created: Thu Jan 27 02:53:43 2000 by jhartmann@precisioninsight.com
 *
 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Jeff Hartmann <jhartmann@valinux.com>
 *    Keith Whitwell <keith@tungstengraphics.com>
 *
 * Rewritten by:
 *    Gareth Hughes <gareth@valinux.com>
 */

#include <drm/drmP.h>
#include <drm/mga_drm.h>
#include "mga_drv.h"

/* ================================================================
 * DMA hardware state programming functions
 */

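/* The state-emit helpers below batch register writes into the primary DMA
 * stream using the BEGIN_DMA()/DMA_BLOCK()/ADVANCE_DMA() macros from
 * mga_drv.h.  Each DMA_BLOCK() emits four register/value pairs and
 * BEGIN_DMA(n) reserves space for up to n such blocks; FLUSH_DMA() kicks
 * the accumulated commands off to the chip.
 */
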
static void mga_emit_clip_rect(drm_mga_private_t *dev_priv,
			       struct drm_clip_rect *box)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	unsigned int pitch = dev_priv->front_pitch;
	DMA_LOCALS;

	BEGIN_DMA(2);

	/* Force reset of DWGCTL on G400 (eliminates clip disable bit).
	 */
	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000,
			  MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000);
	}
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
		  MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);

	ADVANCE_DMA();
}

static __inline__ void mga_g200_emit_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

	DMA_BLOCK(MGA_FCOL, ctx->fcol,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

	DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
		  MGA_TDUALSTAGE0, ctx->tdualstage0,
		  MGA_TDUALSTAGE1, ctx->tdualstage1, MGA_FCOL, ctx->fcol);

	DMA_BLOCK(MGA_STENCIL, ctx->stencil,
		  MGA_STENCILCTL, ctx->stencilctl,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}

static __inline__ void mga_g200_emit_tex0(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR24, tex->texwidth);

	DMA_BLOCK(MGA_WR34, tex->texheight,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_tex0(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

/*	printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
/*	       tex->texctl, tex->texctl2); */

	BEGIN_DMA(6);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

	DMA_BLOCK(MGA_WR57, 0x00000000,
		  MGA_WR53, 0x00000000,
		  MGA_WR61, 0x00000000, MGA_WR52, MGA_G400_WR_MAGIC);

	DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
		  MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
		  MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
		  MGA_DMAPAD, 0x00000000);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_TEXTRANS, 0x0000ffff, MGA_TEXTRANSHIGH, 0x0000ffff);

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_tex1(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];
	DMA_LOCALS;

/*	printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg, */
/*	       tex->texctl, tex->texctl2); */

	BEGIN_DMA(5);

	DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
				MGA_MAP1_ENABLE |
				MGA_G400_TC2_MAGIC),
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

	DMA_BLOCK(MGA_WR57, 0x00000000,
		  MGA_WR53, 0x00000000,
		  MGA_WR61, 0x00000000,
		  MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);

	DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff,
		  MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);

	ADVANCE_DMA();
}

static __inline__ void mga_g200_emit_pipe(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
		  MGA_WVRTXSZ, 0x00000007,
		  MGA_WFLAG, 0x00000000, MGA_WR24, 0x00000000);

	DMA_BLOCK(MGA_WR25, 0x00000100,
		  MGA_WR34, 0x00000000,
		  MGA_WR42, 0x0000ffff, MGA_WR60, 0x0000ffff);

	/* Padding required due to hardware bug.
	 */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
			       MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_pipe(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

/*	printk("mga_g400_emit_pipe %x\n", pipe); */

	BEGIN_DMA(10);

	DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	if (pipe & MGA_T2) {
		DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

		DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x1e000000);
	} else {
		if (dev_priv->warp_pipe & MGA_T2) {
			/* Flush the WARP pipe */
			DMA_BLOCK(MGA_YDST, 0x00000000,
				  MGA_FXLEFT, 0x00000000,
				  MGA_FXRIGHT, 0x00000001,
				  MGA_DWGCTL, MGA_DWGCTL_FLUSH);

			DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
				  MGA_DWGSYNC, 0x00007000,
				  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
				  MGA_LEN + MGA_EXEC, 0x00000000);

			DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
						MGA_G400_TC2_MAGIC),
				  MGA_LEN + MGA_EXEC, 0x00000000,
				  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
				  MGA_DMAPAD, 0x00000000);
		}

		DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

		DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x18000000);
	}

	DMA_BLOCK(MGA_WFLAG, 0x00000000,
		  MGA_WFLAG1, 0x00000000,
		  MGA_WR56, MGA_G400_WR56_MAGIC, MGA_DMAPAD, 0x00000000);

	DMA_BLOCK(MGA_WR49, 0x00000000,	/* tex0 */
		  MGA_WR57, 0x00000000,	/* tex0 */
		  MGA_WR53, 0x00000000,	/* tex1 */
		  MGA_WR61, 0x00000000);	/* tex1 */

	DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC,	/* tex0 width */
		  MGA_WR62, MGA_G400_WR_MAGIC,	/* tex0 height */
		  MGA_WR52, MGA_G400_WR_MAGIC,	/* tex1 width */
		  MGA_WR60, MGA_G400_WR_MAGIC);	/* tex1 height */

	/* Padding required due to hardware bug */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
				MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}

static void mga_g200_emit_state(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;

	if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
		mga_g200_emit_pipe(dev_priv);
		dev_priv->warp_pipe = sarea_priv->warp_pipe;
	}

	if (dirty & MGA_UPLOAD_CONTEXT) {
		mga_g200_emit_context(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
	}

	if (dirty & MGA_UPLOAD_TEX0) {
		mga_g200_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
	}
}

static void mga_g400_emit_state(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;
	int multitex = sarea_priv->warp_pipe & MGA_T2;

	if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
		mga_g400_emit_pipe(dev_priv);
		dev_priv->warp_pipe = sarea_priv->warp_pipe;
	}

	if (dirty & MGA_UPLOAD_CONTEXT) {
		mga_g400_emit_context(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
	}

	if (dirty & MGA_UPLOAD_TEX0) {
		mga_g400_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
	}

	if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
		mga_g400_emit_tex1(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
	}
}

/* ================================================================
 * SAREA state verification
 */

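/* The SAREA is writable by any authenticated client, so every value taken
 * from it that ends up in a hardware register (destination origins, texture
 * origins, cliprect counts, WARP pipe numbers) is sanity-checked here before
 * it is emitted to the engine.
 */
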
/* Disallow all write destinations except the front and backbuffer.
 */
static int mga_verify_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;

	if (ctx->dstorg != dev_priv->front_offset &&
	    ctx->dstorg != dev_priv->back_offset) {
		DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
			  ctx->dstorg, dev_priv->front_offset,
			  dev_priv->back_offset);
		ctx->dstorg = 0;
		return -EINVAL;
	}

	return 0;
}

/* Disallow texture reads from PCI space.
 */
static int mga_verify_tex(drm_mga_private_t *dev_priv, int unit)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
	unsigned int org;

	org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);

	if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
		DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
		tex->texorg = 0;
		return -EINVAL;
	}

	return 0;
}

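/* Note the return convention: unlike the helpers above, which return 0 or
 * -EINVAL, mga_verify_state() returns nonzero (true) when the state is
 * acceptable and 0 when any check failed.
 */
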
static int mga_verify_state(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;
	int ret = 0;

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	if (dirty & MGA_UPLOAD_CONTEXT)
		ret |= mga_verify_context(dev_priv);

	if (dirty & MGA_UPLOAD_TEX0)
		ret |= mga_verify_tex(dev_priv, 0);

	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		if (dirty & MGA_UPLOAD_TEX1)
			ret |= mga_verify_tex(dev_priv, 1);

		if (dirty & MGA_UPLOAD_PIPE)
			ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
	} else {
		if (dirty & MGA_UPLOAD_PIPE)
			ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
	}

	return (ret == 0);
}

static int mga_verify_iload(drm_mga_private_t *dev_priv,
			    unsigned int dstorg, unsigned int length)
{
	if (dstorg < dev_priv->texture_offset ||
	    dstorg + length > (dev_priv->texture_offset +
			       dev_priv->texture_size)) {
		DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
		return -EINVAL;
	}

	if (length & MGA_ILOAD_MASK) {
		DRM_ERROR("*** bad iload length: 0x%x\n",
			  length & MGA_ILOAD_MASK);
		return -EINVAL;
	}

	return 0;
}

static int mga_verify_blit(drm_mga_private_t *dev_priv,
			   unsigned int srcorg, unsigned int dstorg)
{
	if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
	    (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
		DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
		return -EINVAL;
	}
	return 0;
}

/* ================================================================
 *
 */

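/* Clear the requested buffers one cliprect at a time.  Each pass programs
 * the plane mask, clear value and destination origin and then fires the
 * engine's block-fill command (dev_priv->clear_cmd) for the front, back
 * and/or depth buffer as requested by the ioctl.
 */
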
static void mga_dma_dispatch_clear(struct drm_device *dev, drm_mga_clear_t *clear)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(1);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;

		DRM_DEBUG("   from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		if (clear->flags & MGA_FRONT) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->front_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		if (clear->flags & MGA_BACK) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->back_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		if (clear->flags & MGA_DEPTH) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->depth_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_depth,
				  MGA_DSTORG, dev_priv->depth_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

	}

	BEGIN_DMA(1);

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();
}

static void mga_dma_dispatch_swap(struct drm_device *dev)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	sarea_priv->last_frame.head = dev_priv->prim.tail;
	sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;

	BEGIN_DMA(4 + nbox);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_SRCORG, dev_priv->back_offset,
		  MGA_AR5, dev_priv->front_pitch);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, 0xffffffff, MGA_DWGCTL, MGA_DWGCTL_COPY);

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;
		u32 start = box->y1 * dev_priv->front_pitch;

		DRM_DEBUG("   from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
			  MGA_AR3, start + box->x1,
			  MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
			  MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);
	}

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();

	DRM_DEBUG("... done.\n");
}

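/* Submit a client vertex buffer.  The buffer is replayed once per cliprect
 * through the MGA_SECADDRESS/MGA_SECEND secondary DMA registers, with the
 * clip rectangle re-emitted before each pass; a discarded buffer is aged
 * and returned to the freelist afterwards.
 */
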
static void mga_dma_dispatch_vertex(struct drm_device *dev, struct drm_buf *buf)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	u32 length = (u32) buf->used;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	if (buf->used) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SECADDRESS, (address |
						   MGA_DMA_VERTEX),
				  MGA_SECEND, ((address + length) |
					       dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}

static void mga_dma_dispatch_indices(struct drm_device *dev, struct drm_buf *buf,
				     unsigned int start, unsigned int end)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d start=%d end=%d\n", buf->idx, start, end);

	if (start != end) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SETUPADDRESS, address + start,
				  MGA_SETUPEND, ((address + end) |
						 dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}

/* This copies a 64-byte-aligned AGP region to the framebuffer with a
 * standard blit; the ioctl needs to do the checking.
 */
static void mga_dma_dispatch_iload(struct drm_device *dev, struct drm_buf *buf,
				   unsigned int dstorg, unsigned int length)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
	u32 srcorg =
	    buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;
	u32 y2;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	y2 = length / 64;

	BEGIN_DMA(5);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dstorg,
		  MGA_MACCESS, 0x00000000, MGA_SRCORG, srcorg, MGA_AR5, 64);

	DMA_BLOCK(MGA_PITCH, 64,
		  MGA_PLNWT, 0xffffffff,
		  MGA_DMAPAD, 0x00000000, MGA_DWGCTL, MGA_DWGCTL_COPY);

	DMA_BLOCK(MGA_AR0, 63,
		  MGA_AR3, 0,
		  MGA_FXBNDRY, (63 << 16) | 0, MGA_YDSTLEN + MGA_EXEC, y2);

	DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset,
		  MGA_PITCH, dev_priv->front_pitch, MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	AGE_BUFFER(buf_priv);

	buf->pending = 0;
	buf->used = 0;
	buf_priv->dispatched = 0;

	mga_freelist_put(dev, buf);

	FLUSH_DMA();
}

static void mga_dma_dispatch_blit(struct drm_device *dev, drm_mga_blit_t *blit)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	u32 scandir = 0, i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(4 + nbox);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
		  MGA_PLNWT, blit->planemask,
		  MGA_SRCORG, blit->srcorg, MGA_DSTORG, blit->dstorg);

	DMA_BLOCK(MGA_SGN, scandir,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_AR5, blit->ydir * blit->src_pitch,
		  MGA_PITCH, blit->dst_pitch);

	for (i = 0; i < nbox; i++) {
		int srcx = pbox[i].x1 + blit->delta_sx;
		int srcy = pbox[i].y1 + blit->delta_sy;
		int dstx = pbox[i].x1 + blit->delta_dx;
		int dsty = pbox[i].y1 + blit->delta_dy;
		int h = pbox[i].y2 - pbox[i].y1;
		int w = pbox[i].x2 - pbox[i].x1 - 1;
		int start;

		if (blit->ydir == -1)
			srcy = blit->height - srcy - 1;

		start = srcy * blit->src_pitch + srcx;

		DMA_BLOCK(MGA_AR0, start + w,
			  MGA_AR3, start,
			  MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
			  MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);
	}

	/* Do something to flush AGP?
	 */

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_PITCH, dev_priv->front_pitch, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();
}

/* ================================================================
 *
 */

static int mga_dma_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_clear_t *clear = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_clear(dev, clear);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_dma_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_swap(dev);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_dma_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_vertex_t *vertex = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (vertex->idx < 0 || vertex->idx > dma->buf_count)
		return -EINVAL;
	buf = dma->buflist[vertex->idx];
	buf_priv = buf->dev_private;

	buf->used = vertex->used;
	buf_priv->discard = vertex->discard;

	if (!mga_verify_state(dev_priv)) {
		if (vertex->discard) {
			if (buf_priv->dispatched == 1)
				AGE_BUFFER(buf_priv);
			buf_priv->dispatched = 0;
			mga_freelist_put(dev, buf);
		}
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_vertex(dev, buf);

	return 0;
}

static int mga_dma_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_indices_t *indices = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (indices->idx < 0 || indices->idx > dma->buf_count)
		return -EINVAL;

	buf = dma->buflist[indices->idx];
	buf_priv = buf->dev_private;

	buf_priv->discard = indices->discard;

	if (!mga_verify_state(dev_priv)) {
		if (indices->discard) {
			if (buf_priv->dispatched == 1)
				AGE_BUFFER(buf_priv);
			buf_priv->dispatched = 0;
			mga_freelist_put(dev, buf);
		}
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_indices(dev, buf, indices->start, indices->end);

	return 0;
}

static int mga_dma_iload(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	struct drm_device_dma *dma = dev->dma;
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_iload_t *iload = data;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);

#if 0
	if (mga_do_wait_for_idle(dev_priv) < 0) {
		if (MGA_DMA_DEBUG)
			DRM_INFO("-EBUSY\n");
		return -EBUSY;
	}
#endif
	if (iload->idx < 0 || iload->idx > dma->buf_count)
		return -EINVAL;

	buf = dma->buflist[iload->idx];
	buf_priv = buf->dev_private;

	if (mga_verify_iload(dev_priv, iload->dstorg, iload->length)) {
		mga_freelist_put(dev, buf);
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_iload(dev, buf, iload->dstorg, iload->length);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_dma_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_blit_t *blit = data;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	if (mga_verify_blit(dev_priv, blit->srcorg, blit->dstorg))
		return -EINVAL;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_blit(dev, blit);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_getparam_t *param = data;
	int value;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	switch (param->param) {
	case MGA_PARAM_IRQ_NR:
		value = dev->pdev->irq;
		break;
	case MGA_PARAM_CARD_TYPE:
		value = dev_priv->chipset;
		break;
	default:
		return -EINVAL;
	}

	if (copy_to_user(param->value, &value, sizeof(int))) {
		DRM_ERROR("copy_to_user\n");
		return -EFAULT;
	}

	return 0;
}

static int mga_set_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	u32 *fence = data;
	DMA_LOCALS;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	/* I would normally do this assignment in the declaration of fence,
	 * but dev_priv may be NULL.
	 */

	*fence = dev_priv->next_fence_to_post;
	dev_priv->next_fence_to_post++;

	BEGIN_DMA(1);
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_SOFTRAP, 0x00000000);
	ADVANCE_DMA();

	return 0;
}

static int mga_wait_fence(struct drm_device *dev, void *data,
			  struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	u32 *fence = data;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	mga_driver_fence_wait(dev, fence);
	return 0;
}

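/* Ioctl dispatch table.  Every entry requires DRM_AUTH; the init and DMA
 * bootstrap ioctls are additionally restricted to the master/root client.
 */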
const struct drm_ioctl_desc mga_ioctls[] = {
	DRM_IOCTL_DEF_DRV(MGA_INIT, mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(MGA_FLUSH, mga_dma_flush, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_RESET, mga_dma_reset, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_SWAP, mga_dma_swap, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_CLEAR, mga_dma_clear, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_VERTEX, mga_dma_vertex, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_INDICES, mga_dma_indices, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_ILOAD, mga_dma_iload, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_BLIT, mga_dma_blit, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_GETPARAM, mga_getparam, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_SET_FENCE, mga_set_fence, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_WAIT_FENCE, mga_wait_fence, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_DMA_BOOTSTRAP, mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
};

int mga_max_ioctl = ARRAY_SIZE(mga_ioctls);