/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
 * Created: Thu Jan 27 02:53:43 2000 by jhartmann@precisioninsight.com
 *
 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Jeff Hartmann <jhartmann@valinux.com>
 *    Keith Whitwell <keith@tungstengraphics.com>
 *
 * Rewritten by:
 *    Gareth Hughes <gareth@valinux.com>
 */

#include "drmP.h"
#include "drm.h"
#include "mga_drm.h"
#include "mga_drv.h"
39
/* ================================================================
 * DMA hardware state programming functions
 */

/* Emit DMA commands programming the hardware clip rectangle
 * (CXBNDRY/YTOP/YBOT) from a single DRM cliprect.
 */
static void mga_emit_clip_rect(drm_mga_private_t *dev_priv,
			       struct drm_clip_rect *box)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	unsigned int pitch = dev_priv->front_pitch;
	DMA_LOCALS;

	BEGIN_DMA(2);

	/* Force reset of DWGCTL on G400 (eliminates clip disable bit).
	 */
	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000,
			  MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000);
	}
	/* CXBNDRY packs the inclusive right edge in the high halfword and
	 * the left edge in the low one; YTOP/YBOT are row offsets scaled
	 * by the front-buffer pitch.
	 */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
		  MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);

	ADVANCE_DMA();
}
68
/* Upload the G200 drawing-context registers from the shared SAREA
 * context state.
 */
static __inline__ void mga_g200_emit_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

	/* Trailing DMAPAD writes pad the block out to a full DMA group. */
	DMA_BLOCK(MGA_FCOL, ctx->fcol,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}
91
/* Upload the G400 drawing-context registers from the shared SAREA
 * context state.  The G400 has extra state over the G200: WFLAG1,
 * dual-stage texture blend (TDUALSTAGE0/1) and stencil registers.
 */
static __inline__ void mga_g400_emit_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

	/* WFLAG1 is deliberately loaded with the same value as WFLAG. */
	DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
		  MGA_TDUALSTAGE0, ctx->tdualstage0,
		  MGA_TDUALSTAGE1, ctx->tdualstage1, MGA_FCOL, ctx->fcol);

	DMA_BLOCK(MGA_STENCIL, ctx->stencil,
		  MGA_STENCILCTL, ctx->stencilctl,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}
118
/* Upload G200 texture-unit 0 state from the SAREA.  The WR24/WR34
 * writes mirror the texture width/height into WARP setup registers
 * — NOTE(review): exact WARP register semantics inferred from the
 * register names; confirm against Matrox G200 documentation.
 */
static __inline__ void mga_g200_emit_tex0(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR24, tex->texwidth);

	DMA_BLOCK(MGA_WR34, tex->texheight,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}
146
147static __inline__ void mga_g400_emit_tex0(drm_mga_private_t *dev_priv)
148{
149 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
150 drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
151 DMA_LOCALS;
152
153/* printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
154/* tex->texctl, tex->texctl2); */
155
156 BEGIN_DMA(6);
157
158 DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
159 MGA_TEXCTL, tex->texctl,
160 MGA_TEXFILTER, tex->texfilter,
161 MGA_TEXBORDERCOL, tex->texbordercol);
162
163 DMA_BLOCK(MGA_TEXORG, tex->texorg,
164 MGA_TEXORG1, tex->texorg1,
165 MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);
166
167 DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
168 MGA_TEXWIDTH, tex->texwidth,
169 MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);
170
171 DMA_BLOCK(MGA_WR57, 0x00000000,
172 MGA_WR53, 0x00000000,
173 MGA_WR61, 0x00000000, MGA_WR52, MGA_G400_WR_MAGIC);
174
175 DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
176 MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
177 MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
178 MGA_DMAPAD, 0x00000000);
179
180 DMA_BLOCK(MGA_DMAPAD, 0x00000000,
181 MGA_DMAPAD, 0x00000000,
182 MGA_TEXTRANS, 0x0000ffff, MGA_TEXTRANSHIGH, 0x0000ffff);
183
184 ADVANCE_DMA();
185}
186
187static __inline__ void mga_g400_emit_tex1(drm_mga_private_t *dev_priv)
188{
189 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
190 drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];
191 DMA_LOCALS;
192
193/* printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg, */
194/* tex->texctl, tex->texctl2); */
195
196 BEGIN_DMA(5);
197
198 DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
199 MGA_MAP1_ENABLE |
200 MGA_G400_TC2_MAGIC),
201 MGA_TEXCTL, tex->texctl,
202 MGA_TEXFILTER, tex->texfilter,
203 MGA_TEXBORDERCOL, tex->texbordercol);
204
205 DMA_BLOCK(MGA_TEXORG, tex->texorg,
206 MGA_TEXORG1, tex->texorg1,
207 MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);
208
209 DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
210 MGA_TEXWIDTH, tex->texwidth,
211 MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);
212
213 DMA_BLOCK(MGA_WR57, 0x00000000,
214 MGA_WR53, 0x00000000,
215 MGA_WR61, 0x00000000,
216 MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);
217
218 DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
219 MGA_TEXTRANS, 0x0000ffff,
220 MGA_TEXTRANSHIGH, 0x0000ffff,
221 MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);
222
223 ADVANCE_DMA();
224}
225
/* Load the selected G200 WARP microcode pipe: suspend the WARP engine,
 * program the vertex size and setup registers, then restart it at the
 * physical address of the chosen pipe's microcode.
 */
static __inline__ void mga_g200_emit_pipe(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
		  MGA_WVRTXSZ, 0x00000007,
		  MGA_WFLAG, 0x00000000, MGA_WR24, 0x00000000);

	DMA_BLOCK(MGA_WR25, 0x00000100,
		  MGA_WR34, 0x00000000,
		  MGA_WR42, 0x0000ffff, MGA_WR60, 0x0000ffff);

	/* Padding required due to hardware bug.
	 */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
			       MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}
252
253static __inline__ void mga_g400_emit_pipe(drm_mga_private_t *dev_priv)
254{
255 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
256 unsigned int pipe = sarea_priv->warp_pipe;
257 DMA_LOCALS;
258
259/* printk("mga_g400_emit_pipe %x\n", pipe); */
260
261 BEGIN_DMA(10);
262
263 DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
264 MGA_DMAPAD, 0x00000000,
265 MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);
266
267 if (pipe & MGA_T2) {
268 DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
269 MGA_DMAPAD, 0x00000000,
270 MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);
271
272 DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
273 MGA_WACCEPTSEQ, 0x00000000,
274 MGA_WACCEPTSEQ, 0x00000000,
275 MGA_WACCEPTSEQ, 0x1e000000);
276 } else {
277 if (dev_priv->warp_pipe & MGA_T2) {
278 /* Flush the WARP pipe */
279 DMA_BLOCK(MGA_YDST, 0x00000000,
280 MGA_FXLEFT, 0x00000000,
281 MGA_FXRIGHT, 0x00000001,
282 MGA_DWGCTL, MGA_DWGCTL_FLUSH);
283
284 DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
285 MGA_DWGSYNC, 0x00007000,
286 MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
287 MGA_LEN + MGA_EXEC, 0x00000000);
288
289 DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
290 MGA_G400_TC2_MAGIC),
291 MGA_LEN + MGA_EXEC, 0x00000000,
292 MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
293 MGA_DMAPAD, 0x00000000);
294 }
295
296 DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
297 MGA_DMAPAD, 0x00000000,
298 MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);
299
300 DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
301 MGA_WACCEPTSEQ, 0x00000000,
302 MGA_WACCEPTSEQ, 0x00000000,
303 MGA_WACCEPTSEQ, 0x18000000);
304 }
305
306 DMA_BLOCK(MGA_WFLAG, 0x00000000,
307 MGA_WFLAG1, 0x00000000,
308 MGA_WR56, MGA_G400_WR56_MAGIC, MGA_DMAPAD, 0x00000000);
309
310 DMA_BLOCK(MGA_WR49, 0x00000000, /* tex0 */
311 MGA_WR57, 0x00000000, /* tex0 */
312 MGA_WR53, 0x00000000, /* tex1 */
313 MGA_WR61, 0x00000000); /* tex1 */
314
315 DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC, /* tex0 width */
316 MGA_WR62, MGA_G400_WR_MAGIC, /* tex0 height */
317 MGA_WR52, MGA_G400_WR_MAGIC, /* tex1 width */
318 MGA_WR60, MGA_G400_WR_MAGIC); /* tex1 height */
319
320 /* Padding required due to hardware bug */
321 DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
322 MGA_DMAPAD, 0xffffffff,
323 MGA_DMAPAD, 0xffffffff,
324 MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
325 MGA_WMODE_START | dev_priv->wagp_enable));
326
327 ADVANCE_DMA();
328}
329
330static void mga_g200_emit_state(drm_mga_private_t *dev_priv)
331{
332 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
333 unsigned int dirty = sarea_priv->dirty;
334
335 if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
336 mga_g200_emit_pipe(dev_priv);
337 dev_priv->warp_pipe = sarea_priv->warp_pipe;
338 }
339
340 if (dirty & MGA_UPLOAD_CONTEXT) {
341 mga_g200_emit_context(dev_priv);
342 sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
343 }
344
345 if (dirty & MGA_UPLOAD_TEX0) {
346 mga_g200_emit_tex0(dev_priv);
347 sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
348 }
349}
350
351static void mga_g400_emit_state(drm_mga_private_t *dev_priv)
352{
353 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
354 unsigned int dirty = sarea_priv->dirty;
355 int multitex = sarea_priv->warp_pipe & MGA_T2;
356
357 if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
358 mga_g400_emit_pipe(dev_priv);
359 dev_priv->warp_pipe = sarea_priv->warp_pipe;
360 }
361
362 if (dirty & MGA_UPLOAD_CONTEXT) {
363 mga_g400_emit_context(dev_priv);
364 sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
365 }
366
367 if (dirty & MGA_UPLOAD_TEX0) {
368 mga_g400_emit_tex0(dev_priv);
369 sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
370 }
371
372 if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
373 mga_g400_emit_tex1(dev_priv);
374 sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
375 }
376}
377
/* ================================================================
 * SAREA state verification
 */

/* Disallow all write destinations except the front and backbuffer.
 */
384static int mga_verify_context(drm_mga_private_t *dev_priv)
385{
386 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
387 drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
388
389 if (ctx->dstorg != dev_priv->front_offset &&
390 ctx->dstorg != dev_priv->back_offset) {
391 DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
392 ctx->dstorg, dev_priv->front_offset,
393 dev_priv->back_offset);
394 ctx->dstorg = 0;
395 return -EINVAL;
396 }
397
398 return 0;
399}
400
/* Disallow texture reads from PCI space.
 */
403static int mga_verify_tex(drm_mga_private_t *dev_priv, int unit)
404{
405 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
406 drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
407 unsigned int org;
408
409 org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);
410
411 if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
412 DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
413 tex->texorg = 0;
414 return -EINVAL;
415 }
416
417 return 0;
418}
419
420static int mga_verify_state(drm_mga_private_t *dev_priv)
421{
422 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
423 unsigned int dirty = sarea_priv->dirty;
424 int ret = 0;
425
426 if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
427 sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;
428
429 if (dirty & MGA_UPLOAD_CONTEXT)
430 ret |= mga_verify_context(dev_priv);
431
432 if (dirty & MGA_UPLOAD_TEX0)
433 ret |= mga_verify_tex(dev_priv, 0);
434
435 if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
436 if (dirty & MGA_UPLOAD_TEX1)
437 ret |= mga_verify_tex(dev_priv, 1);
438
439 if (dirty & MGA_UPLOAD_PIPE)
440 ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
441 } else {
442 if (dirty & MGA_UPLOAD_PIPE)
443 ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
444 }
445
446 return (ret == 0);
447}
448
449static int mga_verify_iload(drm_mga_private_t *dev_priv,
450 unsigned int dstorg, unsigned int length)
451{
452 if (dstorg < dev_priv->texture_offset ||
453 dstorg + length > (dev_priv->texture_offset +
454 dev_priv->texture_size)) {
455 DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
456 return -EINVAL;
457 }
458
459 if (length & MGA_ILOAD_MASK) {
460 DRM_ERROR("*** bad iload length: 0x%x\n",
461 length & MGA_ILOAD_MASK);
462 return -EINVAL;
463 }
464
465 return 0;
466}
467
468static int mga_verify_blit(drm_mga_private_t *dev_priv,
469 unsigned int srcorg, unsigned int dstorg)
470{
471 if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
472 (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
473 DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
474 return -EINVAL;
475 }
476 return 0;
477}
478
/* ================================================================
 * DMA command dispatch functions
 */
482
/* Emit DMA commands clearing the requested buffers (front/back/depth)
 * within every SAREA cliprect, using the pre-computed block-fill
 * command in dev_priv->clear_cmd.
 */
static void mga_dma_dispatch_clear(struct drm_device *dev, drm_mga_clear_t *clear)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(1);

	/* Synchronize with the drawing engine before touching DSTORG. */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;

		DRM_DEBUG(" from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		if (clear->flags & MGA_FRONT) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			/* Writing DWGCTL with MGA_EXEC starts the fill. */
			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->front_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		if (clear->flags & MGA_BACK) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->back_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		if (clear->flags & MGA_DEPTH) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->depth_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_depth,
				  MGA_DSTORG, dev_priv->depth_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

	}

	BEGIN_DMA(1);

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();
}
570
/* Emit DMA commands copying the back buffer to the front buffer for
 * every SAREA cliprect (buffer swap via screen-to-screen blit).
 */
static void mga_dma_dispatch_swap(struct drm_device *dev)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	/* Record where this frame ends in the primary DMA stream so the
	 * client can later wait for the swap to complete.
	 */
	sarea_priv->last_frame.head = dev_priv->prim.tail;
	sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;

	BEGIN_DMA(4 + nbox);

	/* Synchronize with the drawing engine before changing DSTORG. */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_SRCORG, dev_priv->back_offset,
		  MGA_AR5, dev_priv->front_pitch);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, 0xffffffff, MGA_DWGCTL, MGA_DWGCTL_COPY);

	/* One blit per cliprect; YDSTLEN with MGA_EXEC kicks it off. */
	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;
		u32 start = box->y1 * dev_priv->front_pitch;

		DRM_DEBUG(" from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
			  MGA_AR3, start + box->x1,
			  MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
			  MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);
	}

	/* Restore the client's plane mask, source origin and DWGCTL. */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();

	DRM_DEBUG("... done.\n");
}
624
/* Dispatch a client vertex buffer: emit any dirty state, then replay
 * the buffer once per cliprect (or once with no clipping when nbox is
 * 0 — note the do/while runs at least once).  Discarded buffers are
 * aged and returned to the freelist.
 */
static void mga_dma_dispatch_vertex(struct drm_device *dev, struct drm_buf *buf)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	u32 length = (u32) buf->used;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	if (buf->used) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			/* Execute the vertex buffer as a secondary DMA
			 * stream from its bus address.
			 */
			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SECADDRESS, (address |
						   MGA_DMA_VERTEX),
				  MGA_SECEND, ((address + length) |
					       dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}
671
/* Dispatch an indexed (setup-list) sub-range [start, end) of a client
 * buffer: emit dirty state, then replay the range once per cliprect
 * (the do/while runs at least once even with nbox == 0).  Discarded
 * buffers are aged and returned to the freelist.
 */
static void mga_dma_dispatch_indices(struct drm_device *dev, struct drm_buf *buf,
				     unsigned int start, unsigned int end)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d start=%d end=%d\n", buf->idx, start, end);

	if (start != end) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SETUPADDRESS, address + start,
				  MGA_SETUPEND, ((address + end) |
						 dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}
717
/* This copies a 64 byte aligned agp region to the framebuffer with a
 * standard blit, the ioctl needs to do checking.
 */
/* Copy a 64-byte-aligned AGP buffer into the framebuffer texture
 * region with a 64-pixel-wide blit (one blit row per 64 bytes; the
 * caller has already validated dstorg/length via mga_verify_iload).
 */
static void mga_dma_dispatch_iload(struct drm_device *dev, struct drm_buf *buf,
				   unsigned int dstorg, unsigned int length)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
	u32 srcorg =
	    buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;
	u32 y2;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	/* Blit height: one 64-byte row of the source per scanline. */
	y2 = length / 64;

	BEGIN_DMA(5);

	/* Synchronize with the drawing engine before changing origins. */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dstorg,
		  MGA_MACCESS, 0x00000000, MGA_SRCORG, srcorg, MGA_AR5, 64);

	DMA_BLOCK(MGA_PITCH, 64,
		  MGA_PLNWT, 0xffffffff,
		  MGA_DMAPAD, 0x00000000, MGA_DWGCTL, MGA_DWGCTL_COPY);

	/* 64-pixel-wide blit; YDSTLEN with MGA_EXEC starts the copy. */
	DMA_BLOCK(MGA_AR0, 63,
		  MGA_AR3, 0,
		  MGA_FXBNDRY, (63 << 16) | 0, MGA_YDSTLEN + MGA_EXEC, y2);

	/* Restore the client's plane mask, origin and pitch. */
	DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset,
		  MGA_PITCH, dev_priv->front_pitch, MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	AGE_BUFFER(buf_priv);

	buf->pending = 0;
	buf->used = 0;
	buf_priv->dispatched = 0;

	mga_freelist_put(dev, buf);

	FLUSH_DMA();
}
768
/* Emit DMA commands performing a screen-to-screen blit for every SAREA
 * cliprect, with the source/destination offsets given in the (already
 * verified) blit request.  Supports bottom-up copies via blit->ydir.
 */
static void mga_dma_dispatch_blit(struct drm_device *dev, drm_mga_blit_t *blit)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	u32 scandir = 0, i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(4 + nbox);

	/* Synchronize with the drawing engine before changing origins. */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
		  MGA_PLNWT, blit->planemask,
		  MGA_SRCORG, blit->srcorg, MGA_DSTORG, blit->dstorg);

	/* AR5 is the signed source pitch: negative for bottom-up blits. */
	DMA_BLOCK(MGA_SGN, scandir,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_AR5, blit->ydir * blit->src_pitch,
		  MGA_PITCH, blit->dst_pitch);

	for (i = 0; i < nbox; i++) {
		int srcx = pbox[i].x1 + blit->delta_sx;
		int srcy = pbox[i].y1 + blit->delta_sy;
		int dstx = pbox[i].x1 + blit->delta_dx;
		int dsty = pbox[i].y1 + blit->delta_dy;
		int h = pbox[i].y2 - pbox[i].y1;
		int w = pbox[i].x2 - pbox[i].x1 - 1;
		int start;

		/* Mirror the source row index for bottom-up copies. */
		if (blit->ydir == -1)
			srcy = blit->height - srcy - 1;

		start = srcy * blit->src_pitch + srcx;

		DMA_BLOCK(MGA_AR0, start + w,
			  MGA_AR3, start,
			  MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
			  MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);
	}

	/* Do something to flush AGP?
	 */

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_PITCH, dev_priv->front_pitch, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();
}
825
/* ================================================================
 * Ioctl handlers
 */
829
830static int mga_dma_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
831{
832 drm_mga_private_t *dev_priv = dev->dev_private;
833 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
834 drm_mga_clear_t *clear = data;
835
836 LOCK_TEST_WITH_RETURN(dev, file_priv);
837
838 if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
839 sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;
840
841 WRAP_TEST_WITH_RETURN(dev_priv);
842
843 mga_dma_dispatch_clear(dev, clear);
844
845 /* Make sure we restore the 3D state next time.
846 */
847 dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
848
849 return 0;
850}
851
852static int mga_dma_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
853{
854 drm_mga_private_t *dev_priv = dev->dev_private;
855 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
856
857 LOCK_TEST_WITH_RETURN(dev, file_priv);
858
859 if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
860 sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;
861
862 WRAP_TEST_WITH_RETURN(dev_priv);
863
864 mga_dma_dispatch_swap(dev);
865
866 /* Make sure we restore the 3D state next time.
867 */
868 dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
869
870 return 0;
871}
872
873static int mga_dma_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
874{
875 drm_mga_private_t *dev_priv = dev->dev_private;
876 struct drm_device_dma *dma = dev->dma;
877 struct drm_buf *buf;
878 drm_mga_buf_priv_t *buf_priv;
879 drm_mga_vertex_t *vertex = data;
880
881 LOCK_TEST_WITH_RETURN(dev, file_priv);
882
883 if (vertex->idx < 0 || vertex->idx > dma->buf_count)
884 return -EINVAL;
885 buf = dma->buflist[vertex->idx];
886 buf_priv = buf->dev_private;
887
888 buf->used = vertex->used;
889 buf_priv->discard = vertex->discard;
890
891 if (!mga_verify_state(dev_priv)) {
892 if (vertex->discard) {
893 if (buf_priv->dispatched == 1)
894 AGE_BUFFER(buf_priv);
895 buf_priv->dispatched = 0;
896 mga_freelist_put(dev, buf);
897 }
898 return -EINVAL;
899 }
900
901 WRAP_TEST_WITH_RETURN(dev_priv);
902
903 mga_dma_dispatch_vertex(dev, buf);
904
905 return 0;
906}
907
908static int mga_dma_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
909{
910 drm_mga_private_t *dev_priv = dev->dev_private;
911 struct drm_device_dma *dma = dev->dma;
912 struct drm_buf *buf;
913 drm_mga_buf_priv_t *buf_priv;
914 drm_mga_indices_t *indices = data;
915
916 LOCK_TEST_WITH_RETURN(dev, file_priv);
917
918 if (indices->idx < 0 || indices->idx > dma->buf_count)
919 return -EINVAL;
920
921 buf = dma->buflist[indices->idx];
922 buf_priv = buf->dev_private;
923
924 buf_priv->discard = indices->discard;
925
926 if (!mga_verify_state(dev_priv)) {
927 if (indices->discard) {
928 if (buf_priv->dispatched == 1)
929 AGE_BUFFER(buf_priv);
930 buf_priv->dispatched = 0;
931 mga_freelist_put(dev, buf);
932 }
933 return -EINVAL;
934 }
935
936 WRAP_TEST_WITH_RETURN(dev_priv);
937
938 mga_dma_dispatch_indices(dev, buf, indices->start, indices->end);
939
940 return 0;
941}
942
943static int mga_dma_iload(struct drm_device *dev, void *data, struct drm_file *file_priv)
944{
945 struct drm_device_dma *dma = dev->dma;
946 drm_mga_private_t *dev_priv = dev->dev_private;
947 struct drm_buf *buf;
948 drm_mga_buf_priv_t *buf_priv;
949 drm_mga_iload_t *iload = data;
950 DRM_DEBUG("\n");
951
952 LOCK_TEST_WITH_RETURN(dev, file_priv);
953
954#if 0
955 if (mga_do_wait_for_idle(dev_priv) < 0) {
956 if (MGA_DMA_DEBUG)
957 DRM_INFO("-EBUSY\n");
958 return -EBUSY;
959 }
960#endif
961 if (iload->idx < 0 || iload->idx > dma->buf_count)
962 return -EINVAL;
963
964 buf = dma->buflist[iload->idx];
965 buf_priv = buf->dev_private;
966
967 if (mga_verify_iload(dev_priv, iload->dstorg, iload->length)) {
968 mga_freelist_put(dev, buf);
969 return -EINVAL;
970 }
971
972 WRAP_TEST_WITH_RETURN(dev_priv);
973
974 mga_dma_dispatch_iload(dev, buf, iload->dstorg, iload->length);
975
976 /* Make sure we restore the 3D state next time.
977 */
978 dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
979
980 return 0;
981}
982
983static int mga_dma_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
984{
985 drm_mga_private_t *dev_priv = dev->dev_private;
986 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
987 drm_mga_blit_t *blit = data;
988 DRM_DEBUG("\n");
989
990 LOCK_TEST_WITH_RETURN(dev, file_priv);
991
992 if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
993 sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;
994
995 if (mga_verify_blit(dev_priv, blit->srcorg, blit->dstorg))
996 return -EINVAL;
997
998 WRAP_TEST_WITH_RETURN(dev_priv);
999
1000 mga_dma_dispatch_blit(dev, blit);
1001
1002 /* Make sure we restore the 3D state next time.
1003 */
1004 dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
1005
1006 return 0;
1007}
1008
1009static int mga_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
1010{
1011 drm_mga_private_t *dev_priv = dev->dev_private;
1012 drm_mga_getparam_t *param = data;
1013 int value;
1014
1015 if (!dev_priv) {
1016 DRM_ERROR("called with no initialization\n");
1017 return -EINVAL;
1018 }
1019
1020 DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);
1021
1022 switch (param->param) {
1023 case MGA_PARAM_IRQ_NR:
1024 value = drm_dev_to_irq(dev);
1025 break;
1026 case MGA_PARAM_CARD_TYPE:
1027 value = dev_priv->chipset;
1028 break;
1029 default:
1030 return -EINVAL;
1031 }
1032
1033 if (DRM_COPY_TO_USER(param->value, &value, sizeof(int))) {
1034 DRM_ERROR("copy_to_user\n");
1035 return -EFAULT;
1036 }
1037
1038 return 0;
1039}
1040
/* DRM_IOCTL_MGA_SET_FENCE handler: hand the next fence value back to
 * the client and emit a SOFTRAP so an interrupt fires when the DMA
 * stream reaches this point.
 */
static int mga_set_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	u32 *fence = data;
	DMA_LOCALS;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	/* I would normal do this assignment in the declaration of fence,
	 * but dev_priv may be NULL.
	 */

	*fence = dev_priv->next_fence_to_post;
	dev_priv->next_fence_to_post++;

	BEGIN_DMA(1);
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_SOFTRAP, 0x00000000);
	ADVANCE_DMA();

	return 0;
}
1069
1070static int mga_wait_fence(struct drm_device *dev, void *data, struct drm_file *
1071file_priv)
1072{
1073 drm_mga_private_t *dev_priv = dev->dev_private;
1074 u32 *fence = data;
1075
1076 if (!dev_priv) {
1077 DRM_ERROR("called with no initialization\n");
1078 return -EINVAL;
1079 }
1080
1081 DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);
1082
1083 mga_driver_fence_wait(dev, fence);
1084 return 0;
1085}
1086
/* Ioctl dispatch table registered with the DRM core.  All entries
 * require authentication; init and bootstrap additionally require the
 * DRM master with root privileges.
 */
struct drm_ioctl_desc mga_ioctls[] = {
	DRM_IOCTL_DEF_DRV(MGA_INIT, mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(MGA_FLUSH, mga_dma_flush, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_RESET, mga_dma_reset, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_SWAP, mga_dma_swap, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_CLEAR, mga_dma_clear, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_VERTEX, mga_dma_vertex, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_INDICES, mga_dma_indices, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_ILOAD, mga_dma_iload, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_BLIT, mga_dma_blit, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_GETPARAM, mga_getparam, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_SET_FENCE, mga_set_fence, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_WAIT_FENCE, mga_wait_fence, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_DMA_BOOTSTRAP, mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
};

/* Number of entries in mga_ioctls, exported to the DRM core. */
int mga_max_ioctl = DRM_ARRAY_SIZE(mga_ioctls);
1/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
2 * Created: Thu Jan 27 02:53:43 2000 by jhartmann@precisioninsight.com
3 *
4 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
5 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
6 * All Rights Reserved.
7 *
8 * Permission is hereby granted, free of charge, to any person obtaining a
9 * copy of this software and associated documentation files (the "Software"),
10 * to deal in the Software without restriction, including without limitation
11 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
12 * and/or sell copies of the Software, and to permit persons to whom the
13 * Software is furnished to do so, subject to the following conditions:
14 *
15 * The above copyright notice and this permission notice (including the next
16 * paragraph) shall be included in all copies or substantial portions of the
17 * Software.
18 *
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
22 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
23 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
24 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
25 * OTHER DEALINGS IN THE SOFTWARE.
26 *
27 * Authors:
28 * Jeff Hartmann <jhartmann@valinux.com>
29 * Keith Whitwell <keith@tungstengraphics.com>
30 *
31 * Rewritten by:
32 * Gareth Hughes <gareth@valinux.com>
33 */
34
35#include "mga_drv.h"
36
37/* ================================================================
38 * DMA hardware state programming functions
39 */
40
/* Emit a DMA block programming the hardware clip rectangle registers
 * (CXBNDRY, YTOP, YBOT) to the given box.  Called once per cliprect
 * before dispatching a primitive so drawing is confined to that box.
 */
static void mga_emit_clip_rect(drm_mga_private_t *dev_priv,
			       struct drm_clip_rect *box)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	unsigned int pitch = dev_priv->front_pitch;
	DMA_LOCALS;

	BEGIN_DMA(2);

	/* Force reset of DWGCTL on G400 (eliminates clip disable bit).
	 */
	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000,
			  MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000);
	}
	/* CXBNDRY/YBOT take inclusive right/bottom edges, hence the -1;
	 * YTOP/YBOT are byte offsets, hence the scale by pitch.
	 */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
		  MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);

	ADVANCE_DMA();
}
65
/* Upload the G200 drawing-context registers from the SAREA context
 * state (destination, plane mask, draw control, alpha/fog, depth org).
 */
static __inline__ void mga_g200_emit_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

	/* DMAPAD entries pad the block out to the required 4-register unit. */
	DMA_BLOCK(MGA_FCOL, ctx->fcol,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}
88
/* Upload the G400 drawing-context registers; the G400 additionally has
 * dual texture stages and stencil state compared with the G200 path.
 */
static __inline__ void mga_g400_emit_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

	/* NOTE(review): WFLAG1 is loaded with ctx->wflag (there is no
	 * separate wflag1 field in use here) -- presumably intentional
	 * mirroring of the first stage; confirm against the SAREA layout.
	 */
	DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
		  MGA_TDUALSTAGE0, ctx->tdualstage0,
		  MGA_TDUALSTAGE1, ctx->tdualstage1, MGA_FCOL, ctx->fcol);

	DMA_BLOCK(MGA_STENCIL, ctx->stencil,
		  MGA_STENCILCTL, ctx->stencilctl,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}
115
/* Upload texture-unit 0 state for the G200 from the SAREA, including
 * the WARP setup registers (WR24/WR34) that shadow width/height.
 */
static __inline__ void mga_g200_emit_tex0(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR24, tex->texwidth);

	DMA_BLOCK(MGA_WR34, tex->texheight,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}
143
/* Upload texture-unit 0 state for the G400.  The G400 needs the
 * MGA_G400_TC2_MAGIC bit set in TEXCTL2 and a set of WARP setup
 * registers (WR49..WR62) loaded with magic values alongside the
 * texture dimensions.
 */
static __inline__ void mga_g400_emit_tex0(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

/*	printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
/*	       tex->texctl, tex->texctl2); */

	BEGIN_DMA(6);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

	DMA_BLOCK(MGA_WR57, 0x00000000,
		  MGA_WR53, 0x00000000,
		  MGA_WR61, 0x00000000, MGA_WR52, MGA_G400_WR_MAGIC);

	DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
		  MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
		  MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
		  MGA_DMAPAD, 0x00000000);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_TEXTRANS, 0x0000ffff, MGA_TEXTRANSHIGH, 0x0000ffff);

	ADVANCE_DMA();
}
183
/* Upload texture-unit 1 state for the G400 (second stage of dual
 * texturing); TEXCTL2 gets MGA_MAP1_ENABLE so the writes target the
 * second map, and is restored to the single-map value at the end.
 */
static __inline__ void mga_g400_emit_tex1(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];
	DMA_LOCALS;

/*	printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg, */
/*	       tex->texctl, tex->texctl2); */

	BEGIN_DMA(5);

	DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
				MGA_MAP1_ENABLE |
				MGA_G400_TC2_MAGIC),
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

	DMA_BLOCK(MGA_WR57, 0x00000000,
		  MGA_WR53, 0x00000000,
		  MGA_WR61, 0x00000000,
		  MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);

	/* Final write drops MAP1_ENABLE, returning TEXCTL2 to map 0. */
	DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff,
		  MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);

	ADVANCE_DMA();
}
222
/* Switch the G200 WARP setup-engine microcode pipe: suspend the engine,
 * program the vertex size and setup registers, then restart it at the
 * physical address of the selected pipe's microcode.
 */
static __inline__ void mga_g200_emit_pipe(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
		  MGA_WVRTXSZ, 0x00000007,
		  MGA_WFLAG, 0x00000000, MGA_WR24, 0x00000000);

	DMA_BLOCK(MGA_WR25, 0x00000100,
		  MGA_WR34, 0x00000000,
		  MGA_WR42, 0x0000ffff, MGA_WR60, 0x0000ffff);

	/* Padding required due to hardware bug.
	 */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
			       MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}
249
/* Switch the G400 WARP microcode pipe.  Dual-texture (MGA_T2) pipes use
 * a larger vertex (WVRTXSZ 0x1e09) and different accept sequence; when
 * leaving a T2 pipe a dummy one-pixel draw is emitted first to flush
 * the WARP pipeline before reprogramming it.
 */
static __inline__ void mga_g400_emit_pipe(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

/*	printk("mga_g400_emit_pipe %x\n", pipe); */

	BEGIN_DMA(10);

	DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	if (pipe & MGA_T2) {
		/* Dual-texture vertex layout. */
		DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

		DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x1e000000);
	} else {
		if (dev_priv->warp_pipe & MGA_T2) {
			/* Flush the WARP pipe */
			DMA_BLOCK(MGA_YDST, 0x00000000,
				  MGA_FXLEFT, 0x00000000,
				  MGA_FXRIGHT, 0x00000001,
				  MGA_DWGCTL, MGA_DWGCTL_FLUSH);

			DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
				  MGA_DWGSYNC, 0x00007000,
				  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
				  MGA_LEN + MGA_EXEC, 0x00000000);

			DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
						MGA_G400_TC2_MAGIC),
				  MGA_LEN + MGA_EXEC, 0x00000000,
				  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
				  MGA_DMAPAD, 0x00000000);
		}

		/* Single-texture vertex layout. */
		DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

		DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x18000000);
	}

	DMA_BLOCK(MGA_WFLAG, 0x00000000,
		  MGA_WFLAG1, 0x00000000,
		  MGA_WR56, MGA_G400_WR56_MAGIC, MGA_DMAPAD, 0x00000000);

	DMA_BLOCK(MGA_WR49, 0x00000000,	/* tex0              */
		  MGA_WR57, 0x00000000,	/* tex0              */
		  MGA_WR53, 0x00000000,	/* tex1              */
		  MGA_WR61, 0x00000000);	/* tex1              */

	DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC,	/* tex0 width        */
		  MGA_WR62, MGA_G400_WR_MAGIC,	/* tex0 height       */
		  MGA_WR52, MGA_G400_WR_MAGIC,	/* tex1 width        */
		  MGA_WR60, MGA_G400_WR_MAGIC);	/* tex1 height       */

	/* Padding required due to hardware bug */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
				MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}
326
327static void mga_g200_emit_state(drm_mga_private_t *dev_priv)
328{
329 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
330 unsigned int dirty = sarea_priv->dirty;
331
332 if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
333 mga_g200_emit_pipe(dev_priv);
334 dev_priv->warp_pipe = sarea_priv->warp_pipe;
335 }
336
337 if (dirty & MGA_UPLOAD_CONTEXT) {
338 mga_g200_emit_context(dev_priv);
339 sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
340 }
341
342 if (dirty & MGA_UPLOAD_TEX0) {
343 mga_g200_emit_tex0(dev_priv);
344 sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
345 }
346}
347
348static void mga_g400_emit_state(drm_mga_private_t *dev_priv)
349{
350 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
351 unsigned int dirty = sarea_priv->dirty;
352 int multitex = sarea_priv->warp_pipe & MGA_T2;
353
354 if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
355 mga_g400_emit_pipe(dev_priv);
356 dev_priv->warp_pipe = sarea_priv->warp_pipe;
357 }
358
359 if (dirty & MGA_UPLOAD_CONTEXT) {
360 mga_g400_emit_context(dev_priv);
361 sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
362 }
363
364 if (dirty & MGA_UPLOAD_TEX0) {
365 mga_g400_emit_tex0(dev_priv);
366 sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
367 }
368
369 if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
370 mga_g400_emit_tex1(dev_priv);
371 sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
372 }
373}
374
375/* ================================================================
376 * SAREA state verification
377 */
378
379/* Disallow all write destinations except the front and backbuffer.
380 */
381static int mga_verify_context(drm_mga_private_t *dev_priv)
382{
383 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
384 drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
385
386 if (ctx->dstorg != dev_priv->front_offset &&
387 ctx->dstorg != dev_priv->back_offset) {
388 DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
389 ctx->dstorg, dev_priv->front_offset,
390 dev_priv->back_offset);
391 ctx->dstorg = 0;
392 return -EINVAL;
393 }
394
395 return 0;
396}
397
398/* Disallow texture reads from PCI space.
399 */
400static int mga_verify_tex(drm_mga_private_t *dev_priv, int unit)
401{
402 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
403 drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
404 unsigned int org;
405
406 org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);
407
408 if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
409 DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
410 tex->texorg = 0;
411 return -EINVAL;
412 }
413
414 return 0;
415}
416
/* Validate all dirty SAREA state before a dispatch.  Returns nonzero
 * (true) when the state is acceptable and 0 when any check failed --
 * note the sense is inverted relative to the other verify helpers.
 */
static int mga_verify_state(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;
	int ret = 0;

	/* Clamp the client-supplied cliprect count to the SAREA capacity. */
	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	if (dirty & MGA_UPLOAD_CONTEXT)
		ret |= mga_verify_context(dev_priv);

	if (dirty & MGA_UPLOAD_TEX0)
		ret |= mga_verify_tex(dev_priv, 0);

	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		if (dirty & MGA_UPLOAD_TEX1)
			ret |= mga_verify_tex(dev_priv, 1);

		/* NOTE(review): '>' lets warp_pipe == MGA_MAX_G400_PIPES
		 * through; the emit paths index warp_pipe_phys[pipe], so
		 * this looks like it should be '>=' -- confirm against the
		 * warp_pipe_phys[] array size in mga_drv.h.
		 */
		if (dirty & MGA_UPLOAD_PIPE)
			ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
	} else {
		if (dirty & MGA_UPLOAD_PIPE)
			ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
	}

	return (ret == 0);
}
445
446static int mga_verify_iload(drm_mga_private_t *dev_priv,
447 unsigned int dstorg, unsigned int length)
448{
449 if (dstorg < dev_priv->texture_offset ||
450 dstorg + length > (dev_priv->texture_offset +
451 dev_priv->texture_size)) {
452 DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
453 return -EINVAL;
454 }
455
456 if (length & MGA_ILOAD_MASK) {
457 DRM_ERROR("*** bad iload length: 0x%x\n",
458 length & MGA_ILOAD_MASK);
459 return -EINVAL;
460 }
461
462 return 0;
463}
464
465static int mga_verify_blit(drm_mga_private_t *dev_priv,
466 unsigned int srcorg, unsigned int dstorg)
467{
468 if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
469 (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
470 DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
471 return -EINVAL;
472 }
473 return 0;
474}
475
476/* ================================================================
477 *
478 */
479
/* Emit the DMA stream that clears the front/back/depth buffers, one
 * fill blit per cliprect per buffer, as selected by clear->flags.
 */
static void mga_dma_dispatch_clear(struct drm_device *dev, drm_mga_clear_t *clear)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(1);

	/* Sync the drawing engine before touching DSTORG below. */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;

		DRM_DEBUG("   from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		/* Front buffer clear. */
		if (clear->flags & MGA_FRONT) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->front_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		/* Back buffer clear. */
		if (clear->flags & MGA_BACK) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->back_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		/* Depth buffer clear (uses the depth mask/value). */
		if (clear->flags & MGA_DEPTH) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->depth_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_depth,
				  MGA_DSTORG, dev_priv->depth_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

	}

	BEGIN_DMA(1);

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();
}
567
/* Emit the DMA stream that copies the back buffer to the front buffer,
 * one screen-to-screen blit per cliprect, and records the frame marker
 * (head/wrap) in the SAREA so clients can throttle on it.
 */
static void mga_dma_dispatch_swap(struct drm_device *dev)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	/* Snapshot the primary DMA position for frame throttling. */
	sarea_priv->last_frame.head = dev_priv->prim.tail;
	sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;

	BEGIN_DMA(4 + nbox);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	/* Set up a back -> front copy with the front pitch as stride. */
	DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_SRCORG, dev_priv->back_offset,
		  MGA_AR5, dev_priv->front_pitch);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, 0xffffffff, MGA_DWGCTL, MGA_DWGCTL_COPY);

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;
		u32 start = box->y1 * dev_priv->front_pitch;

		DRM_DEBUG("   from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
			  MGA_AR3, start + box->x1,
			  MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
			  MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);
	}

	/* Restore SRCORG/PLNWT/DWGCTL for subsequent 3D rendering. */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();

	DRM_DEBUG("... done.\n");
}
621
/* Dispatch a vertex buffer: emit any dirty state, then replay the
 * buffer once per cliprect via SECADDRESS/SECEND.  When the client
 * marked the buffer discardable it is aged and returned to the
 * freelist after dispatch.
 */
static void mga_dma_dispatch_vertex(struct drm_device *dev, struct drm_buf *buf)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	u32 length = (u32) buf->used;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	if (buf->used) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		/* do/while: the buffer is dispatched at least once even
		 * when nbox == 0 (no cliprects -> no clip-rect update).
		 */
		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SECADDRESS, (address |
						   MGA_DMA_VERTEX),
				  MGA_SECEND, ((address + length) |
					       dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}
668
/* Dispatch an indexed-primitive buffer: emit any dirty state, then
 * replay the [start, end) byte range of the buffer once per cliprect
 * via SETUPADDRESS/SETUPEND.  Discardable buffers are aged and
 * returned to the freelist after dispatch.
 */
static void mga_dma_dispatch_indices(struct drm_device *dev, struct drm_buf *buf,
				     unsigned int start, unsigned int end)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d start=%d end=%d\n", buf->idx, start, end);

	if (start != end) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		/* do/while: dispatch at least once even when nbox == 0. */
		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SETUPADDRESS, address + start,
				  MGA_SETUPEND, ((address + end) |
						 dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}
714
/* This copies a 64-byte-aligned AGP region to the framebuffer with a
 * standard blit; the ioctl must have validated dstorg/length already.
 */
/* Blit buf's AGP contents into on-card memory at dstorg.  The data is
 * treated as a 64-pixel-wide image of length/64 rows (hence the fixed
 * pitch and AR0/FXBNDRY values); the buffer is aged and freed after
 * the copy is queued.
 */
static void mga_dma_dispatch_iload(struct drm_device *dev, struct drm_buf *buf,
				   unsigned int dstorg, unsigned int length)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
	u32 srcorg =
	    buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;
	u32 y2;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	/* Row count for a 64-pixel-wide transfer. */
	y2 = length / 64;

	BEGIN_DMA(5);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dstorg,
		  MGA_MACCESS, 0x00000000, MGA_SRCORG, srcorg, MGA_AR5, 64);

	DMA_BLOCK(MGA_PITCH, 64,
		  MGA_PLNWT, 0xffffffff,
		  MGA_DMAPAD, 0x00000000, MGA_DWGCTL, MGA_DWGCTL_COPY);

	DMA_BLOCK(MGA_AR0, 63,
		  MGA_AR3, 0,
		  MGA_FXBNDRY, (63 << 16) | 0, MGA_YDSTLEN + MGA_EXEC, y2);

	/* Restore PLNWT/SRCORG/PITCH for normal rendering afterwards. */
	DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset,
		  MGA_PITCH, dev_priv->front_pitch, MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	AGE_BUFFER(buf_priv);

	buf->pending = 0;
	buf->used = 0;
	buf_priv->dispatched = 0;

	mga_freelist_put(dev, buf);

	FLUSH_DMA();
}
765
/* Emit a screen-to-screen blit for each cliprect using the parameters
 * in *blit (origins, pitches, deltas, y direction).  The blit request
 * is expected to have been validated by mga_verify_blit().
 */
static void mga_dma_dispatch_blit(struct drm_device *dev, drm_mga_blit_t *blit)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	u32 scandir = 0, i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(4 + nbox);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
		  MGA_PLNWT, blit->planemask,
		  MGA_SRCORG, blit->srcorg, MGA_DSTORG, blit->dstorg);

	/* AR5 is the signed source stride: negative for bottom-up blits. */
	DMA_BLOCK(MGA_SGN, scandir,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_AR5, blit->ydir * blit->src_pitch,
		  MGA_PITCH, blit->dst_pitch);

	for (i = 0; i < nbox; i++) {
		int srcx = pbox[i].x1 + blit->delta_sx;
		int srcy = pbox[i].y1 + blit->delta_sy;
		int dstx = pbox[i].x1 + blit->delta_dx;
		int dsty = pbox[i].y1 + blit->delta_dy;
		int h = pbox[i].y2 - pbox[i].y1;
		int w = pbox[i].x2 - pbox[i].x1 - 1;
		int start;

		/* Mirror the source row for bottom-up copies. */
		if (blit->ydir == -1)
			srcy = blit->height - srcy - 1;

		start = srcy * blit->src_pitch + srcx;

		DMA_BLOCK(MGA_AR0, start + w,
			  MGA_AR3, start,
			  MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
			  MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);
	}

	/* Do something to flush AGP?
	 */

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_PITCH, dev_priv->front_pitch, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();
}
822
823/* ================================================================
824 *
825 */
826
827static int mga_dma_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
828{
829 drm_mga_private_t *dev_priv = dev->dev_private;
830 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
831 drm_mga_clear_t *clear = data;
832
833 LOCK_TEST_WITH_RETURN(dev, file_priv);
834
835 if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
836 sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;
837
838 WRAP_TEST_WITH_RETURN(dev_priv);
839
840 mga_dma_dispatch_clear(dev, clear);
841
842 /* Make sure we restore the 3D state next time.
843 */
844 dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
845
846 return 0;
847}
848
849static int mga_dma_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
850{
851 drm_mga_private_t *dev_priv = dev->dev_private;
852 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
853
854 LOCK_TEST_WITH_RETURN(dev, file_priv);
855
856 if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
857 sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;
858
859 WRAP_TEST_WITH_RETURN(dev_priv);
860
861 mga_dma_dispatch_swap(dev);
862
863 /* Make sure we restore the 3D state next time.
864 */
865 dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
866
867 return 0;
868}
869
870static int mga_dma_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
871{
872 drm_mga_private_t *dev_priv = dev->dev_private;
873 struct drm_device_dma *dma = dev->dma;
874 struct drm_buf *buf;
875 drm_mga_buf_priv_t *buf_priv;
876 drm_mga_vertex_t *vertex = data;
877
878 LOCK_TEST_WITH_RETURN(dev, file_priv);
879
880 if (vertex->idx < 0 || vertex->idx > dma->buf_count)
881 return -EINVAL;
882 buf = dma->buflist[vertex->idx];
883 buf_priv = buf->dev_private;
884
885 buf->used = vertex->used;
886 buf_priv->discard = vertex->discard;
887
888 if (!mga_verify_state(dev_priv)) {
889 if (vertex->discard) {
890 if (buf_priv->dispatched == 1)
891 AGE_BUFFER(buf_priv);
892 buf_priv->dispatched = 0;
893 mga_freelist_put(dev, buf);
894 }
895 return -EINVAL;
896 }
897
898 WRAP_TEST_WITH_RETURN(dev_priv);
899
900 mga_dma_dispatch_vertex(dev, buf);
901
902 return 0;
903}
904
905static int mga_dma_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
906{
907 drm_mga_private_t *dev_priv = dev->dev_private;
908 struct drm_device_dma *dma = dev->dma;
909 struct drm_buf *buf;
910 drm_mga_buf_priv_t *buf_priv;
911 drm_mga_indices_t *indices = data;
912
913 LOCK_TEST_WITH_RETURN(dev, file_priv);
914
915 if (indices->idx < 0 || indices->idx > dma->buf_count)
916 return -EINVAL;
917
918 buf = dma->buflist[indices->idx];
919 buf_priv = buf->dev_private;
920
921 buf_priv->discard = indices->discard;
922
923 if (!mga_verify_state(dev_priv)) {
924 if (indices->discard) {
925 if (buf_priv->dispatched == 1)
926 AGE_BUFFER(buf_priv);
927 buf_priv->dispatched = 0;
928 mga_freelist_put(dev, buf);
929 }
930 return -EINVAL;
931 }
932
933 WRAP_TEST_WITH_RETURN(dev_priv);
934
935 mga_dma_dispatch_indices(dev, buf, indices->start, indices->end);
936
937 return 0;
938}
939
940static int mga_dma_iload(struct drm_device *dev, void *data, struct drm_file *file_priv)
941{
942 struct drm_device_dma *dma = dev->dma;
943 drm_mga_private_t *dev_priv = dev->dev_private;
944 struct drm_buf *buf;
945 drm_mga_buf_priv_t *buf_priv;
946 drm_mga_iload_t *iload = data;
947 DRM_DEBUG("\n");
948
949 LOCK_TEST_WITH_RETURN(dev, file_priv);
950
951#if 0
952 if (mga_do_wait_for_idle(dev_priv) < 0) {
953 if (MGA_DMA_DEBUG)
954 DRM_INFO("-EBUSY\n");
955 return -EBUSY;
956 }
957#endif
958 if (iload->idx < 0 || iload->idx > dma->buf_count)
959 return -EINVAL;
960
961 buf = dma->buflist[iload->idx];
962 buf_priv = buf->dev_private;
963
964 if (mga_verify_iload(dev_priv, iload->dstorg, iload->length)) {
965 mga_freelist_put(dev, buf);
966 return -EINVAL;
967 }
968
969 WRAP_TEST_WITH_RETURN(dev_priv);
970
971 mga_dma_dispatch_iload(dev, buf, iload->dstorg, iload->length);
972
973 /* Make sure we restore the 3D state next time.
974 */
975 dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
976
977 return 0;
978}
979
980static int mga_dma_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
981{
982 drm_mga_private_t *dev_priv = dev->dev_private;
983 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
984 drm_mga_blit_t *blit = data;
985 DRM_DEBUG("\n");
986
987 LOCK_TEST_WITH_RETURN(dev, file_priv);
988
989 if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
990 sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;
991
992 if (mga_verify_blit(dev_priv, blit->srcorg, blit->dstorg))
993 return -EINVAL;
994
995 WRAP_TEST_WITH_RETURN(dev_priv);
996
997 mga_dma_dispatch_blit(dev, blit);
998
999 /* Make sure we restore the 3D state next time.
1000 */
1001 dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
1002
1003 return 0;
1004}
1005
1006int mga_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
1007{
1008 drm_mga_private_t *dev_priv = dev->dev_private;
1009 drm_mga_getparam_t *param = data;
1010 int value;
1011
1012 if (!dev_priv) {
1013 DRM_ERROR("called with no initialization\n");
1014 return -EINVAL;
1015 }
1016
1017 DRM_DEBUG("pid=%d\n", task_pid_nr(current));
1018
1019 switch (param->param) {
1020 case MGA_PARAM_IRQ_NR:
1021 value = dev->pdev->irq;
1022 break;
1023 case MGA_PARAM_CARD_TYPE:
1024 value = dev_priv->chipset;
1025 break;
1026 default:
1027 return -EINVAL;
1028 }
1029
1030 if (copy_to_user(param->value, &value, sizeof(int))) {
1031 DRM_ERROR("copy_to_user\n");
1032 return -EFAULT;
1033 }
1034
1035 return 0;
1036}
1037
/* MGA_SET_FENCE ioctl: hand the next fence sequence number back to the
 * caller and queue a SOFTRAP so the interrupt handler can retire it.
 */
static int mga_set_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	u32 *fence = data;
	DMA_LOCALS;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", task_pid_nr(current));

	/* I would normal do this assignment in the declaration of fence,
	 * but dev_priv may be NULL.
	 */

	*fence = dev_priv->next_fence_to_post;
	dev_priv->next_fence_to_post++;

	/* SOFTRAP raises an interrupt once the DMA stream reaches it. */
	BEGIN_DMA(1);
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_SOFTRAP, 0x00000000);
	ADVANCE_DMA();

	return 0;
}
1066
1067static int mga_wait_fence(struct drm_device *dev, void *data, struct drm_file *
1068file_priv)
1069{
1070 drm_mga_private_t *dev_priv = dev->dev_private;
1071 u32 *fence = data;
1072
1073 if (!dev_priv) {
1074 DRM_ERROR("called with no initialization\n");
1075 return -EINVAL;
1076 }
1077
1078 DRM_DEBUG("pid=%d\n", task_pid_nr(current));
1079
1080 mga_driver_fence_wait(dev, fence);
1081 return 0;
1082}
1083
/* Driver ioctl dispatch table.  INIT and DMA_BOOTSTRAP are restricted
 * to the root master; everything else only requires DRM authentication.
 */
const struct drm_ioctl_desc mga_ioctls[] = {
	DRM_IOCTL_DEF_DRV(MGA_INIT, mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(MGA_FLUSH, mga_dma_flush, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_RESET, mga_dma_reset, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_SWAP, mga_dma_swap, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_CLEAR, mga_dma_clear, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_VERTEX, mga_dma_vertex, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_INDICES, mga_dma_indices, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_ILOAD, mga_dma_iload, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_BLIT, mga_dma_blit, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_GETPARAM, mga_getparam, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_SET_FENCE, mga_set_fence, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_WAIT_FENCE, mga_wait_fence, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_DMA_BOOTSTRAP, mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
};

/* Number of entries in the table above. */
int mga_max_ioctl = ARRAY_SIZE(mga_ioctls);