        unsigned int pipe = sarea_priv->warp_pipe;

        mga_g200_emit_pipe(dev_priv);

        mga_g200_emit_context(dev_priv);
        sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;

        mga_g200_emit_tex0(dev_priv);
        sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;

        unsigned int dirty = sarea_priv->dirty;

        mga_g400_emit_pipe(dev_priv);

        if (dirty & MGA_UPLOAD_CONTEXT) {
                mga_g400_emit_context(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;

        mga_g400_emit_tex0(dev_priv);
        sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;

        mga_g400_emit_tex1(dev_priv);
        sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
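/*
 * Both the G200 and G400 paths above follow the same pattern: a hardware
 * state block is emitted only when its MGA_UPLOAD_* bit is set in
 * sarea_priv->dirty, and the bit is cleared afterwards so the state is not
 * re-uploaded until user space marks it dirty again.  The mga_verify_*()
 * helpers below sanity-check the user-supplied offsets and lengths against
 * the card's memory layout before any of that state is dispatched.
 */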
                DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
                          ctx->dstorg, dev_priv->front_offset,
                          dev_priv->back_offset);
                DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
        unsigned int dirty = sarea_priv->dirty;

        if (dirty & MGA_UPLOAD_CONTEXT)
                ret |= mga_verify_context(dev_priv);

        if (dirty & MGA_UPLOAD_TEX0)
                ret |= mga_verify_tex(dev_priv, 0);

        ret |= mga_verify_tex(dev_priv, 1);
static int mga_verify_iload(drm_mga_private_t *dev_priv,
                            unsigned int dstorg, unsigned int length)

        if (dstorg < dev_priv->texture_offset ||
            dstorg + length > (dev_priv->texture_offset + dev_priv->texture_size)) {
                DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);

                DRM_ERROR("*** bad iload length: 0x%x\n",
                          length & MGA_ILOAD_MASK);
static int mga_verify_blit(drm_mga_private_t *dev_priv,
                           unsigned int srcorg, unsigned int dstorg)

                DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
        int nbox = sarea_priv->nbox;

        for (i = 0; i < nbox; i++) {

                DRM_DEBUG(" from=%d,%d to=%d,%d\n",
                          box->x1, box->y1, box->x2, box->y2);
static void mga_dma_dispatch_swap(struct drm_device *dev)

        int nbox = sarea_priv->nbox;

        for (i = 0; i < nbox; i++) {

                u32 height = box->y2 - box->y1;

                DRM_DEBUG(" from=%d,%d to=%d,%d\n",
                          box->x1, box->y1, box->x2, box->y2);

        DRM_DEBUG("... done.\n");
static void mga_dma_dispatch_vertex(struct drm_device *dev, struct drm_buf *buf)

        u32 length = (u32) buf->used;

        DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

        if (i < sarea_priv->nbox) {
                mga_emit_clip_rect(dev_priv, &sarea_priv->boxes[i]);

        } while (++i < sarea_priv->nbox);
static void mga_dma_dispatch_indices(struct drm_device *dev, struct drm_buf *buf,
                                     unsigned int start, unsigned int end)

        u32 address = (u32) buf->bus_address;

        DRM_DEBUG("buf=%d start=%d end=%d\n", buf->idx, start, end);

        if (i < sarea_priv->nbox) {
                mga_emit_clip_rect(dev_priv, &sarea_priv->boxes[i]);

        } while (++i < sarea_priv->nbox);
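/*
 * Note: both the vertex and the indexed-vertex dispatchers loop over
 * sarea_priv->nbox cliprects, re-emitting the current clip rectangle with
 * mga_emit_clip_rect() before each pass, so the buffer is replayed once per
 * cliprect.
 */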
static void mga_dma_dispatch_iload(struct drm_device *dev, struct drm_buf *buf,
                                   unsigned int dstorg, unsigned int length)

        DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);
        int nbox = sarea_priv->nbox;

        for (i = 0; i < nbox; i++) {

                int h = pbox[i].y2 - pbox[i].y1;
                int w = pbox[i].x2 - pbox[i].x1 - 1;

                if (blit->ydir == -1)
                        srcy = blit->height - srcy - 1;
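/*
 * Note: the blit dispatcher clips the copy to each box, computing the per-box
 * width and height from the cliprect, and mirrors the source row (srcy) when
 * blit->ydir == -1, i.e. when the copy walks the surface in the opposite
 * vertical direction.
 */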
static int mga_dma_clear(struct drm_device *dev, void *data,
                         struct drm_file *file_priv)

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        mga_dma_dispatch_clear(dev, clear);
static int mga_dma_swap(struct drm_device *dev, void *data,
                        struct drm_file *file_priv)

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        mga_dma_dispatch_swap(dev);
static int mga_dma_vertex(struct drm_device *dev, void *data,
                          struct drm_file *file_priv)

        struct drm_device_dma *dma = dev->dma;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (vertex->idx < 0 || vertex->idx > dma->buf_count)

        buf = dma->buflist[vertex->idx];
        buf_priv = buf->dev_private;

        buf->used = vertex->used;

        if (!mga_verify_state(dev_priv)) {

        mga_dma_dispatch_vertex(dev, buf);
static int mga_dma_indices(struct drm_device *dev, void *data,
                           struct drm_file *file_priv)

        struct drm_device_dma *dma = dev->dma;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (indices->idx < 0 || indices->idx > dma->buf_count)

        buf = dma->buflist[indices->idx];
        buf_priv = buf->dev_private;

        if (!mga_verify_state(dev_priv)) {

        mga_dma_dispatch_indices(dev, buf, indices->start, indices->end);
static int mga_dma_iload(struct drm_device *dev, void *data,
                         struct drm_file *file_priv)

        struct drm_device_dma *dma = dev->dma;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        DRM_INFO("-EBUSY\n");

        if (iload->idx < 0 || iload->idx > dma->buf_count)

        buf = dma->buflist[iload->idx];
        buf_priv = buf->dev_private;

        if (mga_verify_iload(dev_priv, iload->dstorg, iload->length)) {

        mga_dma_dispatch_iload(dev, buf, iload->dstorg, iload->length);
static int mga_dma_blit(struct drm_device *dev, void *data,
                        struct drm_file *file_priv)

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (mga_verify_blit(dev_priv, blit->srcorg, blit->dstorg))

        mga_dma_dispatch_blit(dev, blit);
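/*
 * Note: the rendering ioctls above share the same shape: check that the
 * caller holds the hardware lock (LOCK_TEST_WITH_RETURN), validate the
 * request (buffer index against dma->buf_count, submitted state and offsets
 * via mga_verify_*()), and only then hand off to the matching
 * mga_dma_dispatch_*() helper.
 */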
static int mga_getparam(struct drm_device *dev, void *data,
                        struct drm_file *file_priv)

        DRM_ERROR("called with no initialization\n");

        switch (param->param) {

                value = drm_dev_to_irq(dev);

        DRM_ERROR("copy_to_user\n");
static int mga_set_fence(struct drm_device *dev, void *data,
                         struct drm_file *file_priv)

        DRM_ERROR("called with no initialization\n");
static int mga_wait_fence(struct drm_device *dev, void *data,
                          struct drm_file *file_priv)

        DRM_ERROR("called with no initialization\n");
        DRM_IOCTL_DEF_DRV(MGA_INIT, mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF_DRV(MGA_SWAP, mga_dma_swap, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_CLEAR, mga_dma_clear, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_VERTEX, mga_dma_vertex, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_INDICES, mga_dma_indices, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_ILOAD, mga_dma_iload, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_BLIT, mga_dma_blit, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_GETPARAM, mga_getparam, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_SET_FENCE, mga_set_fence, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_WAIT_FENCE, mga_wait_fence, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_DMA_BOOTSTRAP, mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
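/*
 * Note: the plain rendering and query ioctls (swap, clear, vertex, indices,
 * iload, blit, getparam and the fence calls) only require an authenticated
 * client (DRM_AUTH), while MGA_INIT and MGA_DMA_BOOTSTRAP additionally
 * require the DRM master and root (DRM_MASTER|DRM_ROOT_ONLY), since they
 * (re)initialize the device.
 */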