Linux Kernel  3.7.1
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Macros Groups Pages
nv04.c
Go to the documentation of this file.
1 /*
2  * Copyright 2007 Stephane Marchesin
3  * All Rights Reserved.
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a
6  * copy of this software and associated documentation files (the "Software"),
7  * to deal in the Software without restriction, including without limitation
8  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9  * and/or sell copies of the Software, and to permit persons to whom the
10  * Software is furnished to do so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the next
13  * paragraph) shall be included in all copies or substantial portions of the
14  * Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
20  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22  * DEALINGS IN THE SOFTWARE.
23  */
24 
25 #include <core/os.h>
26 #include <core/class.h>
27 #include <core/handle.h>
28 #include <core/namedb.h>
29 
30 #include <subdev/fb.h>
31 #include <subdev/instmem.h>
32 #include <subdev/timer.h>
33 
34 #include <engine/fifo.h>
35 #include <engine/graph.h>
36 
37 #include "regs.h"
38 
39 static u32
40 nv04_graph_ctx_regs[] = {
41  0x0040053c,
42  0x00400544,
43  0x00400540,
44  0x00400548,
53  0x00400184,
54  0x004001a4,
55  0x004001c4,
56  0x004001e4,
57  0x00400188,
58  0x004001a8,
59  0x004001c8,
60  0x004001e8,
61  0x0040018c,
62  0x004001ac,
63  0x004001cc,
64  0x004001ec,
65  0x00400190,
66  0x004001b0,
67  0x004001d0,
68  0x004001f0,
69  0x00400194,
70  0x004001b4,
71  0x004001d4,
72  0x004001f4,
73  0x00400198,
74  0x004001b8,
75  0x004001d8,
76  0x004001f8,
77  0x0040019c,
78  0x004001bc,
79  0x004001dc,
80  0x004001fc,
81  0x00400174,
119  NV04_PGRAPH_PATT_COLORRAM+0x04,
120  NV04_PGRAPH_PATT_COLORRAM+0x08,
121  NV04_PGRAPH_PATT_COLORRAM+0x0c,
122  NV04_PGRAPH_PATT_COLORRAM+0x10,
123  NV04_PGRAPH_PATT_COLORRAM+0x14,
124  NV04_PGRAPH_PATT_COLORRAM+0x18,
125  NV04_PGRAPH_PATT_COLORRAM+0x1c,
126  NV04_PGRAPH_PATT_COLORRAM+0x20,
127  NV04_PGRAPH_PATT_COLORRAM+0x24,
128  NV04_PGRAPH_PATT_COLORRAM+0x28,
129  NV04_PGRAPH_PATT_COLORRAM+0x2c,
130  NV04_PGRAPH_PATT_COLORRAM+0x30,
131  NV04_PGRAPH_PATT_COLORRAM+0x34,
132  NV04_PGRAPH_PATT_COLORRAM+0x38,
133  NV04_PGRAPH_PATT_COLORRAM+0x3c,
134  NV04_PGRAPH_PATT_COLORRAM+0x40,
135  NV04_PGRAPH_PATT_COLORRAM+0x44,
136  NV04_PGRAPH_PATT_COLORRAM+0x48,
137  NV04_PGRAPH_PATT_COLORRAM+0x4c,
138  NV04_PGRAPH_PATT_COLORRAM+0x50,
139  NV04_PGRAPH_PATT_COLORRAM+0x54,
140  NV04_PGRAPH_PATT_COLORRAM+0x58,
141  NV04_PGRAPH_PATT_COLORRAM+0x5c,
142  NV04_PGRAPH_PATT_COLORRAM+0x60,
143  NV04_PGRAPH_PATT_COLORRAM+0x64,
144  NV04_PGRAPH_PATT_COLORRAM+0x68,
145  NV04_PGRAPH_PATT_COLORRAM+0x6c,
146  NV04_PGRAPH_PATT_COLORRAM+0x70,
147  NV04_PGRAPH_PATT_COLORRAM+0x74,
148  NV04_PGRAPH_PATT_COLORRAM+0x78,
149  NV04_PGRAPH_PATT_COLORRAM+0x7c,
150  NV04_PGRAPH_PATT_COLORRAM+0x80,
151  NV04_PGRAPH_PATT_COLORRAM+0x84,
152  NV04_PGRAPH_PATT_COLORRAM+0x88,
153  NV04_PGRAPH_PATT_COLORRAM+0x8c,
154  NV04_PGRAPH_PATT_COLORRAM+0x90,
155  NV04_PGRAPH_PATT_COLORRAM+0x94,
156  NV04_PGRAPH_PATT_COLORRAM+0x98,
157  NV04_PGRAPH_PATT_COLORRAM+0x9c,
158  NV04_PGRAPH_PATT_COLORRAM+0xa0,
159  NV04_PGRAPH_PATT_COLORRAM+0xa4,
160  NV04_PGRAPH_PATT_COLORRAM+0xa8,
161  NV04_PGRAPH_PATT_COLORRAM+0xac,
162  NV04_PGRAPH_PATT_COLORRAM+0xb0,
163  NV04_PGRAPH_PATT_COLORRAM+0xb4,
164  NV04_PGRAPH_PATT_COLORRAM+0xb8,
165  NV04_PGRAPH_PATT_COLORRAM+0xbc,
166  NV04_PGRAPH_PATT_COLORRAM+0xc0,
167  NV04_PGRAPH_PATT_COLORRAM+0xc4,
168  NV04_PGRAPH_PATT_COLORRAM+0xc8,
169  NV04_PGRAPH_PATT_COLORRAM+0xcc,
170  NV04_PGRAPH_PATT_COLORRAM+0xd0,
171  NV04_PGRAPH_PATT_COLORRAM+0xd4,
172  NV04_PGRAPH_PATT_COLORRAM+0xd8,
173  NV04_PGRAPH_PATT_COLORRAM+0xdc,
174  NV04_PGRAPH_PATT_COLORRAM+0xe0,
175  NV04_PGRAPH_PATT_COLORRAM+0xe4,
176  NV04_PGRAPH_PATT_COLORRAM+0xe8,
177  NV04_PGRAPH_PATT_COLORRAM+0xec,
178  NV04_PGRAPH_PATT_COLORRAM+0xf0,
179  NV04_PGRAPH_PATT_COLORRAM+0xf4,
180  NV04_PGRAPH_PATT_COLORRAM+0xf8,
181  NV04_PGRAPH_PATT_COLORRAM+0xfc,
183  0x0040080c,
185  0x00400600,
196  0x00400560,
197  0x00400568,
198  0x00400564,
199  0x0040056c,
200  0x00400400,
201  0x00400480,
202  0x00400404,
203  0x00400484,
204  0x00400408,
205  0x00400488,
206  0x0040040c,
207  0x0040048c,
208  0x00400410,
209  0x00400490,
210  0x00400414,
211  0x00400494,
212  0x00400418,
213  0x00400498,
214  0x0040041c,
215  0x0040049c,
216  0x00400420,
217  0x004004a0,
218  0x00400424,
219  0x004004a4,
220  0x00400428,
221  0x004004a8,
222  0x0040042c,
223  0x004004ac,
224  0x00400430,
225  0x004004b0,
226  0x00400434,
227  0x004004b4,
228  0x00400438,
229  0x004004b8,
230  0x0040043c,
231  0x004004bc,
232  0x00400440,
233  0x004004c0,
234  0x00400444,
235  0x004004c4,
236  0x00400448,
237  0x004004c8,
238  0x0040044c,
239  0x004004cc,
240  0x00400450,
241  0x004004d0,
242  0x00400454,
243  0x004004d4,
244  0x00400458,
245  0x004004d8,
246  0x0040045c,
247  0x004004dc,
248  0x00400460,
249  0x004004e0,
250  0x00400464,
251  0x004004e4,
252  0x00400468,
253  0x004004e8,
254  0x0040046c,
255  0x004004ec,
256  0x00400470,
257  0x004004f0,
258  0x00400474,
259  0x004004f4,
260  0x00400478,
261  0x004004f8,
262  0x0040047c,
263  0x004004fc,
264  0x00400534,
265  0x00400538,
266  0x00400514,
267  0x00400518,
268  0x0040051c,
269  0x00400520,
270  0x00400524,
271  0x00400528,
272  0x0040052c,
273  0x00400530,
274  0x00400d00,
275  0x00400d40,
276  0x00400d80,
277  0x00400d04,
278  0x00400d44,
279  0x00400d84,
280  0x00400d08,
281  0x00400d48,
282  0x00400d88,
283  0x00400d0c,
284  0x00400d4c,
285  0x00400d8c,
286  0x00400d10,
287  0x00400d50,
288  0x00400d90,
289  0x00400d14,
290  0x00400d54,
291  0x00400d94,
292  0x00400d18,
293  0x00400d58,
294  0x00400d98,
295  0x00400d1c,
296  0x00400d5c,
297  0x00400d9c,
298  0x00400d20,
299  0x00400d60,
300  0x00400da0,
301  0x00400d24,
302  0x00400d64,
303  0x00400da4,
304  0x00400d28,
305  0x00400d68,
306  0x00400da8,
307  0x00400d2c,
308  0x00400d6c,
309  0x00400dac,
310  0x00400d30,
311  0x00400d70,
312  0x00400db0,
313  0x00400d34,
314  0x00400d74,
315  0x00400db4,
316  0x00400d38,
317  0x00400d78,
318  0x00400db8,
319  0x00400d3c,
320  0x00400d7c,
321  0x00400dbc,
322  0x00400590,
323  0x00400594,
324  0x00400598,
325  0x0040059c,
326  0x004005a8,
327  0x004005ac,
328  0x004005b0,
329  0x004005b4,
330  0x004005c0,
331  0x004005c4,
332  0x004005c8,
333  0x004005cc,
334  0x004005d0,
335  0x004005d4,
336  0x004005d8,
337  0x004005dc,
338  0x004005e0,
347  0x00400500,
348  0x00400504,
352 };
353 
356  struct nv04_graph_chan *chan[16];
358 };
359 
362  int chid;
363  u32 nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
364 };
365 
366 
367 static inline struct nv04_graph_priv *
369 {
370  return (void *)nv_object(chan)->engine;
371 }
372 
373 /*******************************************************************************
374  * Graphics object classes
375  ******************************************************************************/
376 
377 /*
378  * Software methods, why they are needed, and how they all work:
379  *
380  * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
381  * 2d engine settings are kept inside the grobjs themselves. The grobjs are
382  * 3 words long on both. grobj format on NV04 is:
383  *
384  * word 0:
385  * - bits 0-7: class
386  * - bit 12: color key active
387  * - bit 13: clip rect active
388  * - bit 14: if set, destination surface is swizzled and taken from buffer 5
389  * [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
390  * from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
391  * NV03_CONTEXT_SURFACE_DST].
392  * - bits 15-17: 2d operation [aka patch config]
393  * - bit 24: patch valid [enables rendering using this object]
394  * - bit 25: surf3d valid [for tex_tri and multitex_tri only]
395  * word 1:
396  * - bits 0-1: mono format
397  * - bits 8-13: color format
398  * - bits 16-31: DMA_NOTIFY instance
399  * word 2:
400  * - bits 0-15: DMA_A instance
401  * - bits 16-31: DMA_B instance
402  *
403  * On NV05 it's:
404  *
405  * word 0:
406  * - bits 0-7: class
407  * - bit 12: color key active
408  * - bit 13: clip rect active
409  * - bit 14: if set, destination surface is swizzled and taken from buffer 5
410  * [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
411  * from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
412  * NV03_CONTEXT_SURFACE_DST].
413  * - bits 15-17: 2d operation [aka patch config]
414  * - bits 20-22: dither mode
415  * - bit 24: patch valid [enables rendering using this object]
416  * - bit 25: surface_dst/surface_color/surf2d/surf3d valid
417  * - bit 26: surface_src/surface_zeta valid
418  * - bit 27: pattern valid
419  * - bit 28: rop valid
420  * - bit 29: beta1 valid
421  * - bit 30: beta4 valid
422  * word 1:
423  * - bits 0-1: mono format
424  * - bits 8-13: color format
425  * - bits 16-31: DMA_NOTIFY instance
426  * word 2:
427  * - bits 0-15: DMA_A instance
428  * - bits 16-31: DMA_B instance
429  *
430  * NV05 will set/unset the relevant valid bits when you poke the relevant
431  * object-binding methods with object of the proper type, or with the NULL
432  * type. It'll only allow rendering using the grobj if all needed objects
433  * are bound. The needed set of objects depends on selected operation: for
434  * example rop object is needed by ROP_AND, but not by SRCCOPY_AND.
435  *
436  * NV04 doesn't have these methods implemented at all, and doesn't have the
437  * relevant bits in grobj. Instead, it'll allow rendering whenever bit 24
438  * is set. So we have to emulate them in software, internally keeping the
439  * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04,
440  * but the last word isn't actually used for anything, we abuse it for this
441  * purpose.
442  *
443  * Actually, NV05 can optionally check bit 24 too, but we disable this since
444  * there's no use for it.
445  *
446  * For unknown reasons, NV04 implements surf3d binding in hardware as an
447  * exception. Also for unknown reasons, NV04 doesn't implement the clipping
448  * methods on the surf3d object, so we have to emulate them too.
449  */
450 
/* Read-modify-write word 0 (ctx1) of the grobj, then mirror the new value
 * into PGRAPH's live context-switch registers so the change takes effect
 * immediately for the subchannel that issued the trapped method. */
static void
nv04_graph_set_ctx1(struct nouveau_object *object, u32 mask, u32 value)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	/* subchannel of the trapped method, TRAPPED_ADDR bits 13-15 */
	int subc = (nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
	u32 tmp;

	tmp = nv_ro32(object, 0x00);
	tmp &= ~mask;
	tmp |= value;
	nv_wo32(object, 0x00, tmp);

	/* keep the active switch register and the per-subchannel cache
	 * entry in sync with the instance memory copy */
	nv_wr32(priv, NV04_PGRAPH_CTX_SWITCH1, tmp);
	nv_wr32(priv, NV04_PGRAPH_CTX_CACHE1 + (subc<<2), tmp);
}
466 
/* Read-modify-write word 3 (0x0c) of the grobj, which holds the emulated
 * NV05-style valid bits (see the large comment above), then recompute
 * whether the object has every binding its current 2d operation needs and
 * propagate the result into the "patch valid" bit (bit 24) of ctx1. */
static void
nv04_graph_set_ctx_val(struct nouveau_object *object, u32 mask, u32 value)
{
	int class, op, valid = 1;
	u32 tmp, ctx1;

	ctx1 = nv_ro32(object, 0x00);
	class = ctx1 & 0xff;		/* hardware class: word 0 bits 0-7 */
	op = (ctx1 >> 15) & 7;		/* 2d operation: word 0 bits 15-17 */

	tmp = nv_ro32(object, 0x0c);
	tmp &= ~mask;
	tmp |= value;
	nv_wo32(object, 0x0c, tmp);

	/* check for valid surf2d/surf_dst/surf_color (valid bit 25) */
	if (!(tmp & 0x02000000))
		valid = 0;
	/* check for valid surf_src/surf_zeta (valid bit 26); only the
	 * blit (0x1f) and tex_tri (0x48) classes need a second surface */
	if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
		valid = 0;

	switch (op) {
	/* SRCCOPY_AND, SRCCOPY: no extra objects required */
	case 0:
	case 3:
		break;
	/* ROP_AND: requires pattern and rop (bits 27+28) */
	case 1:
		if (!(tmp & 0x18000000))
			valid = 0;
		break;
	/* BLEND_AND: requires beta1 (bit 29) */
	case 2:
		if (!(tmp & 0x20000000))
			valid = 0;
		break;
	/* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 (bit 30) required */
	case 4:
	case 5:
		if (!(tmp & 0x40000000))
			valid = 0;
		break;
	}

	nv04_graph_set_ctx1(object, 0x01000000, valid << 24);
}
514 
515 static int
516 nv04_graph_mthd_set_operation(struct nouveau_object *object, u32 mthd,
517  void *args, u32 size)
518 {
519  u32 class = nv_ro32(object, 0) & 0xff;
520  u32 data = *(u32 *)args;
521  if (data > 5)
522  return 1;
523  /* Old versions of the objects only accept first three operations. */
524  if (data > 2 && class < 0x40)
525  return 1;
526  nv04_graph_set_ctx1(object, 0x00038000, data << 15);
527  /* changing operation changes set of objects needed for validation */
528  nv04_graph_set_ctx_val(object, 0, 0);
529  return 0;
530 }
531 
532 static int
533 nv04_graph_mthd_surf3d_clip_h(struct nouveau_object *object, u32 mthd,
534  void *args, u32 size)
535 {
536  struct nv04_graph_priv *priv = (void *)object->engine;
537  u32 data = *(u32 *)args;
538  u32 min = data & 0xffff, max;
539  u32 w = data >> 16;
540  if (min & 0x8000)
541  /* too large */
542  return 1;
543  if (w & 0x8000)
544  /* yes, it accepts negative for some reason. */
545  w |= 0xffff0000;
546  max = min + w;
547  max &= 0x3ffff;
548  nv_wr32(priv, 0x40053c, min);
549  nv_wr32(priv, 0x400544, max);
550  return 0;
551 }
552 
553 static int
554 nv04_graph_mthd_surf3d_clip_v(struct nouveau_object *object, u32 mthd,
555  void *args, u32 size)
556 {
557  struct nv04_graph_priv *priv = (void *)object->engine;
558  u32 data = *(u32 *)args;
559  u32 min = data & 0xffff, max;
560  u32 w = data >> 16;
561  if (min & 0x8000)
562  /* too large */
563  return 1;
564  if (w & 0x8000)
565  /* yes, it accepts negative for some reason. */
566  w |= 0xffff0000;
567  max = min + w;
568  max &= 0x3ffff;
569  nv_wr32(priv, 0x400540, min);
570  nv_wr32(priv, 0x400548, max);
571  return 0;
572 }
573 
/* Resolve the hardware class of the object named by a bind method: the
 * argument is an instance-memory handle (offset >> 4); the low bits of a
 * grobj's first word hold the class (see the comment block above).
 * NOTE(review): the u16 return truncates nv_ro32's value — presumably
 * only classes/low bits matter here; confirm against callers. */
static u16
nv04_graph_mthd_bind_class(struct nouveau_object *object, u32 *args, u32 size)
{
	struct nouveau_instmem *imem = nouveau_instmem(object);
	u32 inst = *(u32 *)args << 4;
	return nv_ro32(imem, inst);
}
581 
582 static int
583 nv04_graph_mthd_bind_surf2d(struct nouveau_object *object, u32 mthd,
584  void *args, u32 size)
585 {
586  switch (nv04_graph_mthd_bind_class(object, args, size)) {
587  case 0x30:
588  nv04_graph_set_ctx1(object, 0x00004000, 0);
589  nv04_graph_set_ctx_val(object, 0x02000000, 0);
590  return 0;
591  case 0x42:
592  nv04_graph_set_ctx1(object, 0x00004000, 0);
593  nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
594  return 0;
595  }
596  return 1;
597 }
598 
599 static int
600 nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_object *object, u32 mthd,
601  void *args, u32 size)
602 {
603  switch (nv04_graph_mthd_bind_class(object, args, size)) {
604  case 0x30:
605  nv04_graph_set_ctx1(object, 0x00004000, 0);
606  nv04_graph_set_ctx_val(object, 0x02000000, 0);
607  return 0;
608  case 0x42:
609  nv04_graph_set_ctx1(object, 0x00004000, 0);
610  nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
611  return 0;
612  case 0x52:
613  nv04_graph_set_ctx1(object, 0x00004000, 0x00004000);
614  nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
615  return 0;
616  }
617  return 1;
618 }
619 
620 static int
621 nv01_graph_mthd_bind_patt(struct nouveau_object *object, u32 mthd,
622  void *args, u32 size)
623 {
624  switch (nv04_graph_mthd_bind_class(object, args, size)) {
625  case 0x30:
626  nv04_graph_set_ctx_val(object, 0x08000000, 0);
627  return 0;
628  case 0x18:
629  nv04_graph_set_ctx_val(object, 0x08000000, 0x08000000);
630  return 0;
631  }
632  return 1;
633 }
634 
635 static int
636 nv04_graph_mthd_bind_patt(struct nouveau_object *object, u32 mthd,
637  void *args, u32 size)
638 {
639  switch (nv04_graph_mthd_bind_class(object, args, size)) {
640  case 0x30:
641  nv04_graph_set_ctx_val(object, 0x08000000, 0);
642  return 0;
643  case 0x44:
644  nv04_graph_set_ctx_val(object, 0x08000000, 0x08000000);
645  return 0;
646  }
647  return 1;
648 }
649 
650 static int
651 nv04_graph_mthd_bind_rop(struct nouveau_object *object, u32 mthd,
652  void *args, u32 size)
653 {
654  switch (nv04_graph_mthd_bind_class(object, args, size)) {
655  case 0x30:
656  nv04_graph_set_ctx_val(object, 0x10000000, 0);
657  return 0;
658  case 0x43:
659  nv04_graph_set_ctx_val(object, 0x10000000, 0x10000000);
660  return 0;
661  }
662  return 1;
663 }
664 
665 static int
666 nv04_graph_mthd_bind_beta1(struct nouveau_object *object, u32 mthd,
667  void *args, u32 size)
668 {
669  switch (nv04_graph_mthd_bind_class(object, args, size)) {
670  case 0x30:
671  nv04_graph_set_ctx_val(object, 0x20000000, 0);
672  return 0;
673  case 0x12:
674  nv04_graph_set_ctx_val(object, 0x20000000, 0x20000000);
675  return 0;
676  }
677  return 1;
678 }
679 
680 static int
681 nv04_graph_mthd_bind_beta4(struct nouveau_object *object, u32 mthd,
682  void *args, u32 size)
683 {
684  switch (nv04_graph_mthd_bind_class(object, args, size)) {
685  case 0x30:
686  nv04_graph_set_ctx_val(object, 0x40000000, 0);
687  return 0;
688  case 0x72:
689  nv04_graph_set_ctx_val(object, 0x40000000, 0x40000000);
690  return 0;
691  }
692  return 1;
693 }
694 
695 static int
696 nv04_graph_mthd_bind_surf_dst(struct nouveau_object *object, u32 mthd,
697  void *args, u32 size)
698 {
699  switch (nv04_graph_mthd_bind_class(object, args, size)) {
700  case 0x30:
701  nv04_graph_set_ctx_val(object, 0x02000000, 0);
702  return 0;
703  case 0x58:
704  nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
705  return 0;
706  }
707  return 1;
708 }
709 
710 static int
711 nv04_graph_mthd_bind_surf_src(struct nouveau_object *object, u32 mthd,
712  void *args, u32 size)
713 {
714  switch (nv04_graph_mthd_bind_class(object, args, size)) {
715  case 0x30:
716  nv04_graph_set_ctx_val(object, 0x04000000, 0);
717  return 0;
718  case 0x59:
719  nv04_graph_set_ctx_val(object, 0x04000000, 0x04000000);
720  return 0;
721  }
722  return 1;
723 }
724 
725 static int
726 nv04_graph_mthd_bind_surf_color(struct nouveau_object *object, u32 mthd,
727  void *args, u32 size)
728 {
729  switch (nv04_graph_mthd_bind_class(object, args, size)) {
730  case 0x30:
731  nv04_graph_set_ctx_val(object, 0x02000000, 0);
732  return 0;
733  case 0x5a:
734  nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
735  return 0;
736  }
737  return 1;
738 }
739 
740 static int
741 nv04_graph_mthd_bind_surf_zeta(struct nouveau_object *object, u32 mthd,
742  void *args, u32 size)
743 {
744  switch (nv04_graph_mthd_bind_class(object, args, size)) {
745  case 0x30:
746  nv04_graph_set_ctx_val(object, 0x04000000, 0);
747  return 0;
748  case 0x5b:
749  nv04_graph_set_ctx_val(object, 0x04000000, 0x04000000);
750  return 0;
751  }
752  return 1;
753 }
754 
755 static int
756 nv01_graph_mthd_bind_clip(struct nouveau_object *object, u32 mthd,
757  void *args, u32 size)
758 {
759  switch (nv04_graph_mthd_bind_class(object, args, size)) {
760  case 0x30:
761  nv04_graph_set_ctx1(object, 0x2000, 0);
762  return 0;
763  case 0x19:
764  nv04_graph_set_ctx1(object, 0x2000, 0x2000);
765  return 0;
766  }
767  return 1;
768 }
769 
770 static int
771 nv01_graph_mthd_bind_chroma(struct nouveau_object *object, u32 mthd,
772  void *args, u32 size)
773 {
774  switch (nv04_graph_mthd_bind_class(object, args, size)) {
775  case 0x30:
776  nv04_graph_set_ctx1(object, 0x1000, 0);
777  return 0;
778  /* Yes, for some reason even the old versions of objects
779  * accept 0x57 and not 0x17. Consistency be damned.
780  */
781  case 0x57:
782  nv04_graph_set_ctx1(object, 0x1000, 0x1000);
783  return 0;
784  }
785  return 1;
786 }
787 
/* Software methods for the NV03-style GDI object (class 0x4b). */
static struct nouveau_omthds
nv03_graph_gdi_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_patt },
	{ 0x0188, nv04_graph_mthd_bind_rop },
	{ 0x018c, nv04_graph_mthd_bind_beta1 },
	{ 0x0190, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};
797 
/* Software methods for the NV04-style GDI object (class 0x4a). */
static struct nouveau_omthds
nv04_graph_gdi_omthds[] = {
	{ 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};
808 
/* Software methods for the NV01-style blit object (class 0x1f). */
static struct nouveau_omthds
nv01_graph_blit_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, nv01_graph_mthd_bind_patt },
	{ 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, nv04_graph_mthd_bind_surf_dst },
	{ 0x019c, nv04_graph_mthd_bind_surf_src },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};
821 
/* Software methods for the NV04-style blit object (class 0x5f). */
static struct nouveau_omthds
nv04_graph_blit_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, nv04_graph_mthd_bind_patt },
	{ 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, nv04_graph_mthd_bind_beta4 },
	{ 0x019c, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};
834 
/* Software methods for the NV04 indexed image-from-cpu object (0x60). */
static struct nouveau_omthds
nv04_graph_iifc_omthds[] = {
	{ 0x0188, nv01_graph_mthd_bind_chroma },
	{ 0x018c, nv01_graph_mthd_bind_clip },
	{ 0x0190, nv04_graph_mthd_bind_patt },
	{ 0x0194, nv04_graph_mthd_bind_rop },
	{ 0x0198, nv04_graph_mthd_bind_beta1 },
	{ 0x019c, nv04_graph_mthd_bind_beta4 },
	{ 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf },
	{ 0x03e4, nv04_graph_mthd_set_operation },
	{}
};
847 
/* Software methods for the NV01-style image-from-cpu object (0x21). */
static struct nouveau_omthds
nv01_graph_ifc_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, nv01_graph_mthd_bind_patt },
	{ 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};
859 
/* Software methods for the NV04-style image-from-cpu object (0x61). */
static struct nouveau_omthds
nv04_graph_ifc_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, nv04_graph_mthd_bind_patt },
	{ 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, nv04_graph_mthd_bind_beta4 },
	{ 0x019c, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};
872 
/* Software methods for the NV03-style stretched ifc object (0x36). */
static struct nouveau_omthds
nv03_graph_sifc_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, nv01_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};
883 
/* Software methods for the NV04-style stretched ifc object (0x76). */
static struct nouveau_omthds
nv04_graph_sifc_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};
895 
/* Software methods for the NV03-style scaled image-from-memory (0x37). */
static struct nouveau_omthds
nv03_graph_sifm_omthds[] = {
	{ 0x0188, nv01_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_surf_dst },
	{ 0x0304, nv04_graph_mthd_set_operation },
	{}
};
905 
/* Software methods for the NV04-style scaled image-from-memory (0x77). */
static struct nouveau_omthds
nv04_graph_sifm_omthds[] = {
	{ 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x0304, nv04_graph_mthd_set_operation },
	{}
};
916 
/* Software methods for the surf3d object (class 0x53): the hardware
 * doesn't implement its clip methods, so they are emulated here. */
static struct nouveau_omthds
nv04_graph_surf3d_omthds[] = {
	{ 0x02f8, nv04_graph_mthd_surf3d_clip_h },
	{ 0x02fc, nv04_graph_mthd_surf3d_clip_v },
	{}
};
923 
/* Software methods for the NV03-style textured triangle object (0x48). */
static struct nouveau_omthds
nv03_graph_ttri_omthds[] = {
	{ 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, nv04_graph_mthd_bind_surf_color },
	{ 0x0190, nv04_graph_mthd_bind_surf_zeta },
	{}
};
931 
/* Software methods shared by the NV01-style primitive objects
 * (line 0x1c, tri 0x1d, rect 0x1e). */
static struct nouveau_omthds
nv01_graph_prim_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_clip },
	{ 0x0188, nv01_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};
942 
/* Software methods shared by the NV04-style primitive objects
 * (line 0x5c, tri 0x5d, rect 0x5e). */
static struct nouveau_omthds
nv04_graph_prim_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_clip },
	{ 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};
954 
/* Constructor shared by all NV04 graphics object classes: allocate a
 * 16-byte, 16-byte-aligned grobj and store the hardware class in word 0.
 * Words 1-3 start out zero; the unused last word (0x0c) is later abused
 * by the software methods above to hold NV05-style valid bits (see the
 * explanatory comment block earlier in this file). */
static int
nv04_graph_object_ctor(struct nouveau_object *parent,
		       struct nouveau_object *engine,
		       struct nouveau_oclass *oclass, void *data, u32 size,
		       struct nouveau_object **pobject)
{
	struct nouveau_gpuobj *obj;
	int ret;

	ret = nouveau_gpuobj_create(parent, engine, oclass, 0, parent,
				    16, 16, 0, &obj);
	*pobject = nv_object(obj);
	if (ret)
		return ret;

	nv_wo32(obj, 0x00, nv_mclass(obj));
#ifdef __BIG_ENDIAN
	/* NOTE(review): presumably the grobj big-endian flag — confirm
	 * against the hardware documentation */
	nv_mo32(obj, 0x00, 0x00080000, 0x00080000);
#endif
	nv_wo32(obj, 0x04, 0x00000000);
	nv_wo32(obj, 0x08, 0x00000000);
	nv_wo32(obj, 0x0c, 0x00000000);
	return 0;
}
979 
980 struct nouveau_ofuncs
982  .ctor = nv04_graph_object_ctor,
983  .dtor = _nouveau_gpuobj_dtor,
984  .init = _nouveau_gpuobj_init,
985  .fini = _nouveau_gpuobj_fini,
986  .rd32 = _nouveau_gpuobj_rd32,
987  .wr32 = _nouveau_gpuobj_wr32,
988 };
989 
/* Graphics object classes exposed on NV04/NV05, each paired with the
 * software-method table that emulates what the hardware lacks. */
static struct nouveau_oclass
nv04_graph_sclass[] = {
	{ 0x0012, &nv04_graph_ofuncs }, /* beta1 */
	{ 0x0017, &nv04_graph_ofuncs }, /* chroma */
	{ 0x0018, &nv04_graph_ofuncs }, /* pattern (nv01) */
	{ 0x0019, &nv04_graph_ofuncs }, /* clip */
	{ 0x001c, &nv04_graph_ofuncs, nv01_graph_prim_omthds }, /* line */
	{ 0x001d, &nv04_graph_ofuncs, nv01_graph_prim_omthds }, /* tri */
	{ 0x001e, &nv04_graph_ofuncs, nv01_graph_prim_omthds }, /* rect */
	{ 0x001f, &nv04_graph_ofuncs, nv01_graph_blit_omthds }, /* blit (nv01) */
	{ 0x0021, &nv04_graph_ofuncs, nv01_graph_ifc_omthds }, /* ifc (nv01) */
	{ 0x0030, &nv04_graph_ofuncs }, /* null */
	{ 0x0036, &nv04_graph_ofuncs, nv03_graph_sifc_omthds }, /* sifc (nv03) */
	{ 0x0037, &nv04_graph_ofuncs, nv03_graph_sifm_omthds }, /* sifm (nv03) */
	{ 0x0038, &nv04_graph_ofuncs }, /* dvd subpicture */
	{ 0x0039, &nv04_graph_ofuncs }, /* m2mf */
	{ 0x0042, &nv04_graph_ofuncs }, /* surf2d */
	{ 0x0043, &nv04_graph_ofuncs }, /* rop */
	{ 0x0044, &nv04_graph_ofuncs }, /* pattern */
	{ 0x0048, &nv04_graph_ofuncs, nv03_graph_ttri_omthds }, /* ttri (nv03) */
	{ 0x004a, &nv04_graph_ofuncs, nv04_graph_gdi_omthds }, /* gdi (nv04) */
	{ 0x004b, &nv04_graph_ofuncs, nv03_graph_gdi_omthds }, /* gdi (nv03) */
	{ 0x0052, &nv04_graph_ofuncs }, /* swzsurf */
	{ 0x0053, &nv04_graph_ofuncs, nv04_graph_surf3d_omthds }, /* surf3d */
	{ 0x0054, &nv04_graph_ofuncs }, /* ttri */
	{ 0x0055, &nv04_graph_ofuncs }, /* mtri */
	{ 0x0057, &nv04_graph_ofuncs }, /* chroma */
	{ 0x0058, &nv04_graph_ofuncs }, /* surf_dst */
	{ 0x0059, &nv04_graph_ofuncs }, /* surf_src */
	{ 0x005a, &nv04_graph_ofuncs }, /* surf_color */
	{ 0x005b, &nv04_graph_ofuncs }, /* surf_zeta */
	{ 0x005c, &nv04_graph_ofuncs, nv04_graph_prim_omthds }, /* line */
	{ 0x005d, &nv04_graph_ofuncs, nv04_graph_prim_omthds }, /* tri */
	{ 0x005e, &nv04_graph_ofuncs, nv04_graph_prim_omthds }, /* rect */
	{ 0x005f, &nv04_graph_ofuncs, nv04_graph_blit_omthds }, /* blit (nv04) */
	{ 0x0060, &nv04_graph_ofuncs, nv04_graph_iifc_omthds }, /* iifc */
	{ 0x0061, &nv04_graph_ofuncs, nv04_graph_ifc_omthds }, /* ifc (nv04) */
	{ 0x0064, &nv04_graph_ofuncs }, /* iifc (nv05) */
	{ 0x0065, &nv04_graph_ofuncs }, /* ifc (nv05) */
	{ 0x0066, &nv04_graph_ofuncs }, /* sifc (nv05) */
	{ 0x0072, &nv04_graph_ofuncs }, /* beta4 */
	{ 0x0076, &nv04_graph_ofuncs, nv04_graph_sifc_omthds }, /* sifc (nv04) */
	{ 0x0077, &nv04_graph_ofuncs, nv04_graph_sifm_omthds }, /* sifm (nv04) */
	{},
};
1035 
1036 /*******************************************************************************
1037  * PGRAPH context
1038  ******************************************************************************/
1039 
1040 static struct nv04_graph_chan *
1041 nv04_graph_channel(struct nv04_graph_priv *priv)
1042 {
1043  struct nv04_graph_chan *chan = NULL;
1044  if (nv_rd32(priv, NV04_PGRAPH_CTX_CONTROL) & 0x00010000) {
1045  int chid = nv_rd32(priv, NV04_PGRAPH_CTX_USER) >> 24;
1046  if (chid < ARRAY_SIZE(priv->chan))
1047  chan = priv->chan[chid];
1048  }
1049  return chan;
1050 }
1051 
/* Restore a channel's saved register image into PGRAPH and mark chid as
 * the loaded context.  Callers in this file hold priv->lock and have
 * idled the engine first. */
static int
nv04_graph_load_context(struct nv04_graph_chan *chan, int chid)
{
	struct nv04_graph_priv *priv = nv04_graph_priv(chan);
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
		nv_wr32(priv, nv04_graph_ctx_regs[i], chan->nv04[i]);

	/* set the context-loaded flag and record the owning channel */
	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, chid << 24);
	nv_mask(priv, NV04_PGRAPH_FFINTFC_ST2, 0xfff00000, 0x00000000);
	return 0;
}
1066 
/* Save the live PGRAPH state into the channel's context image, then
 * clear the context-loaded flag (bit 16 of CTX_CONTROL, as tested by
 * nv04_graph_channel) and reset the CTX_USER channel field to 0x0f. */
static int
nv04_graph_unload_context(struct nv04_graph_chan *chan)
{
	struct nv04_graph_priv *priv = nv04_graph_priv(chan);
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
		chan->nv04[i] = nv_rd32(priv, nv04_graph_ctx_regs[i]);

	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);
	return 0;
}
1080 
/* Switch PGRAPH to the channel that triggered the current trap: idle the
 * engine, save the outgoing channel's context (if one is resident) and
 * load the incoming channel's.  The whole sequence runs under priv->lock
 * with interrupts disabled. */
static void
nv04_graph_context_switch(struct nv04_graph_priv *priv)
{
	struct nv04_graph_chan *prev = NULL;
	struct nv04_graph_chan *next = NULL;
	unsigned long flags;
	int chid;

	spin_lock_irqsave(&priv->lock, flags);
	nv04_graph_idle(priv);

	/* If previous context is valid, we need to save it */
	prev = nv04_graph_channel(priv);
	if (prev)
		nv04_graph_unload_context(prev);

	/* load context for next channel, identified by TRAPPED_ADDR
	 * bits 24-27 */
	chid = (nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR) >> 24) & 0x0f;
	next = priv->chan[chid];
	if (next)
		nv04_graph_load_context(next, chid);

	spin_unlock_irqrestore(&priv->lock, flags);
}
1105 
1106 static u32 *ctx_reg(struct nv04_graph_chan *chan, u32 reg)
1107 {
1108  int i;
1109 
1110  for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++) {
1111  if (nv04_graph_ctx_regs[i] == reg)
1112  return &chan->nv04[i];
1113  }
1114 
1115  return NULL;
1116 }
1117 
/* Create the software graph context for a FIFO channel, or re-reference
 * an existing one.  NV04 contexts are purely software state: an array of
 * register values swapped in/out on channel switch. */
static int
nv04_graph_context_ctor(struct nouveau_object *parent,
			struct nouveau_object *engine,
			struct nouveau_oclass *oclass, void *data, u32 size,
			struct nouveau_object **pobject)
{
	struct nouveau_fifo_chan *fifo = (void *)parent;
	struct nv04_graph_priv *priv = (void *)engine;
	struct nv04_graph_chan *chan;
	unsigned long flags;
	int ret;

	ret = nouveau_object_create(parent, engine, oclass, 0, &chan);
	*pobject = nv_object(chan);
	if (ret)
		return ret;

	spin_lock_irqsave(&priv->lock, flags);
	if (priv->chan[fifo->chid]) {
		/* a context already exists for this channel: hand back a
		 * new reference to it and discard the one just created.
		 * NOTE(review): positive return appears to signal "existing
		 * object reused" to the caller — confirm */
		*pobject = nv_object(priv->chan[fifo->chid]);
		atomic_inc(&(*pobject)->refcount);
		spin_unlock_irqrestore(&priv->lock, flags);
		nouveau_object_destroy(&chan->base);
		return 1;
	}

	/* initial value for PGRAPH_DEBUG_3 in a fresh context image */
	*ctx_reg(chan, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;

	priv->chan[fifo->chid] = chan;
	chan->chid = fifo->chid;
	spin_unlock_irqrestore(&priv->lock, flags);
	return 0;
}
1151 
1152 static void
1153 nv04_graph_context_dtor(struct nouveau_object *object)
1154 {
1155  struct nv04_graph_priv *priv = (void *)object->engine;
1156  struct nv04_graph_chan *chan = (void *)object;
1157  unsigned long flags;
1158 
1159  spin_lock_irqsave(&priv->lock, flags);
1160  priv->chan[chan->chid] = NULL;
1161  spin_unlock_irqrestore(&priv->lock, flags);
1162 
1163  nouveau_object_destroy(&chan->base);
1164 }
1165 
1166 static int
1167 nv04_graph_context_fini(struct nouveau_object *object, bool suspend)
1168 {
1169  struct nv04_graph_priv *priv = (void *)object->engine;
1170  struct nv04_graph_chan *chan = (void *)object;
1171  unsigned long flags;
1172 
1173  spin_lock_irqsave(&priv->lock, flags);
1174  nv_mask(priv, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
1175  if (nv04_graph_channel(priv) == chan)
1176  nv04_graph_unload_context(chan);
1177  nv_mask(priv, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
1178  spin_unlock_irqrestore(&priv->lock, flags);
1179 
1180  return nouveau_object_fini(&chan->base, suspend);
1181 }
1182 
/* per-channel graph context class for NV04 PGRAPH */
static struct nouveau_oclass
nv04_graph_cclass = {
	.handle = NV_ENGCTX(GR, 0x04),
	.ofuncs = &(struct nouveau_ofuncs) {
		.ctor = nv04_graph_context_ctor,
		.dtor = nv04_graph_context_dtor,
		.init = nouveau_object_init,
		.fini = nv04_graph_context_fini,
	},
};
1193 
1194 /*******************************************************************************
1195  * PGRAPH engine/subdev functions
1196  ******************************************************************************/
1197 
1198 bool
1200 {
1201  struct nouveau_graph *graph = nouveau_graph(obj);
1202  u32 mask = 0xffffffff;
1203 
1204  if (nv_device(obj)->card_type == NV_40)
1206 
1207  if (!nv_wait(graph, NV04_PGRAPH_STATUS, mask, 0)) {
1208  nv_error(graph, "idle timed out with status 0x%08x\n",
1209  nv_rd32(graph, NV04_PGRAPH_STATUS));
1210  return false;
1211  }
1212 
1213  return true;
1214 }
1215 
/* human-readable names for NV03_PGRAPH_INTR bits */
static const struct nouveau_bitfield
nv04_graph_intr_name[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{}
};
1221 
/* human-readable names for NV03_PGRAPH_NSTATUS bits */
static const struct nouveau_bitfield
nv04_graph_nstatus[] = {
	{ NV04_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
	{ NV04_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
	{ NV04_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
	{ NV04_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
	{}
};
1230 
1231 const struct nouveau_bitfield
1233  { NV03_PGRAPH_NSOURCE_NOTIFICATION, "NOTIFICATION" },
1234  { NV03_PGRAPH_NSOURCE_DATA_ERROR, "DATA_ERROR" },
1235  { NV03_PGRAPH_NSOURCE_PROTECTION_ERROR, "PROTECTION_ERROR" },
1236  { NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION, "RANGE_EXCEPTION" },
1237  { NV03_PGRAPH_NSOURCE_LIMIT_COLOR, "LIMIT_COLOR" },
1238  { NV03_PGRAPH_NSOURCE_LIMIT_ZETA, "LIMIT_ZETA" },
1239  { NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD, "ILLEGAL_MTHD" },
1240  { NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION, "DMA_R_PROTECTION" },
1241  { NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION, "DMA_W_PROTECTION" },
1242  { NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION, "FORMAT_EXCEPTION" },
1243  { NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION, "PATCH_EXCEPTION" },
1244  { NV03_PGRAPH_NSOURCE_STATE_INVALID, "STATE_INVALID" },
1245  { NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY, "DOUBLE_NOTIFY" },
1246  { NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE, "NOTIFY_IN_USE" },
1247  { NV03_PGRAPH_NSOURCE_METHOD_CNT, "METHOD_CNT" },
1248  { NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION, "BFR_NOTIFICATION" },
1249  { NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
1250  { NV03_PGRAPH_NSOURCE_DMA_WIDTH_A, "DMA_WIDTH_A" },
1251  { NV03_PGRAPH_NSOURCE_DMA_WIDTH_B, "DMA_WIDTH_B" },
1252  {}
1253 };
1254 
/*
 * PGRAPH interrupt handler.
 *
 * Decodes the trapped method (channel id, subchannel, method, data)
 * from the TRAPPED_ADDR/TRAPPED_DATA registers, gives software-method
 * handlers a chance to consume NOTIFY/ILLEGAL_MTHD traps, services
 * context-switch requests, then acks whatever remains and logs any
 * interrupt bits nobody handled.
 */
static void
nv04_graph_intr(struct nouveau_subdev *subdev)
{
	struct nv04_graph_priv *priv = (void *)subdev;
	struct nv04_graph_chan *chan = NULL;
	struct nouveau_namedb *namedb = NULL;
	struct nouveau_handle *handle = NULL;
	u32 stat = nv_rd32(priv, NV03_PGRAPH_INTR);
	u32 nsource = nv_rd32(priv, NV03_PGRAPH_NSOURCE);
	u32 nstatus = nv_rd32(priv, NV03_PGRAPH_NSTATUS);
	u32 addr = nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR);
	u32 chid = (addr & 0x0f000000) >> 24;
	u32 subc = (addr & 0x0000e000) >> 13;
	u32 mthd = (addr & 0x00001ffc);
	u32 data = nv_rd32(priv, NV04_PGRAPH_TRAPPED_DATA);
	u32 class = nv_rd32(priv, 0x400180 + subc * 4) & 0xff;
	u32 inst = (nv_rd32(priv, 0x40016c) & 0xffff) << 4;
	u32 show = stat;
	unsigned long flags;

	/* look up the channel that caused the trap (it may be gone) */
	spin_lock_irqsave(&priv->lock, flags);
	chan = priv->chan[chid];
	if (chan)
		namedb = (void *)nv_pclass(nv_object(chan), NV_NAMEDB_CLASS);
	spin_unlock_irqrestore(&priv->lock, flags);

	if (stat & NV_PGRAPH_INTR_NOTIFY) {
		if (chan && (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD)) {
			/* an ILLEGAL_MTHD may be a software method; let
			 * the object's mthd handler try to execute it and
			 * suppress the log message if it succeeds */
			handle = nouveau_namedb_get_vinst(namedb, inst);
			if (handle && !nv_call(handle->object, mthd, data))
				show &= ~NV_PGRAPH_INTR_NOTIFY;
		}
	}

	if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
		/* ack the bit before switching so we don't re-trap */
		nv_wr32(priv, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
		stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		nv04_graph_context_switch(priv);
	}

	/* ack everything else and re-enable PGRAPH fifo access */
	nv_wr32(priv, NV03_PGRAPH_INTR, stat);
	nv_wr32(priv, NV04_PGRAPH_FIFO, 0x00000001);

	if (show) {
		nv_error(priv, "");
		nouveau_bitfield_print(nv04_graph_intr_name, show);
		printk(" nsource:");
		nouveau_bitfield_print(nv04_graph_nsource, nsource);
		printk(" nstatus:");
		nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
		printk("\n");
		nv_error(priv, "ch %d/%d class 0x%04x "
			       "mthd 0x%04x data 0x%08x\n",
			 chid, subc, class, mthd, data);
	}

	nouveau_namedb_put(handle);
}
1314 
1315 static int
1316 nv04_graph_ctor(struct nouveau_object *parent, struct nouveau_object *engine,
1317  struct nouveau_oclass *oclass, void *data, u32 size,
1318  struct nouveau_object **pobject)
1319 {
1320  struct nv04_graph_priv *priv;
1321  int ret;
1322 
1323  ret = nouveau_graph_create(parent, engine, oclass, true, &priv);
1324  *pobject = nv_object(priv);
1325  if (ret)
1326  return ret;
1327 
1328  nv_subdev(priv)->unit = 0x00001000;
1329  nv_subdev(priv)->intr = nv04_graph_intr;
1330  nv_engine(priv)->cclass = &nv04_graph_cclass;
1331  nv_engine(priv)->sclass = nv04_graph_sclass;
1332  spin_lock_init(&priv->lock);
1333  return 0;
1334 }
1335 
/*
 * Bring PGRAPH up: enable all interrupts and program the DEBUG and
 * context-control registers to known-good values (mostly matching the
 * proprietary driver — see the inline blob/haiku notes).
 */
static int
nv04_graph_init(struct nouveau_object *object)
{
	struct nouveau_engine *engine = nv_engine(object);
	struct nv04_graph_priv *priv = (void *)engine;
	int ret;

	ret = nouveau_graph_init(&priv->base);
	if (ret)
		return ret;

	/* Enable PGRAPH interrupts */
	nv_wr32(priv, NV03_PGRAPH_INTR, 0xFFFFFFFF);
	nv_wr32(priv, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(priv, NV04_PGRAPH_VALID1, 0);
	nv_wr32(priv, NV04_PGRAPH_VALID2, 0);
	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x000001FF);
	  nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x1231c000);
	/*1231C000 blob, 001 haiku*/
	/*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_1, 0x72111100);
	/*0x72111100 blob , 01 haiku*/
	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
	/*haiku same*/

	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
	/*haiku and blob 10d4*/

	nv_wr32(priv, NV04_PGRAPH_STATE, 0xFFFFFFFF);
	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10000100);
	/* restrict CTX_USER's channel field to the valid 0-15 range */
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);

	/* These don't belong here, they're part of a per-channel context */
	nv_wr32(priv, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
	nv_wr32(priv, NV04_PGRAPH_BETA_AND, 0xFFFFFFFF);
	return 0;
}
1377 
1378 struct nouveau_oclass
1380  .handle = NV_ENGINE(GR, 0x04),
1381  .ofuncs = &(struct nouveau_ofuncs) {
1382  .ctor = nv04_graph_ctor,
1383  .dtor = _nouveau_graph_dtor,
1384  .init = nv04_graph_init,
1385  .fini = _nouveau_graph_fini,
1386  },
1387 };