2 * Copyright 2007 Stephane Marchesin
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice (including the next
13 * paragraph) shall be included in all copies or substantial portions of the
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
27 #include "nouveau_drm.h"
28 #include "nouveau_drv.h"
29 #include "nouveau_hw.h"
30 #include "nouveau_util.h"
31 #include "nouveau_ramht.h"
33 struct nv04_graph_engine {
34 struct nouveau_exec_engine base;
/* PGRAPH MMIO registers that make up a channel's graphics context: saved on
 * context unload and restored on load, one 32-bit word per entry.
 * NOTE(review): the embedded original line numbers jump (49->79, 83->109,
 * 192->336, ...), so entries appear to have been dropped from this extract —
 * verify the full list against the upstream nv04_graph.c before use. */
37 static uint32_t nv04_graph_ctx_regs[] = {
42 NV04_PGRAPH_CTX_SWITCH1,
43 NV04_PGRAPH_CTX_SWITCH2,
44 NV04_PGRAPH_CTX_SWITCH3,
45 NV04_PGRAPH_CTX_SWITCH4,
46 NV04_PGRAPH_CTX_CACHE1,
47 NV04_PGRAPH_CTX_CACHE2,
48 NV04_PGRAPH_CTX_CACHE3,
49 NV04_PGRAPH_CTX_CACHE4,
79 NV04_PGRAPH_DMA_START_0,
80 NV04_PGRAPH_DMA_START_1,
81 NV04_PGRAPH_DMA_LENGTH,
83 NV04_PGRAPH_DMA_PITCH,
109 NV04_PGRAPH_BSWIZZLE2,
110 NV04_PGRAPH_BSWIZZLE5,
113 NV04_PGRAPH_PATT_COLOR0,
114 NV04_PGRAPH_PATT_COLOR1,
115 NV04_PGRAPH_PATT_COLORRAM+0x00,
116 NV04_PGRAPH_PATT_COLORRAM+0x04,
117 NV04_PGRAPH_PATT_COLORRAM+0x08,
118 NV04_PGRAPH_PATT_COLORRAM+0x0c,
119 NV04_PGRAPH_PATT_COLORRAM+0x10,
120 NV04_PGRAPH_PATT_COLORRAM+0x14,
121 NV04_PGRAPH_PATT_COLORRAM+0x18,
122 NV04_PGRAPH_PATT_COLORRAM+0x1c,
123 NV04_PGRAPH_PATT_COLORRAM+0x20,
124 NV04_PGRAPH_PATT_COLORRAM+0x24,
125 NV04_PGRAPH_PATT_COLORRAM+0x28,
126 NV04_PGRAPH_PATT_COLORRAM+0x2c,
127 NV04_PGRAPH_PATT_COLORRAM+0x30,
128 NV04_PGRAPH_PATT_COLORRAM+0x34,
129 NV04_PGRAPH_PATT_COLORRAM+0x38,
130 NV04_PGRAPH_PATT_COLORRAM+0x3c,
131 NV04_PGRAPH_PATT_COLORRAM+0x40,
132 NV04_PGRAPH_PATT_COLORRAM+0x44,
133 NV04_PGRAPH_PATT_COLORRAM+0x48,
134 NV04_PGRAPH_PATT_COLORRAM+0x4c,
135 NV04_PGRAPH_PATT_COLORRAM+0x50,
136 NV04_PGRAPH_PATT_COLORRAM+0x54,
137 NV04_PGRAPH_PATT_COLORRAM+0x58,
138 NV04_PGRAPH_PATT_COLORRAM+0x5c,
139 NV04_PGRAPH_PATT_COLORRAM+0x60,
140 NV04_PGRAPH_PATT_COLORRAM+0x64,
141 NV04_PGRAPH_PATT_COLORRAM+0x68,
142 NV04_PGRAPH_PATT_COLORRAM+0x6c,
143 NV04_PGRAPH_PATT_COLORRAM+0x70,
144 NV04_PGRAPH_PATT_COLORRAM+0x74,
145 NV04_PGRAPH_PATT_COLORRAM+0x78,
146 NV04_PGRAPH_PATT_COLORRAM+0x7c,
147 NV04_PGRAPH_PATT_COLORRAM+0x80,
148 NV04_PGRAPH_PATT_COLORRAM+0x84,
149 NV04_PGRAPH_PATT_COLORRAM+0x88,
150 NV04_PGRAPH_PATT_COLORRAM+0x8c,
151 NV04_PGRAPH_PATT_COLORRAM+0x90,
152 NV04_PGRAPH_PATT_COLORRAM+0x94,
153 NV04_PGRAPH_PATT_COLORRAM+0x98,
154 NV04_PGRAPH_PATT_COLORRAM+0x9c,
155 NV04_PGRAPH_PATT_COLORRAM+0xa0,
156 NV04_PGRAPH_PATT_COLORRAM+0xa4,
157 NV04_PGRAPH_PATT_COLORRAM+0xa8,
158 NV04_PGRAPH_PATT_COLORRAM+0xac,
159 NV04_PGRAPH_PATT_COLORRAM+0xb0,
160 NV04_PGRAPH_PATT_COLORRAM+0xb4,
161 NV04_PGRAPH_PATT_COLORRAM+0xb8,
162 NV04_PGRAPH_PATT_COLORRAM+0xbc,
163 NV04_PGRAPH_PATT_COLORRAM+0xc0,
164 NV04_PGRAPH_PATT_COLORRAM+0xc4,
165 NV04_PGRAPH_PATT_COLORRAM+0xc8,
166 NV04_PGRAPH_PATT_COLORRAM+0xcc,
167 NV04_PGRAPH_PATT_COLORRAM+0xd0,
168 NV04_PGRAPH_PATT_COLORRAM+0xd4,
169 NV04_PGRAPH_PATT_COLORRAM+0xd8,
170 NV04_PGRAPH_PATT_COLORRAM+0xdc,
171 NV04_PGRAPH_PATT_COLORRAM+0xe0,
172 NV04_PGRAPH_PATT_COLORRAM+0xe4,
173 NV04_PGRAPH_PATT_COLORRAM+0xe8,
174 NV04_PGRAPH_PATT_COLORRAM+0xec,
175 NV04_PGRAPH_PATT_COLORRAM+0xf0,
176 NV04_PGRAPH_PATT_COLORRAM+0xf4,
177 NV04_PGRAPH_PATT_COLORRAM+0xf8,
178 NV04_PGRAPH_PATT_COLORRAM+0xfc,
181 NV04_PGRAPH_PATTERN_SHAPE,
185 NV04_PGRAPH_BETA_AND,
186 NV04_PGRAPH_BETA_PREMULT,
187 NV04_PGRAPH_CONTROL0,
188 NV04_PGRAPH_CONTROL1,
189 NV04_PGRAPH_CONTROL2,
191 NV04_PGRAPH_STORED_FMT,
192 NV04_PGRAPH_SOURCE_COLOR,
336 NV04_PGRAPH_PASSTHRU_0,
337 NV04_PGRAPH_PASSTHRU_1,
338 NV04_PGRAPH_PASSTHRU_2,
339 NV04_PGRAPH_DVD_COLORFMT,
340 NV04_PGRAPH_SCALED_FORMAT,
341 NV04_PGRAPH_MISC24_0,
342 NV04_PGRAPH_MISC24_1,
343 NV04_PGRAPH_MISC24_2,
/* Per-channel saved copy of every register listed above; member of
 * struct graph_state (the struct header is not visible in this extract). */
352 uint32_t nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
355 static struct nouveau_channel *
356 nv04_graph_channel(struct drm_device *dev)
358 struct drm_nouveau_private *dev_priv = dev->dev_private;
359 int chid = dev_priv->engine.fifo.channels;
361 if (nv_rd32(dev, NV04_PGRAPH_CTX_CONTROL) & 0x00010000)
362 chid = nv_rd32(dev, NV04_PGRAPH_CTX_USER) >> 24;
364 if (chid >= dev_priv->engine.fifo.channels)
367 return dev_priv->channels.ptr[chid];
370 static uint32_t *ctx_reg(struct graph_state *ctx, uint32_t reg)
374 for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++) {
375 if (nv04_graph_ctx_regs[i] == reg)
376 return &ctx->nv04[i];
383 nv04_graph_load_context(struct nouveau_channel *chan)
385 struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
386 struct drm_device *dev = chan->dev;
390 for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
391 nv_wr32(dev, nv04_graph_ctx_regs[i], pgraph_ctx->nv04[i]);
393 nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
395 tmp = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
396 nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp | chan->id << 24);
398 tmp = nv_rd32(dev, NV04_PGRAPH_FFINTFC_ST2);
399 nv_wr32(dev, NV04_PGRAPH_FFINTFC_ST2, tmp & 0x000fffff);
405 nv04_graph_unload_context(struct drm_device *dev)
407 struct drm_nouveau_private *dev_priv = dev->dev_private;
408 struct nouveau_channel *chan = NULL;
409 struct graph_state *ctx;
413 chan = nv04_graph_channel(dev);
416 ctx = chan->engctx[NVOBJ_ENGINE_GR];
418 for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
419 ctx->nv04[i] = nv_rd32(dev, nv04_graph_ctx_regs[i]);
421 nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
422 tmp = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
423 tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
424 nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);
429 nv04_graph_context_new(struct nouveau_channel *chan, int engine)
431 struct graph_state *pgraph_ctx;
432 NV_DEBUG(chan->dev, "nv04_graph_context_create %d\n", chan->id);
434 pgraph_ctx = kzalloc(sizeof(*pgraph_ctx), GFP_KERNEL);
435 if (pgraph_ctx == NULL)
438 *ctx_reg(pgraph_ctx, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;
440 chan->engctx[engine] = pgraph_ctx;
445 nv04_graph_context_del(struct nouveau_channel *chan, int engine)
447 struct drm_device *dev = chan->dev;
448 struct drm_nouveau_private *dev_priv = dev->dev_private;
449 struct graph_state *pgraph_ctx = chan->engctx[engine];
452 spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
453 nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
455 /* Unload the context if it's the currently active one */
456 if (nv04_graph_channel(dev) == chan)
457 nv04_graph_unload_context(dev);
459 nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
460 spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);
462 /* Free the context resources */
464 chan->engctx[engine] = NULL;
468 nv04_graph_object_new(struct nouveau_channel *chan, int engine,
469 u32 handle, u16 class)
471 struct drm_device *dev = chan->dev;
472 struct nouveau_gpuobj *obj = NULL;
475 ret = nouveau_gpuobj_new(dev, chan, 16, 16, NVOBJ_FLAG_ZERO_FREE, &obj);
482 nv_wo32(obj, 0x00, 0x00080000 | class);
484 nv_wo32(obj, 0x00, class);
486 nv_wo32(obj, 0x04, 0x00000000);
487 nv_wo32(obj, 0x08, 0x00000000);
488 nv_wo32(obj, 0x0c, 0x00000000);
490 ret = nouveau_ramht_insert(chan, handle, obj);
491 nouveau_gpuobj_ref(NULL, &obj);
496 nv04_graph_init(struct drm_device *dev, int engine)
498 struct drm_nouveau_private *dev_priv = dev->dev_private;
501 nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
502 ~NV_PMC_ENABLE_PGRAPH);
503 nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
504 NV_PMC_ENABLE_PGRAPH);
506 /* Enable PGRAPH interrupts */
507 nv_wr32(dev, NV03_PGRAPH_INTR, 0xFFFFFFFF);
508 nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);
510 nv_wr32(dev, NV04_PGRAPH_VALID1, 0);
511 nv_wr32(dev, NV04_PGRAPH_VALID2, 0);
512 /*nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x000001FF);
513 nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
514 nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x1231c000);
515 /*1231C000 blob, 001 haiku*/
516 /*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
517 nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x72111100);
518 /*0x72111100 blob , 01 haiku*/
519 /*nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
520 nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
523 /*nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
524 nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
525 /*haiku and blob 10d4*/
527 nv_wr32(dev, NV04_PGRAPH_STATE , 0xFFFFFFFF);
528 nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL , 0x10000100);
529 tmp = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
530 tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
531 nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);
533 /* These don't belong here, they're part of a per-channel context */
534 nv_wr32(dev, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
535 nv_wr32(dev, NV04_PGRAPH_BETA_AND , 0xFFFFFFFF);
541 nv04_graph_fini(struct drm_device *dev, int engine, bool suspend)
543 nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
544 if (!nv_wait(dev, NV04_PGRAPH_STATUS, ~0, 0) && suspend) {
545 nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
548 nv04_graph_unload_context(dev);
549 nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
/*
 * Software methods, why they are needed, and how they all work:
 *
 * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
 * 2d engine settings are kept inside the grobjs themselves. The grobjs are
 * 3 words long on both. grobj format on NV04 is:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surf3d valid [for tex_tri and multitex_tri only]
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * On NV05 it's:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bits 20-22: dither mode
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surface_dst/surface_color/surf2d/surf3d valid
 *  - bit 26: surface_src/surface_zeta valid
 *  - bit 27: pattern valid
 *  - bit 28: rop valid
 *  - bit 29: beta1 valid
 *  - bit 30: beta4 valid
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * NV05 will set/unset the relevant valid bits when you poke the relevant
 * object-binding methods with object of the proper type, or with the NULL
 * type. It'll only allow rendering using the grobj if all needed objects
 * are bound. The needed set of objects depends on selected operation: for
 * example rop object is needed by ROP_AND, but not by SRCCOPY_AND.
 *
 * NV04 doesn't have these methods implemented at all, and doesn't have the
 * relevant bits in grobj. Instead, it'll allow rendering whenever bit 24
 * is set. So we have to emulate them in software, internally keeping the
 * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04,
 * but the last word isn't actually used for anything, we abuse it for this
 * purpose.
 *
 * Actually, NV05 can optionally check bit 24 too, but we disable this since
 * there's no use for it.
 *
 * For unknown reasons, NV04 implements surf3d binding in hardware as an
 * exception. Also for unknown reasons, NV04 doesn't implement the clipping
 * methods on the surf3d object, so we have to emulate them too.
 */
628 nv04_graph_set_ctx1(struct nouveau_channel *chan, u32 mask, u32 value)
630 struct drm_device *dev = chan->dev;
631 u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
632 int subc = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
635 tmp = nv_ri32(dev, instance);
639 nv_wi32(dev, instance, tmp);
640 nv_wr32(dev, NV04_PGRAPH_CTX_SWITCH1, tmp);
641 nv_wr32(dev, NV04_PGRAPH_CTX_CACHE1 + (subc<<2), tmp);
645 nv04_graph_set_ctx_val(struct nouveau_channel *chan, u32 mask, u32 value)
647 struct drm_device *dev = chan->dev;
648 u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
650 int class, op, valid = 1;
652 ctx1 = nv_ri32(dev, instance);
654 op = (ctx1 >> 15) & 7;
655 tmp = nv_ri32(dev, instance + 0xc);
658 nv_wi32(dev, instance + 0xc, tmp);
660 /* check for valid surf2d/surf_dst/surf_color */
661 if (!(tmp & 0x02000000))
663 /* check for valid surf_src/surf_zeta */
664 if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
668 /* SRCCOPY_AND, SRCCOPY: no extra objects required */
672 /* ROP_AND: requires pattern and rop */
674 if (!(tmp & 0x18000000))
677 /* BLEND_AND: requires beta1 */
679 if (!(tmp & 0x20000000))
682 /* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
685 if (!(tmp & 0x40000000))
690 nv04_graph_set_ctx1(chan, 0x01000000, valid << 24);
694 nv04_graph_mthd_set_operation(struct nouveau_channel *chan,
695 u32 class, u32 mthd, u32 data)
699 /* Old versions of the objects only accept first three operations. */
700 if (data > 2 && class < 0x40)
702 nv04_graph_set_ctx1(chan, 0x00038000, data << 15);
703 /* changing operation changes set of objects needed for validation */
704 nv04_graph_set_ctx_val(chan, 0, 0);
709 nv04_graph_mthd_surf3d_clip_h(struct nouveau_channel *chan,
710 u32 class, u32 mthd, u32 data)
712 uint32_t min = data & 0xffff, max;
713 uint32_t w = data >> 16;
718 /* yes, it accepts negative for some reason. */
722 nv_wr32(chan->dev, 0x40053c, min);
723 nv_wr32(chan->dev, 0x400544, max);
728 nv04_graph_mthd_surf3d_clip_v(struct nouveau_channel *chan,
729 u32 class, u32 mthd, u32 data)
731 uint32_t min = data & 0xffff, max;
732 uint32_t w = data >> 16;
737 /* yes, it accepts negative for some reason. */
741 nv_wr32(chan->dev, 0x400540, min);
742 nv_wr32(chan->dev, 0x400548, max);
747 nv04_graph_mthd_bind_surf2d(struct nouveau_channel *chan,
748 u32 class, u32 mthd, u32 data)
750 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
752 nv04_graph_set_ctx1(chan, 0x00004000, 0);
753 nv04_graph_set_ctx_val(chan, 0x02000000, 0);
756 nv04_graph_set_ctx1(chan, 0x00004000, 0);
757 nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
764 nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_channel *chan,
765 u32 class, u32 mthd, u32 data)
767 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
769 nv04_graph_set_ctx1(chan, 0x00004000, 0);
770 nv04_graph_set_ctx_val(chan, 0x02000000, 0);
773 nv04_graph_set_ctx1(chan, 0x00004000, 0);
774 nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
777 nv04_graph_set_ctx1(chan, 0x00004000, 0x00004000);
778 nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
785 nv04_graph_mthd_bind_nv01_patt(struct nouveau_channel *chan,
786 u32 class, u32 mthd, u32 data)
788 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
790 nv04_graph_set_ctx_val(chan, 0x08000000, 0);
793 nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
800 nv04_graph_mthd_bind_nv04_patt(struct nouveau_channel *chan,
801 u32 class, u32 mthd, u32 data)
803 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
805 nv04_graph_set_ctx_val(chan, 0x08000000, 0);
808 nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
815 nv04_graph_mthd_bind_rop(struct nouveau_channel *chan,
816 u32 class, u32 mthd, u32 data)
818 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
820 nv04_graph_set_ctx_val(chan, 0x10000000, 0);
823 nv04_graph_set_ctx_val(chan, 0x10000000, 0x10000000);
830 nv04_graph_mthd_bind_beta1(struct nouveau_channel *chan,
831 u32 class, u32 mthd, u32 data)
833 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
835 nv04_graph_set_ctx_val(chan, 0x20000000, 0);
838 nv04_graph_set_ctx_val(chan, 0x20000000, 0x20000000);
845 nv04_graph_mthd_bind_beta4(struct nouveau_channel *chan,
846 u32 class, u32 mthd, u32 data)
848 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
850 nv04_graph_set_ctx_val(chan, 0x40000000, 0);
853 nv04_graph_set_ctx_val(chan, 0x40000000, 0x40000000);
860 nv04_graph_mthd_bind_surf_dst(struct nouveau_channel *chan,
861 u32 class, u32 mthd, u32 data)
863 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
865 nv04_graph_set_ctx_val(chan, 0x02000000, 0);
868 nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
875 nv04_graph_mthd_bind_surf_src(struct nouveau_channel *chan,
876 u32 class, u32 mthd, u32 data)
878 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
880 nv04_graph_set_ctx_val(chan, 0x04000000, 0);
883 nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
890 nv04_graph_mthd_bind_surf_color(struct nouveau_channel *chan,
891 u32 class, u32 mthd, u32 data)
893 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
895 nv04_graph_set_ctx_val(chan, 0x02000000, 0);
898 nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
905 nv04_graph_mthd_bind_surf_zeta(struct nouveau_channel *chan,
906 u32 class, u32 mthd, u32 data)
908 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
910 nv04_graph_set_ctx_val(chan, 0x04000000, 0);
913 nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
920 nv04_graph_mthd_bind_clip(struct nouveau_channel *chan,
921 u32 class, u32 mthd, u32 data)
923 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
925 nv04_graph_set_ctx1(chan, 0x2000, 0);
928 nv04_graph_set_ctx1(chan, 0x2000, 0x2000);
935 nv04_graph_mthd_bind_chroma(struct nouveau_channel *chan,
936 u32 class, u32 mthd, u32 data)
938 switch (nv_ri32(chan->dev, data << 4) & 0xff) {
940 nv04_graph_set_ctx1(chan, 0x1000, 0);
942 /* Yes, for some reason even the old versions of objects
943 * accept 0x57 and not 0x17. Consistency be damned.
946 nv04_graph_set_ctx1(chan, 0x1000, 0x1000);
952 static struct nouveau_bitfield nv04_graph_intr[] = {
953 { NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
957 static struct nouveau_bitfield nv04_graph_nstatus[] = {
958 { NV04_PGRAPH_NSTATUS_STATE_IN_USE, "STATE_IN_USE" },
959 { NV04_PGRAPH_NSTATUS_INVALID_STATE, "INVALID_STATE" },
960 { NV04_PGRAPH_NSTATUS_BAD_ARGUMENT, "BAD_ARGUMENT" },
961 { NV04_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
965 struct nouveau_bitfield nv04_graph_nsource[] = {
966 { NV03_PGRAPH_NSOURCE_NOTIFICATION, "NOTIFICATION" },
967 { NV03_PGRAPH_NSOURCE_DATA_ERROR, "DATA_ERROR" },
968 { NV03_PGRAPH_NSOURCE_PROTECTION_ERROR, "PROTECTION_ERROR" },
969 { NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION, "RANGE_EXCEPTION" },
970 { NV03_PGRAPH_NSOURCE_LIMIT_COLOR, "LIMIT_COLOR" },
971 { NV03_PGRAPH_NSOURCE_LIMIT_ZETA, "LIMIT_ZETA" },
972 { NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD, "ILLEGAL_MTHD" },
973 { NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION, "DMA_R_PROTECTION" },
974 { NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION, "DMA_W_PROTECTION" },
975 { NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION, "FORMAT_EXCEPTION" },
976 { NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION, "PATCH_EXCEPTION" },
977 { NV03_PGRAPH_NSOURCE_STATE_INVALID, "STATE_INVALID" },
978 { NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY, "DOUBLE_NOTIFY" },
979 { NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE, "NOTIFY_IN_USE" },
980 { NV03_PGRAPH_NSOURCE_METHOD_CNT, "METHOD_CNT" },
981 { NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION, "BFR_NOTIFICATION" },
982 { NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
983 { NV03_PGRAPH_NSOURCE_DMA_WIDTH_A, "DMA_WIDTH_A" },
984 { NV03_PGRAPH_NSOURCE_DMA_WIDTH_B, "DMA_WIDTH_B" },
989 nv04_graph_context_switch(struct drm_device *dev)
991 struct drm_nouveau_private *dev_priv = dev->dev_private;
992 struct nouveau_channel *chan = NULL;
995 nouveau_wait_for_idle(dev);
997 /* If previous context is valid, we need to save it */
998 nv04_graph_unload_context(dev);
1000 /* Load context for next channel */
1001 chid = nv_rd32(dev, NV03_PFIFO_CACHE1_PUSH1) &
1002 NV03_PFIFO_CACHE1_PUSH1_CHID_MASK;
1003 chan = dev_priv->channels.ptr[chid];
1005 nv04_graph_load_context(chan);
1009 nv04_graph_isr(struct drm_device *dev)
1013 while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
1014 u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
1015 u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
1016 u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
1017 u32 chid = (addr & 0x0f000000) >> 24;
1018 u32 subc = (addr & 0x0000e000) >> 13;
1019 u32 mthd = (addr & 0x00001ffc);
1020 u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
1021 u32 class = nv_rd32(dev, 0x400180 + subc * 4) & 0xff;
1024 if (stat & NV_PGRAPH_INTR_NOTIFY) {
1025 if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
1026 if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
1027 show &= ~NV_PGRAPH_INTR_NOTIFY;
1031 if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
1032 nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
1033 stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
1034 show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
1035 nv04_graph_context_switch(dev);
1038 nv_wr32(dev, NV03_PGRAPH_INTR, stat);
1039 nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);
1041 if (show && nouveau_ratelimit()) {
1042 NV_INFO(dev, "PGRAPH -");
1043 nouveau_bitfield_print(nv04_graph_intr, show);
1044 printk(" nsource:");
1045 nouveau_bitfield_print(nv04_graph_nsource, nsource);
1046 printk(" nstatus:");
1047 nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
1049 NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
1050 "mthd 0x%04x data 0x%08x\n",
1051 chid, subc, class, mthd, data);
1057 nv04_graph_destroy(struct drm_device *dev, int engine)
1059 struct nv04_graph_engine *pgraph = nv_engine(dev, engine);
1061 nouveau_irq_unregister(dev, 12);
1063 NVOBJ_ENGINE_DEL(dev, GR);
1068 nv04_graph_create(struct drm_device *dev)
1070 struct nv04_graph_engine *pgraph;
1072 pgraph = kzalloc(sizeof(*pgraph), GFP_KERNEL);
1076 pgraph->base.destroy = nv04_graph_destroy;
1077 pgraph->base.init = nv04_graph_init;
1078 pgraph->base.fini = nv04_graph_fini;
1079 pgraph->base.context_new = nv04_graph_context_new;
1080 pgraph->base.context_del = nv04_graph_context_del;
1081 pgraph->base.object_new = nv04_graph_object_new;
1083 NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base);
1084 nouveau_irq_register(dev, 12, nv04_graph_isr);
1086 /* dvd subpicture */
1087 NVOBJ_CLASS(dev, 0x0038, GR);
1090 NVOBJ_CLASS(dev, 0x0039, GR);
1093 NVOBJ_CLASS(dev, 0x004b, GR);
1094 NVOBJ_MTHD (dev, 0x004b, 0x0184, nv04_graph_mthd_bind_nv01_patt);
1095 NVOBJ_MTHD (dev, 0x004b, 0x0188, nv04_graph_mthd_bind_rop);
1096 NVOBJ_MTHD (dev, 0x004b, 0x018c, nv04_graph_mthd_bind_beta1);
1097 NVOBJ_MTHD (dev, 0x004b, 0x0190, nv04_graph_mthd_bind_surf_dst);
1098 NVOBJ_MTHD (dev, 0x004b, 0x02fc, nv04_graph_mthd_set_operation);
1101 NVOBJ_CLASS(dev, 0x004a, GR);
1102 NVOBJ_MTHD (dev, 0x004a, 0x0188, nv04_graph_mthd_bind_nv04_patt);
1103 NVOBJ_MTHD (dev, 0x004a, 0x018c, nv04_graph_mthd_bind_rop);
1104 NVOBJ_MTHD (dev, 0x004a, 0x0190, nv04_graph_mthd_bind_beta1);
1105 NVOBJ_MTHD (dev, 0x004a, 0x0194, nv04_graph_mthd_bind_beta4);
1106 NVOBJ_MTHD (dev, 0x004a, 0x0198, nv04_graph_mthd_bind_surf2d);
1107 NVOBJ_MTHD (dev, 0x004a, 0x02fc, nv04_graph_mthd_set_operation);
1109 /* nv01 imageblit */
1110 NVOBJ_CLASS(dev, 0x001f, GR);
1111 NVOBJ_MTHD (dev, 0x001f, 0x0184, nv04_graph_mthd_bind_chroma);
1112 NVOBJ_MTHD (dev, 0x001f, 0x0188, nv04_graph_mthd_bind_clip);
1113 NVOBJ_MTHD (dev, 0x001f, 0x018c, nv04_graph_mthd_bind_nv01_patt);
1114 NVOBJ_MTHD (dev, 0x001f, 0x0190, nv04_graph_mthd_bind_rop);
1115 NVOBJ_MTHD (dev, 0x001f, 0x0194, nv04_graph_mthd_bind_beta1);
1116 NVOBJ_MTHD (dev, 0x001f, 0x0198, nv04_graph_mthd_bind_surf_dst);
1117 NVOBJ_MTHD (dev, 0x001f, 0x019c, nv04_graph_mthd_bind_surf_src);
1118 NVOBJ_MTHD (dev, 0x001f, 0x02fc, nv04_graph_mthd_set_operation);
1120 /* nv04 imageblit */
1121 NVOBJ_CLASS(dev, 0x005f, GR);
1122 NVOBJ_MTHD (dev, 0x005f, 0x0184, nv04_graph_mthd_bind_chroma);
1123 NVOBJ_MTHD (dev, 0x005f, 0x0188, nv04_graph_mthd_bind_clip);
1124 NVOBJ_MTHD (dev, 0x005f, 0x018c, nv04_graph_mthd_bind_nv04_patt);
1125 NVOBJ_MTHD (dev, 0x005f, 0x0190, nv04_graph_mthd_bind_rop);
1126 NVOBJ_MTHD (dev, 0x005f, 0x0194, nv04_graph_mthd_bind_beta1);
1127 NVOBJ_MTHD (dev, 0x005f, 0x0198, nv04_graph_mthd_bind_beta4);
1128 NVOBJ_MTHD (dev, 0x005f, 0x019c, nv04_graph_mthd_bind_surf2d);
1129 NVOBJ_MTHD (dev, 0x005f, 0x02fc, nv04_graph_mthd_set_operation);
1132 NVOBJ_CLASS(dev, 0x0060, GR);
1133 NVOBJ_MTHD (dev, 0x0060, 0x0188, nv04_graph_mthd_bind_chroma);
1134 NVOBJ_MTHD (dev, 0x0060, 0x018c, nv04_graph_mthd_bind_clip);
1135 NVOBJ_MTHD (dev, 0x0060, 0x0190, nv04_graph_mthd_bind_nv04_patt);
1136 NVOBJ_MTHD (dev, 0x0060, 0x0194, nv04_graph_mthd_bind_rop);
1137 NVOBJ_MTHD (dev, 0x0060, 0x0198, nv04_graph_mthd_bind_beta1);
1138 NVOBJ_MTHD (dev, 0x0060, 0x019c, nv04_graph_mthd_bind_beta4);
1139 NVOBJ_MTHD (dev, 0x0060, 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf);
1140 NVOBJ_MTHD (dev, 0x0060, 0x03e4, nv04_graph_mthd_set_operation);
1143 NVOBJ_CLASS(dev, 0x0064, GR);
1146 NVOBJ_CLASS(dev, 0x0021, GR);
1147 NVOBJ_MTHD (dev, 0x0021, 0x0184, nv04_graph_mthd_bind_chroma);
1148 NVOBJ_MTHD (dev, 0x0021, 0x0188, nv04_graph_mthd_bind_clip);
1149 NVOBJ_MTHD (dev, 0x0021, 0x018c, nv04_graph_mthd_bind_nv01_patt);
1150 NVOBJ_MTHD (dev, 0x0021, 0x0190, nv04_graph_mthd_bind_rop);
1151 NVOBJ_MTHD (dev, 0x0021, 0x0194, nv04_graph_mthd_bind_beta1);
1152 NVOBJ_MTHD (dev, 0x0021, 0x0198, nv04_graph_mthd_bind_surf_dst);
1153 NVOBJ_MTHD (dev, 0x0021, 0x02fc, nv04_graph_mthd_set_operation);
1156 NVOBJ_CLASS(dev, 0x0061, GR);
1157 NVOBJ_MTHD (dev, 0x0061, 0x0184, nv04_graph_mthd_bind_chroma);
1158 NVOBJ_MTHD (dev, 0x0061, 0x0188, nv04_graph_mthd_bind_clip);
1159 NVOBJ_MTHD (dev, 0x0061, 0x018c, nv04_graph_mthd_bind_nv04_patt);
1160 NVOBJ_MTHD (dev, 0x0061, 0x0190, nv04_graph_mthd_bind_rop);
1161 NVOBJ_MTHD (dev, 0x0061, 0x0194, nv04_graph_mthd_bind_beta1);
1162 NVOBJ_MTHD (dev, 0x0061, 0x0198, nv04_graph_mthd_bind_beta4);
1163 NVOBJ_MTHD (dev, 0x0061, 0x019c, nv04_graph_mthd_bind_surf2d);
1164 NVOBJ_MTHD (dev, 0x0061, 0x02fc, nv04_graph_mthd_set_operation);
1167 NVOBJ_CLASS(dev, 0x0065, GR);
1170 NVOBJ_CLASS(dev, 0x0036, GR);
1171 NVOBJ_MTHD (dev, 0x0036, 0x0184, nv04_graph_mthd_bind_chroma);
1172 NVOBJ_MTHD (dev, 0x0036, 0x0188, nv04_graph_mthd_bind_nv01_patt);
1173 NVOBJ_MTHD (dev, 0x0036, 0x018c, nv04_graph_mthd_bind_rop);
1174 NVOBJ_MTHD (dev, 0x0036, 0x0190, nv04_graph_mthd_bind_beta1);
1175 NVOBJ_MTHD (dev, 0x0036, 0x0194, nv04_graph_mthd_bind_surf_dst);
1176 NVOBJ_MTHD (dev, 0x0036, 0x02fc, nv04_graph_mthd_set_operation);
1179 NVOBJ_CLASS(dev, 0x0076, GR);
1180 NVOBJ_MTHD (dev, 0x0076, 0x0184, nv04_graph_mthd_bind_chroma);
1181 NVOBJ_MTHD (dev, 0x0076, 0x0188, nv04_graph_mthd_bind_nv04_patt);
1182 NVOBJ_MTHD (dev, 0x0076, 0x018c, nv04_graph_mthd_bind_rop);
1183 NVOBJ_MTHD (dev, 0x0076, 0x0190, nv04_graph_mthd_bind_beta1);
1184 NVOBJ_MTHD (dev, 0x0076, 0x0194, nv04_graph_mthd_bind_beta4);
1185 NVOBJ_MTHD (dev, 0x0076, 0x0198, nv04_graph_mthd_bind_surf2d);
1186 NVOBJ_MTHD (dev, 0x0076, 0x02fc, nv04_graph_mthd_set_operation);
1189 NVOBJ_CLASS(dev, 0x0066, GR);
1192 NVOBJ_CLASS(dev, 0x0037, GR);
1193 NVOBJ_MTHD (dev, 0x0037, 0x0188, nv04_graph_mthd_bind_nv01_patt);
1194 NVOBJ_MTHD (dev, 0x0037, 0x018c, nv04_graph_mthd_bind_rop);
1195 NVOBJ_MTHD (dev, 0x0037, 0x0190, nv04_graph_mthd_bind_beta1);
1196 NVOBJ_MTHD (dev, 0x0037, 0x0194, nv04_graph_mthd_bind_surf_dst);
1197 NVOBJ_MTHD (dev, 0x0037, 0x0304, nv04_graph_mthd_set_operation);
1200 NVOBJ_CLASS(dev, 0x0077, GR);
1201 NVOBJ_MTHD (dev, 0x0077, 0x0188, nv04_graph_mthd_bind_nv04_patt);
1202 NVOBJ_MTHD (dev, 0x0077, 0x018c, nv04_graph_mthd_bind_rop);
1203 NVOBJ_MTHD (dev, 0x0077, 0x0190, nv04_graph_mthd_bind_beta1);
1204 NVOBJ_MTHD (dev, 0x0077, 0x0194, nv04_graph_mthd_bind_beta4);
1205 NVOBJ_MTHD (dev, 0x0077, 0x0198, nv04_graph_mthd_bind_surf2d_swzsurf);
1206 NVOBJ_MTHD (dev, 0x0077, 0x0304, nv04_graph_mthd_set_operation);
1209 NVOBJ_CLASS(dev, 0x0030, GR);
1212 NVOBJ_CLASS(dev, 0x0042, GR);
1215 NVOBJ_CLASS(dev, 0x0043, GR);
1218 NVOBJ_CLASS(dev, 0x0012, GR);
1221 NVOBJ_CLASS(dev, 0x0072, GR);
1224 NVOBJ_CLASS(dev, 0x0019, GR);
1227 NVOBJ_CLASS(dev, 0x0018, GR);
1230 NVOBJ_CLASS(dev, 0x0044, GR);
1233 NVOBJ_CLASS(dev, 0x0052, GR);
1236 NVOBJ_CLASS(dev, 0x0053, GR);
1237 NVOBJ_MTHD (dev, 0x0053, 0x02f8, nv04_graph_mthd_surf3d_clip_h);
1238 NVOBJ_MTHD (dev, 0x0053, 0x02fc, nv04_graph_mthd_surf3d_clip_v);
1241 NVOBJ_CLASS(dev, 0x0048, GR);
1242 NVOBJ_MTHD (dev, 0x0048, 0x0188, nv04_graph_mthd_bind_clip);
1243 NVOBJ_MTHD (dev, 0x0048, 0x018c, nv04_graph_mthd_bind_surf_color);
1244 NVOBJ_MTHD (dev, 0x0048, 0x0190, nv04_graph_mthd_bind_surf_zeta);
1247 NVOBJ_CLASS(dev, 0x0054, GR);
1250 NVOBJ_CLASS(dev, 0x0055, GR);
1253 NVOBJ_CLASS(dev, 0x0017, GR);
1256 NVOBJ_CLASS(dev, 0x0057, GR);
1259 NVOBJ_CLASS(dev, 0x0058, GR);
1262 NVOBJ_CLASS(dev, 0x0059, GR);
1265 NVOBJ_CLASS(dev, 0x005a, GR);
1268 NVOBJ_CLASS(dev, 0x005b, GR);
1271 NVOBJ_CLASS(dev, 0x001c, GR);
1272 NVOBJ_MTHD (dev, 0x001c, 0x0184, nv04_graph_mthd_bind_clip);
1273 NVOBJ_MTHD (dev, 0x001c, 0x0188, nv04_graph_mthd_bind_nv01_patt);
1274 NVOBJ_MTHD (dev, 0x001c, 0x018c, nv04_graph_mthd_bind_rop);
1275 NVOBJ_MTHD (dev, 0x001c, 0x0190, nv04_graph_mthd_bind_beta1);
1276 NVOBJ_MTHD (dev, 0x001c, 0x0194, nv04_graph_mthd_bind_surf_dst);
1277 NVOBJ_MTHD (dev, 0x001c, 0x02fc, nv04_graph_mthd_set_operation);
1280 NVOBJ_CLASS(dev, 0x005c, GR);
1281 NVOBJ_MTHD (dev, 0x005c, 0x0184, nv04_graph_mthd_bind_clip);
1282 NVOBJ_MTHD (dev, 0x005c, 0x0188, nv04_graph_mthd_bind_nv04_patt);
1283 NVOBJ_MTHD (dev, 0x005c, 0x018c, nv04_graph_mthd_bind_rop);
1284 NVOBJ_MTHD (dev, 0x005c, 0x0190, nv04_graph_mthd_bind_beta1);
1285 NVOBJ_MTHD (dev, 0x005c, 0x0194, nv04_graph_mthd_bind_beta4);
1286 NVOBJ_MTHD (dev, 0x005c, 0x0198, nv04_graph_mthd_bind_surf2d);
1287 NVOBJ_MTHD (dev, 0x005c, 0x02fc, nv04_graph_mthd_set_operation);
1290 NVOBJ_CLASS(dev, 0x001d, GR);
1291 NVOBJ_MTHD (dev, 0x001d, 0x0184, nv04_graph_mthd_bind_clip);
1292 NVOBJ_MTHD (dev, 0x001d, 0x0188, nv04_graph_mthd_bind_nv01_patt);
1293 NVOBJ_MTHD (dev, 0x001d, 0x018c, nv04_graph_mthd_bind_rop);
1294 NVOBJ_MTHD (dev, 0x001d, 0x0190, nv04_graph_mthd_bind_beta1);
1295 NVOBJ_MTHD (dev, 0x001d, 0x0194, nv04_graph_mthd_bind_surf_dst);
1296 NVOBJ_MTHD (dev, 0x001d, 0x02fc, nv04_graph_mthd_set_operation);
1299 NVOBJ_CLASS(dev, 0x005d, GR);
1300 NVOBJ_MTHD (dev, 0x005d, 0x0184, nv04_graph_mthd_bind_clip);
1301 NVOBJ_MTHD (dev, 0x005d, 0x0188, nv04_graph_mthd_bind_nv04_patt);
1302 NVOBJ_MTHD (dev, 0x005d, 0x018c, nv04_graph_mthd_bind_rop);
1303 NVOBJ_MTHD (dev, 0x005d, 0x0190, nv04_graph_mthd_bind_beta1);
1304 NVOBJ_MTHD (dev, 0x005d, 0x0194, nv04_graph_mthd_bind_beta4);
1305 NVOBJ_MTHD (dev, 0x005d, 0x0198, nv04_graph_mthd_bind_surf2d);
1306 NVOBJ_MTHD (dev, 0x005d, 0x02fc, nv04_graph_mthd_set_operation);
1309 NVOBJ_CLASS(dev, 0x001e, GR);
1310 NVOBJ_MTHD (dev, 0x001e, 0x0184, nv04_graph_mthd_bind_clip);
1311 NVOBJ_MTHD (dev, 0x001e, 0x0188, nv04_graph_mthd_bind_nv01_patt);
1312 NVOBJ_MTHD (dev, 0x001e, 0x018c, nv04_graph_mthd_bind_rop);
1313 NVOBJ_MTHD (dev, 0x001e, 0x0190, nv04_graph_mthd_bind_beta1);
1314 NVOBJ_MTHD (dev, 0x001e, 0x0194, nv04_graph_mthd_bind_surf_dst);
1315 NVOBJ_MTHD (dev, 0x001e, 0x02fc, nv04_graph_mthd_set_operation);
1318 NVOBJ_CLASS(dev, 0x005e, GR);
1319 NVOBJ_MTHD (dev, 0x005e, 0x0184, nv04_graph_mthd_bind_clip);
1320 NVOBJ_MTHD (dev, 0x005e, 0x0188, nv04_graph_mthd_bind_nv04_patt);
1321 NVOBJ_MTHD (dev, 0x005e, 0x018c, nv04_graph_mthd_bind_rop);
1322 NVOBJ_MTHD (dev, 0x005e, 0x0190, nv04_graph_mthd_bind_beta1);
1323 NVOBJ_MTHD (dev, 0x005e, 0x0194, nv04_graph_mthd_bind_beta4);
1324 NVOBJ_MTHD (dev, 0x005e, 0x0198, nv04_graph_mthd_bind_surf2d);
1325 NVOBJ_MTHD (dev, 0x005e, 0x02fc, nv04_graph_mthd_set_operation);