drm/nouveau/fifo: convert to new-style nvkm_engine
[deliverable/linux.git] / drivers / gpu / drm / nouveau / nvkm / engine / gr / nv04.c
CommitLineData
6ee73861
BS
1/*
2 * Copyright 2007 Stephane Marchesin
3 * All Rights Reserved.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice (including the next
b8bf04e1 13 * paragraph) shall be included in all copies or substantial portions of the
6ee73861
BS
14 * Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 */
27f3d6cf 24#include "priv.h"
e3c71eb2 25#include "regs.h"
6ee73861 26
93260d3c 27#include <core/client.h>
13de7f46 28#include <core/gpuobj.h>
e3c71eb2 29#include <engine/fifo.h>
9a65a38c 30#include <engine/fifo/chan.h>
ebb945a9
BS
31#include <subdev/instmem.h>
32#include <subdev/timer.h>
33
/* PGRAPH register offsets making up a software-managed channel context on
 * NV04/NV05.  One value per register is saved to / restored from
 * nv04_gr_chan.nv04[] on channel switch, in exactly this order.
 * NOTE(review): ordering appears significant (the surf3d clip registers
 * 0x40053c..0x400548 come first) — confirm against the context load/save
 * loops before reordering.
 */
static u32
nv04_gr_ctx_regs[] = {
	0x0040053c,
	0x00400544,
	0x00400540,
	0x00400548,
	NV04_PGRAPH_CTX_SWITCH1,
	NV04_PGRAPH_CTX_SWITCH2,
	NV04_PGRAPH_CTX_SWITCH3,
	NV04_PGRAPH_CTX_SWITCH4,
	NV04_PGRAPH_CTX_CACHE1,
	NV04_PGRAPH_CTX_CACHE2,
	NV04_PGRAPH_CTX_CACHE3,
	NV04_PGRAPH_CTX_CACHE4,
	0x00400184,
	0x004001a4,
	0x004001c4,
	0x004001e4,
	0x00400188,
	0x004001a8,
	0x004001c8,
	0x004001e8,
	0x0040018c,
	0x004001ac,
	0x004001cc,
	0x004001ec,
	0x00400190,
	0x004001b0,
	0x004001d0,
	0x004001f0,
	0x00400194,
	0x004001b4,
	0x004001d4,
	0x004001f4,
	0x00400198,
	0x004001b8,
	0x004001d8,
	0x004001f8,
	0x0040019c,
	0x004001bc,
	0x004001dc,
	0x004001fc,
	0x00400174,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV04_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV04_PGRAPH_SURFACE,
	NV04_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV04_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM+0x00,
	NV04_PGRAPH_PATT_COLORRAM+0x04,
	NV04_PGRAPH_PATT_COLORRAM+0x08,
	NV04_PGRAPH_PATT_COLORRAM+0x0c,
	NV04_PGRAPH_PATT_COLORRAM+0x10,
	NV04_PGRAPH_PATT_COLORRAM+0x14,
	NV04_PGRAPH_PATT_COLORRAM+0x18,
	NV04_PGRAPH_PATT_COLORRAM+0x1c,
	NV04_PGRAPH_PATT_COLORRAM+0x20,
	NV04_PGRAPH_PATT_COLORRAM+0x24,
	NV04_PGRAPH_PATT_COLORRAM+0x28,
	NV04_PGRAPH_PATT_COLORRAM+0x2c,
	NV04_PGRAPH_PATT_COLORRAM+0x30,
	NV04_PGRAPH_PATT_COLORRAM+0x34,
	NV04_PGRAPH_PATT_COLORRAM+0x38,
	NV04_PGRAPH_PATT_COLORRAM+0x3c,
	NV04_PGRAPH_PATT_COLORRAM+0x40,
	NV04_PGRAPH_PATT_COLORRAM+0x44,
	NV04_PGRAPH_PATT_COLORRAM+0x48,
	NV04_PGRAPH_PATT_COLORRAM+0x4c,
	NV04_PGRAPH_PATT_COLORRAM+0x50,
	NV04_PGRAPH_PATT_COLORRAM+0x54,
	NV04_PGRAPH_PATT_COLORRAM+0x58,
	NV04_PGRAPH_PATT_COLORRAM+0x5c,
	NV04_PGRAPH_PATT_COLORRAM+0x60,
	NV04_PGRAPH_PATT_COLORRAM+0x64,
	NV04_PGRAPH_PATT_COLORRAM+0x68,
	NV04_PGRAPH_PATT_COLORRAM+0x6c,
	NV04_PGRAPH_PATT_COLORRAM+0x70,
	NV04_PGRAPH_PATT_COLORRAM+0x74,
	NV04_PGRAPH_PATT_COLORRAM+0x78,
	NV04_PGRAPH_PATT_COLORRAM+0x7c,
	NV04_PGRAPH_PATT_COLORRAM+0x80,
	NV04_PGRAPH_PATT_COLORRAM+0x84,
	NV04_PGRAPH_PATT_COLORRAM+0x88,
	NV04_PGRAPH_PATT_COLORRAM+0x8c,
	NV04_PGRAPH_PATT_COLORRAM+0x90,
	NV04_PGRAPH_PATT_COLORRAM+0x94,
	NV04_PGRAPH_PATT_COLORRAM+0x98,
	NV04_PGRAPH_PATT_COLORRAM+0x9c,
	NV04_PGRAPH_PATT_COLORRAM+0xa0,
	NV04_PGRAPH_PATT_COLORRAM+0xa4,
	NV04_PGRAPH_PATT_COLORRAM+0xa8,
	NV04_PGRAPH_PATT_COLORRAM+0xac,
	NV04_PGRAPH_PATT_COLORRAM+0xb0,
	NV04_PGRAPH_PATT_COLORRAM+0xb4,
	NV04_PGRAPH_PATT_COLORRAM+0xb8,
	NV04_PGRAPH_PATT_COLORRAM+0xbc,
	NV04_PGRAPH_PATT_COLORRAM+0xc0,
	NV04_PGRAPH_PATT_COLORRAM+0xc4,
	NV04_PGRAPH_PATT_COLORRAM+0xc8,
	NV04_PGRAPH_PATT_COLORRAM+0xcc,
	NV04_PGRAPH_PATT_COLORRAM+0xd0,
	NV04_PGRAPH_PATT_COLORRAM+0xd4,
	NV04_PGRAPH_PATT_COLORRAM+0xd8,
	NV04_PGRAPH_PATT_COLORRAM+0xdc,
	NV04_PGRAPH_PATT_COLORRAM+0xe0,
	NV04_PGRAPH_PATT_COLORRAM+0xe4,
	NV04_PGRAPH_PATT_COLORRAM+0xe8,
	NV04_PGRAPH_PATT_COLORRAM+0xec,
	NV04_PGRAPH_PATT_COLORRAM+0xf0,
	NV04_PGRAPH_PATT_COLORRAM+0xf4,
	NV04_PGRAPH_PATT_COLORRAM+0xf8,
	NV04_PGRAPH_PATT_COLORRAM+0xfc,
	NV04_PGRAPH_PATTERN,
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	0x00400600,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	NV04_PGRAPH_CONTROL0,
	NV04_PGRAPH_CONTROL1,
	NV04_PGRAPH_CONTROL2,
	NV04_PGRAPH_BLEND,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	0x00400560,
	0x00400568,
	0x00400564,
	0x0040056c,
	0x00400400,
	0x00400480,
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	0x00400534,
	0x00400538,
	0x00400514,
	0x00400518,
	0x0040051c,
	0x00400520,
	0x00400524,
	0x00400528,
	0x0040052c,
	0x00400530,
	0x00400d00,
	0x00400d40,
	0x00400d80,
	0x00400d04,
	0x00400d44,
	0x00400d84,
	0x00400d08,
	0x00400d48,
	0x00400d88,
	0x00400d0c,
	0x00400d4c,
	0x00400d8c,
	0x00400d10,
	0x00400d50,
	0x00400d90,
	0x00400d14,
	0x00400d54,
	0x00400d94,
	0x00400d18,
	0x00400d58,
	0x00400d98,
	0x00400d1c,
	0x00400d5c,
	0x00400d9c,
	0x00400d20,
	0x00400d60,
	0x00400da0,
	0x00400d24,
	0x00400d64,
	0x00400da4,
	0x00400d28,
	0x00400d68,
	0x00400da8,
	0x00400d2c,
	0x00400d6c,
	0x00400dac,
	0x00400d30,
	0x00400d70,
	0x00400db0,
	0x00400d34,
	0x00400d74,
	0x00400db4,
	0x00400d38,
	0x00400d78,
	0x00400db8,
	0x00400d3c,
	0x00400d7c,
	0x00400dbc,
	0x00400590,
	0x00400594,
	0x00400598,
	0x0040059c,
	0x004005a8,
	0x004005ac,
	0x004005b0,
	0x004005b4,
	0x004005c0,
	0x004005c4,
	0x004005c8,
	0x004005cc,
	0x004005d0,
	0x004005d4,
	0x004005d8,
	0x004005dc,
	0x004005e0,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV04_PGRAPH_DVD_COLORFMT,
	NV04_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	0x00400500,
	0x00400504,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
	NV04_PGRAPH_DEBUG_3
};
348
27f3d6cf
BS
/* Downcast from the base nvkm_gr embedded as 'base'. */
#define nv04_gr(p) container_of((p), struct nv04_gr, base)

/* Per-device NV04 graphics engine state. */
struct nv04_gr {
	struct nvkm_gr base;
	struct nv04_gr_chan *chan[16];	/* per-channel contexts, indexed by chid */
	spinlock_t lock;		/* presumably guards chan[] — verify against users */
};

/* Downcast from the nvkm_object embedded as 'object'. */
#define nv04_gr_chan(p) container_of((p), struct nv04_gr_chan, object)

/* Per-channel software-managed PGRAPH context. */
struct nv04_gr_chan {
	struct nvkm_object object;
	struct nv04_gr *gr;		/* owning engine */
	int chid;			/* fifo channel id (0..15) */
	/* saved value for each register in nv04_gr_ctx_regs[], same order */
	u32 nv04[ARRAY_SIZE(nv04_gr_ctx_regs)];
};
4ea52f89 365
ebb945a9
BS
366/*******************************************************************************
367 * Graphics object classes
368 ******************************************************************************/
6ee73861 369
f23d4cf4
MK
370/*
371 * Software methods, why they are needed, and how they all work:
372 *
373 * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
374 * 2d engine settings are kept inside the grobjs themselves. The grobjs are
375 * 3 words long on both. grobj format on NV04 is:
376 *
377 * word 0:
378 * - bits 0-7: class
379 * - bit 12: color key active
380 * - bit 13: clip rect active
381 * - bit 14: if set, destination surface is swizzled and taken from buffer 5
382 * [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
383 * from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
384 * NV03_CONTEXT_SURFACE_DST].
385 * - bits 15-17: 2d operation [aka patch config]
386 * - bit 24: patch valid [enables rendering using this object]
387 * - bit 25: surf3d valid [for tex_tri and multitex_tri only]
388 * word 1:
389 * - bits 0-1: mono format
390 * - bits 8-13: color format
391 * - bits 16-31: DMA_NOTIFY instance
392 * word 2:
393 * - bits 0-15: DMA_A instance
394 * - bits 16-31: DMA_B instance
395 *
396 * On NV05 it's:
397 *
398 * word 0:
399 * - bits 0-7: class
400 * - bit 12: color key active
401 * - bit 13: clip rect active
402 * - bit 14: if set, destination surface is swizzled and taken from buffer 5
403 * [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
404 * from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
405 * NV03_CONTEXT_SURFACE_DST].
406 * - bits 15-17: 2d operation [aka patch config]
407 * - bits 20-22: dither mode
408 * - bit 24: patch valid [enables rendering using this object]
409 * - bit 25: surface_dst/surface_color/surf2d/surf3d valid
410 * - bit 26: surface_src/surface_zeta valid
411 * - bit 27: pattern valid
412 * - bit 28: rop valid
413 * - bit 29: beta1 valid
414 * - bit 30: beta4 valid
415 * word 1:
416 * - bits 0-1: mono format
417 * - bits 8-13: color format
418 * - bits 16-31: DMA_NOTIFY instance
419 * word 2:
420 * - bits 0-15: DMA_A instance
421 * - bits 16-31: DMA_B instance
422 *
423 * NV05 will set/unset the relevant valid bits when you poke the relevant
424 * object-binding methods with object of the proper type, or with the NULL
425 * type. It'll only allow rendering using the grobj if all needed objects
426 * are bound. The needed set of objects depends on selected operation: for
427 * example rop object is needed by ROP_AND, but not by SRCCOPY_AND.
428 *
429 * NV04 doesn't have these methods implemented at all, and doesn't have the
430 * relevant bits in grobj. Instead, it'll allow rendering whenever bit 24
431 * is set. So we have to emulate them in software, internally keeping the
432 * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04,
433 * but the last word isn't actually used for anything, we abuse it for this
434 * purpose.
435 *
436 * Actually, NV05 can optionally check bit 24 too, but we disable this since
437 * there's no use for it.
438 *
439 * For unknown reasons, NV04 implements surf3d binding in hardware as an
440 * exception. Also for unknown reasons, NV04 doesn't implement the clipping
441 * methods on the surf3d object, so we have to emulate them too.
442 */
443
/* Read-modify-write the first context word of the grobj at RAMIN offset
 * 'inst' (accessed through the 0x700000 PRAMIN window), then mirror the new
 * value into PGRAPH's active CTX_SWITCH1 and the CTX_CACHE1 slot of the
 * subchannel that trapped, so the change takes effect without a reload.
 */
static void
nv04_gr_set_ctx1(struct nvkm_device *device, u32 inst, u32 mask, u32 value)
{
	/* subchannel of the trapped method, bits 13..15 of TRAPPED_ADDR */
	int subc = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
	u32 tmp;

	tmp = nvkm_rd32(device, 0x700000 + inst);
	tmp &= ~mask;
	tmp |= value;
	nvkm_wr32(device, 0x700000 + inst, tmp);

	nvkm_wr32(device, NV04_PGRAPH_CTX_SWITCH1, tmp);
	nvkm_wr32(device, NV04_PGRAPH_CTX_CACHE1 + (subc << 2), tmp);
}
458
/* Update the NV05-style "bound objects" bits kept in the grobj's last word
 * (offset 0xc, the word nv04 abuses for this emulation — see block comment
 * above), then recompute the master "patch valid" bit (ctx1 bit 24) from
 * the selected 2d operation and the set of currently-bound objects.
 */
static void
nv04_gr_set_ctx_val(struct nvkm_device *device, u32 inst, u32 mask, u32 value)
{
	int class, op, valid = 1;
	u32 tmp, ctx1;

	ctx1 = nvkm_rd32(device, 0x700000 + inst);
	class = ctx1 & 0xff;
	op = (ctx1 >> 15) & 7;	/* 2d operation [aka patch config] */

	tmp = nvkm_rd32(device, 0x70000c + inst);
	tmp &= ~mask;
	tmp |= value;
	nvkm_wr32(device, 0x70000c + inst, tmp);

	/* check for valid surf2d/surf_dst/surf_color */
	if (!(tmp & 0x02000000))
		valid = 0;
	/* check for valid surf_src/surf_zeta */
	if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
		valid = 0;

	switch (op) {
	/* SRCCOPY_AND, SRCCOPY: no extra objects required */
	case 0:
	case 3:
		break;
	/* ROP_AND: requires pattern and rop */
	case 1:
		if (!(tmp & 0x18000000))
			valid = 0;
		break;
	/* BLEND_AND: requires beta1 */
	case 2:
		if (!(tmp & 0x20000000))
			valid = 0;
		break;
	/* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
	case 4:
	case 5:
		if (!(tmp & 0x40000000))
			valid = 0;
		break;
	}

	nv04_gr_set_ctx1(device, inst, 0x01000000, valid << 24);
}
506
a65955e1
BS
507static bool
508nv04_gr_mthd_set_operation(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 509{
a65955e1 510 u8 class = nvkm_rd32(device, 0x700000) & 0x000000ff;
f23d4cf4 511 if (data > 5)
a65955e1 512 return false;
f23d4cf4 513 /* Old versions of the objects only accept first three operations. */
b8c157d3 514 if (data > 2 && class < 0x40)
a65955e1
BS
515 return false;
516 nv04_gr_set_ctx1(device, inst, 0x00038000, data << 15);
f23d4cf4 517 /* changing operation changes set of objects needed for validation */
a65955e1
BS
518 nv04_gr_set_ctx_val(device, inst, 0, 0);
519 return true;
f23d4cf4
MK
520}
521
a65955e1
BS
522static bool
523nv04_gr_mthd_surf3d_clip_h(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 524{
ebb945a9
BS
525 u32 min = data & 0xffff, max;
526 u32 w = data >> 16;
f23d4cf4
MK
527 if (min & 0x8000)
528 /* too large */
a65955e1 529 return false;
f23d4cf4
MK
530 if (w & 0x8000)
531 /* yes, it accepts negative for some reason. */
532 w |= 0xffff0000;
533 max = min + w;
534 max &= 0x3ffff;
276836d4
BS
535 nvkm_wr32(device, 0x40053c, min);
536 nvkm_wr32(device, 0x400544, max);
a65955e1 537 return true;
f23d4cf4
MK
538}
539
a65955e1
BS
540static bool
541nv04_gr_mthd_surf3d_clip_v(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 542{
ebb945a9
BS
543 u32 min = data & 0xffff, max;
544 u32 w = data >> 16;
f23d4cf4
MK
545 if (min & 0x8000)
546 /* too large */
a65955e1 547 return false;
f23d4cf4
MK
548 if (w & 0x8000)
549 /* yes, it accepts negative for some reason. */
550 w |= 0xffff0000;
551 max = min + w;
552 max &= 0x3ffff;
276836d4
BS
553 nvkm_wr32(device, 0x400540, min);
554 nvkm_wr32(device, 0x400548, max);
a65955e1 555 return true;
6ee73861
BS
556}
557
a65955e1
BS
/* Return the class id (word 0 low byte, truncated via the u8 return) of
 * the object named by a bind method's data argument.
 * NOTE(review): 'inst' appears to be in 16-byte RAMIN units here (hence
 * the << 4), unlike the byte offsets used elsewhere — confirm with callers.
 */
static u8
nv04_gr_mthd_bind_class(struct nvkm_device *device, u32 inst)
{
	return nvkm_rd32(device, 0x700000 + (inst << 4));
}
563
a65955e1
BS
564static bool
565nv04_gr_mthd_bind_surf2d(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 566{
a65955e1 567 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 568 case 0x30:
a65955e1
BS
569 nv04_gr_set_ctx1(device, inst, 0x00004000, 0);
570 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0);
571 return true;
f23d4cf4 572 case 0x42:
a65955e1
BS
573 nv04_gr_set_ctx1(device, inst, 0x00004000, 0);
574 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
575 return true;
f23d4cf4 576 }
a65955e1 577 return false;
f23d4cf4
MK
578}
579
a65955e1
BS
580static bool
581nv04_gr_mthd_bind_surf2d_swzsurf(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 582{
a65955e1 583 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 584 case 0x30:
a65955e1
BS
585 nv04_gr_set_ctx1(device, inst, 0x00004000, 0);
586 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0);
587 return true;
f23d4cf4 588 case 0x42:
a65955e1
BS
589 nv04_gr_set_ctx1(device, inst, 0x00004000, 0);
590 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
591 return true;
f23d4cf4 592 case 0x52:
a65955e1
BS
593 nv04_gr_set_ctx1(device, inst, 0x00004000, 0x00004000);
594 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
595 return true;
f23d4cf4 596 }
a65955e1 597 return false;
f23d4cf4
MK
598}
599
a65955e1
BS
600static bool
601nv01_gr_mthd_bind_patt(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 602{
a65955e1 603 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 604 case 0x30:
a65955e1
BS
605 nv04_gr_set_ctx_val(device, inst, 0x08000000, 0);
606 return true;
f23d4cf4 607 case 0x18:
a65955e1
BS
608 nv04_gr_set_ctx_val(device, inst, 0x08000000, 0x08000000);
609 return true;
f23d4cf4 610 }
a65955e1 611 return false;
f23d4cf4
MK
612}
613
a65955e1
BS
614static bool
615nv04_gr_mthd_bind_patt(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 616{
a65955e1 617 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 618 case 0x30:
a65955e1
BS
619 nv04_gr_set_ctx_val(device, inst, 0x08000000, 0);
620 return true;
f23d4cf4 621 case 0x44:
a65955e1
BS
622 nv04_gr_set_ctx_val(device, inst, 0x08000000, 0x08000000);
623 return true;
f23d4cf4 624 }
a65955e1 625 return false;
f23d4cf4
MK
626}
627
a65955e1
BS
628static bool
629nv04_gr_mthd_bind_rop(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 630{
a65955e1 631 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 632 case 0x30:
a65955e1
BS
633 nv04_gr_set_ctx_val(device, inst, 0x10000000, 0);
634 return true;
f23d4cf4 635 case 0x43:
a65955e1
BS
636 nv04_gr_set_ctx_val(device, inst, 0x10000000, 0x10000000);
637 return true;
f23d4cf4 638 }
a65955e1 639 return false;
f23d4cf4
MK
640}
641
a65955e1
BS
642static bool
643nv04_gr_mthd_bind_beta1(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 644{
a65955e1 645 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 646 case 0x30:
a65955e1
BS
647 nv04_gr_set_ctx_val(device, inst, 0x20000000, 0);
648 return true;
f23d4cf4 649 case 0x12:
a65955e1
BS
650 nv04_gr_set_ctx_val(device, inst, 0x20000000, 0x20000000);
651 return true;
f23d4cf4 652 }
a65955e1 653 return false;
f23d4cf4
MK
654}
655
a65955e1
BS
656static bool
657nv04_gr_mthd_bind_beta4(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 658{
a65955e1 659 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 660 case 0x30:
a65955e1
BS
661 nv04_gr_set_ctx_val(device, inst, 0x40000000, 0);
662 return true;
f23d4cf4 663 case 0x72:
a65955e1
BS
664 nv04_gr_set_ctx_val(device, inst, 0x40000000, 0x40000000);
665 return true;
f23d4cf4 666 }
a65955e1 667 return false;
f23d4cf4
MK
668}
669
a65955e1
BS
670static bool
671nv04_gr_mthd_bind_surf_dst(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 672{
a65955e1 673 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 674 case 0x30:
a65955e1
BS
675 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0);
676 return true;
f23d4cf4 677 case 0x58:
a65955e1
BS
678 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
679 return true;
f23d4cf4 680 }
a65955e1 681 return false;
f23d4cf4
MK
682}
683
a65955e1
BS
684static bool
685nv04_gr_mthd_bind_surf_src(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 686{
a65955e1 687 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 688 case 0x30:
a65955e1
BS
689 nv04_gr_set_ctx_val(device, inst, 0x04000000, 0);
690 return true;
f23d4cf4 691 case 0x59:
a65955e1
BS
692 nv04_gr_set_ctx_val(device, inst, 0x04000000, 0x04000000);
693 return true;
f23d4cf4 694 }
a65955e1 695 return false;
f23d4cf4
MK
696}
697
a65955e1
BS
698static bool
699nv04_gr_mthd_bind_surf_color(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 700{
a65955e1 701 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 702 case 0x30:
a65955e1
BS
703 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0);
704 return true;
f23d4cf4 705 case 0x5a:
a65955e1
BS
706 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
707 return true;
f23d4cf4 708 }
a65955e1 709 return false;
f23d4cf4
MK
710}
711
a65955e1
BS
712static bool
713nv04_gr_mthd_bind_surf_zeta(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 714{
a65955e1 715 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 716 case 0x30:
a65955e1
BS
717 nv04_gr_set_ctx_val(device, inst, 0x04000000, 0);
718 return true;
f23d4cf4 719 case 0x5b:
a65955e1
BS
720 nv04_gr_set_ctx_val(device, inst, 0x04000000, 0x04000000);
721 return true;
f23d4cf4 722 }
a65955e1 723 return false;
f23d4cf4
MK
724}
725
a65955e1
BS
726static bool
727nv01_gr_mthd_bind_clip(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 728{
a65955e1 729 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 730 case 0x30:
a65955e1
BS
731 nv04_gr_set_ctx1(device, inst, 0x2000, 0);
732 return true;
f23d4cf4 733 case 0x19:
a65955e1
BS
734 nv04_gr_set_ctx1(device, inst, 0x2000, 0x2000);
735 return true;
f23d4cf4 736 }
a65955e1 737 return false;
f23d4cf4
MK
738}
739
a65955e1
BS
740static bool
741nv01_gr_mthd_bind_chroma(struct nvkm_device *device, u32 inst, u32 data)
f23d4cf4 742{
a65955e1 743 switch (nv04_gr_mthd_bind_class(device, data)) {
f23d4cf4 744 case 0x30:
a65955e1
BS
745 nv04_gr_set_ctx1(device, inst, 0x1000, 0);
746 return true;
f23d4cf4
MK
747 /* Yes, for some reason even the old versions of objects
748 * accept 0x57 and not 0x17. Consistency be damned.
749 */
750 case 0x57:
a65955e1
BS
751 nv04_gr_set_ctx1(device, inst, 0x1000, 0x1000);
752 return true;
f23d4cf4 753 }
a65955e1 754 return false;
f23d4cf4
MK
755}
756
a65955e1
BS
757static bool
758nv03_gr_mthd_gdi(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
759{
760 bool (*func)(struct nvkm_device *, u32, u32);
761 switch (mthd) {
762 case 0x0184: func = nv01_gr_mthd_bind_patt; break;
763 case 0x0188: func = nv04_gr_mthd_bind_rop; break;
764 case 0x018c: func = nv04_gr_mthd_bind_beta1; break;
765 case 0x0190: func = nv04_gr_mthd_bind_surf_dst; break;
766 case 0x02fc: func = nv04_gr_mthd_set_operation; break;
767 default:
768 return false;
769 }
770 return func(device, inst, data);
771}
ebb945a9 772
a65955e1
BS
773static bool
774nv04_gr_mthd_gdi(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
775{
776 bool (*func)(struct nvkm_device *, u32, u32);
777 switch (mthd) {
778 case 0x0188: func = nv04_gr_mthd_bind_patt; break;
779 case 0x018c: func = nv04_gr_mthd_bind_rop; break;
780 case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
781 case 0x0194: func = nv04_gr_mthd_bind_beta4; break;
782 case 0x0198: func = nv04_gr_mthd_bind_surf2d; break;
783 case 0x02fc: func = nv04_gr_mthd_set_operation; break;
784 default:
785 return false;
786 }
787 return func(device, inst, data);
788}
ebb945a9 789
a65955e1
BS
790static bool
791nv01_gr_mthd_blit(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
792{
793 bool (*func)(struct nvkm_device *, u32, u32);
794 switch (mthd) {
795 case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
796 case 0x0188: func = nv01_gr_mthd_bind_clip; break;
797 case 0x018c: func = nv01_gr_mthd_bind_patt; break;
798 case 0x0190: func = nv04_gr_mthd_bind_rop; break;
799 case 0x0194: func = nv04_gr_mthd_bind_beta1; break;
800 case 0x0198: func = nv04_gr_mthd_bind_surf_dst; break;
801 case 0x019c: func = nv04_gr_mthd_bind_surf_src; break;
802 case 0x02fc: func = nv04_gr_mthd_set_operation; break;
803 default:
804 return false;
805 }
806 return func(device, inst, data);
807}
ebb945a9 808
a65955e1
BS
809static bool
810nv04_gr_mthd_blit(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
811{
812 bool (*func)(struct nvkm_device *, u32, u32);
813 switch (mthd) {
814 case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
815 case 0x0188: func = nv01_gr_mthd_bind_clip; break;
816 case 0x018c: func = nv04_gr_mthd_bind_patt; break;
817 case 0x0190: func = nv04_gr_mthd_bind_rop; break;
818 case 0x0194: func = nv04_gr_mthd_bind_beta1; break;
819 case 0x0198: func = nv04_gr_mthd_bind_beta4; break;
820 case 0x019c: func = nv04_gr_mthd_bind_surf2d; break;
821 case 0x02fc: func = nv04_gr_mthd_set_operation; break;
822 default:
823 return false;
824 }
825 return func(device, inst, data);
826}
ebb945a9 827
a65955e1
BS
828static bool
829nv04_gr_mthd_iifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
830{
831 bool (*func)(struct nvkm_device *, u32, u32);
832 switch (mthd) {
833 case 0x0188: func = nv01_gr_mthd_bind_chroma; break;
834 case 0x018c: func = nv01_gr_mthd_bind_clip; break;
835 case 0x0190: func = nv04_gr_mthd_bind_patt; break;
836 case 0x0194: func = nv04_gr_mthd_bind_rop; break;
837 case 0x0198: func = nv04_gr_mthd_bind_beta1; break;
838 case 0x019c: func = nv04_gr_mthd_bind_beta4; break;
839 case 0x01a0: func = nv04_gr_mthd_bind_surf2d_swzsurf; break;
840 case 0x03e4: func = nv04_gr_mthd_set_operation; break;
841 default:
842 return false;
843 }
844 return func(device, inst, data);
845}
ebb945a9 846
a65955e1
BS
847static bool
848nv01_gr_mthd_ifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
849{
850 bool (*func)(struct nvkm_device *, u32, u32);
851 switch (mthd) {
852 case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
853 case 0x0188: func = nv01_gr_mthd_bind_clip; break;
854 case 0x018c: func = nv01_gr_mthd_bind_patt; break;
855 case 0x0190: func = nv04_gr_mthd_bind_rop; break;
856 case 0x0194: func = nv04_gr_mthd_bind_beta1; break;
857 case 0x0198: func = nv04_gr_mthd_bind_surf_dst; break;
858 case 0x02fc: func = nv04_gr_mthd_set_operation; break;
859 default:
860 return false;
861 }
862 return func(device, inst, data);
863}
ebb945a9 864
a65955e1
BS
865static bool
866nv04_gr_mthd_ifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
867{
868 bool (*func)(struct nvkm_device *, u32, u32);
869 switch (mthd) {
870 case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
871 case 0x0188: func = nv01_gr_mthd_bind_clip; break;
872 case 0x018c: func = nv04_gr_mthd_bind_patt; break;
873 case 0x0190: func = nv04_gr_mthd_bind_rop; break;
874 case 0x0194: func = nv04_gr_mthd_bind_beta1; break;
875 case 0x0198: func = nv04_gr_mthd_bind_beta4; break;
876 case 0x019c: func = nv04_gr_mthd_bind_surf2d; break;
877 case 0x02fc: func = nv04_gr_mthd_set_operation; break;
878 default:
879 return false;
880 }
881 return func(device, inst, data);
882}
ebb945a9 883
a65955e1
BS
884static bool
885nv03_gr_mthd_sifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
886{
887 bool (*func)(struct nvkm_device *, u32, u32);
888 switch (mthd) {
889 case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
890 case 0x0188: func = nv01_gr_mthd_bind_patt; break;
891 case 0x018c: func = nv04_gr_mthd_bind_rop; break;
892 case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
893 case 0x0194: func = nv04_gr_mthd_bind_surf_dst; break;
894 case 0x02fc: func = nv04_gr_mthd_set_operation; break;
895 default:
896 return false;
897 }
898 return func(device, inst, data);
899}
ebb945a9 900
a65955e1
BS
901static bool
902nv04_gr_mthd_sifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
903{
904 bool (*func)(struct nvkm_device *, u32, u32);
905 switch (mthd) {
906 case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
907 case 0x0188: func = nv04_gr_mthd_bind_patt; break;
908 case 0x018c: func = nv04_gr_mthd_bind_rop; break;
909 case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
910 case 0x0194: func = nv04_gr_mthd_bind_beta4; break;
911 case 0x0198: func = nv04_gr_mthd_bind_surf2d; break;
912 case 0x02fc: func = nv04_gr_mthd_set_operation; break;
913 default:
914 return false;
915 }
916 return func(device, inst, data);
917}
ebb945a9 918
a65955e1
BS
919static bool
920nv03_gr_mthd_sifm(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
921{
922 bool (*func)(struct nvkm_device *, u32, u32);
923 switch (mthd) {
924 case 0x0188: func = nv01_gr_mthd_bind_patt; break;
925 case 0x018c: func = nv04_gr_mthd_bind_rop; break;
926 case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
927 case 0x0194: func = nv04_gr_mthd_bind_surf_dst; break;
928 case 0x0304: func = nv04_gr_mthd_set_operation; break;
929 default:
930 return false;
931 }
932 return func(device, inst, data);
933}
ebb945a9 934
a65955e1
BS
935static bool
936nv04_gr_mthd_sifm(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
937{
938 bool (*func)(struct nvkm_device *, u32, u32);
939 switch (mthd) {
940 case 0x0188: func = nv04_gr_mthd_bind_patt; break;
941 case 0x018c: func = nv04_gr_mthd_bind_rop; break;
942 case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
943 case 0x0194: func = nv04_gr_mthd_bind_beta4; break;
944 case 0x0198: func = nv04_gr_mthd_bind_surf2d; break;
945 case 0x0304: func = nv04_gr_mthd_set_operation; break;
946 default:
947 return false;
948 }
949 return func(device, inst, data);
950}
ebb945a9 951
a65955e1
BS
952static bool
953nv04_gr_mthd_surf3d(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
954{
955 bool (*func)(struct nvkm_device *, u32, u32);
956 switch (mthd) {
957 case 0x02f8: func = nv04_gr_mthd_surf3d_clip_h; break;
958 case 0x02fc: func = nv04_gr_mthd_surf3d_clip_v; break;
959 default:
960 return false;
961 }
962 return func(device, inst, data);
963}
ebb945a9 964
a65955e1
BS
965static bool
966nv03_gr_mthd_ttri(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
967{
968 bool (*func)(struct nvkm_device *, u32, u32);
969 switch (mthd) {
970 case 0x0188: func = nv01_gr_mthd_bind_clip; break;
971 case 0x018c: func = nv04_gr_mthd_bind_surf_color; break;
972 case 0x0190: func = nv04_gr_mthd_bind_surf_zeta; break;
973 default:
974 return false;
975 }
976 return func(device, inst, data);
977}
ebb945a9 978
a65955e1
BS
979static bool
980nv01_gr_mthd_prim(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
981{
982 bool (*func)(struct nvkm_device *, u32, u32);
983 switch (mthd) {
984 case 0x0184: func = nv01_gr_mthd_bind_clip; break;
985 case 0x0188: func = nv01_gr_mthd_bind_patt; break;
986 case 0x018c: func = nv04_gr_mthd_bind_rop; break;
987 case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
988 case 0x0194: func = nv04_gr_mthd_bind_surf_dst; break;
989 case 0x02fc: func = nv04_gr_mthd_set_operation; break;
990 default:
991 return false;
992 }
993 return func(device, inst, data);
994}
ebb945a9 995
a65955e1
BS
996static bool
997nv04_gr_mthd_prim(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
998{
999 bool (*func)(struct nvkm_device *, u32, u32);
1000 switch (mthd) {
1001 case 0x0184: func = nv01_gr_mthd_bind_clip; break;
1002 case 0x0188: func = nv04_gr_mthd_bind_patt; break;
1003 case 0x018c: func = nv04_gr_mthd_bind_rop; break;
1004 case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
1005 case 0x0194: func = nv04_gr_mthd_bind_beta4; break;
1006 case 0x0198: func = nv04_gr_mthd_bind_surf2d; break;
1007 case 0x02fc: func = nv04_gr_mthd_set_operation; break;
1008 default:
1009 return false;
1010 }
1011 return func(device, inst, data);
1012}
1013
1014static bool
1015nv04_gr_mthd(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
1016{
1017 bool (*func)(struct nvkm_device *, u32, u32, u32);
1018 switch (nvkm_rd32(device, 0x700000 + inst) & 0x000000ff) {
1019 case 0x1c ... 0x1e:
1020 func = nv01_gr_mthd_prim; break;
1021 case 0x1f: func = nv01_gr_mthd_blit; break;
1022 case 0x21: func = nv01_gr_mthd_ifc; break;
1023 case 0x36: func = nv03_gr_mthd_sifc; break;
1024 case 0x37: func = nv03_gr_mthd_sifm; break;
1025 case 0x48: func = nv03_gr_mthd_ttri; break;
1026 case 0x4a: func = nv04_gr_mthd_gdi; break;
1027 case 0x4b: func = nv03_gr_mthd_gdi; break;
1028 case 0x53: func = nv04_gr_mthd_surf3d; break;
1029 case 0x5c ... 0x5e:
1030 func = nv04_gr_mthd_prim; break;
1031 case 0x5f: func = nv04_gr_mthd_blit; break;
1032 case 0x60: func = nv04_gr_mthd_iifc; break;
1033 case 0x61: func = nv04_gr_mthd_ifc; break;
1034 case 0x76: func = nv04_gr_mthd_sifc; break;
1035 case 0x77: func = nv04_gr_mthd_sifm; break;
1036 default:
1037 return false;
1038 }
1039 return func(device, inst, mthd, data);
1040}
ebb945a9
BS
1041
1042static int
27f3d6cf
BS
1043nv04_gr_object_bind(struct nvkm_object *object, struct nvkm_gpuobj *parent,
1044 int align, struct nvkm_gpuobj **pgpuobj)
ebb945a9 1045{
27f3d6cf
BS
1046 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 16, align,
1047 false, parent, pgpuobj);
1048 if (ret == 0) {
1049 nvkm_kmap(*pgpuobj);
1050 nvkm_wo32(*pgpuobj, 0x00, object->oclass_name);
1051 nvkm_wo32(*pgpuobj, 0x04, 0x00000000);
1052 nvkm_wo32(*pgpuobj, 0x08, 0x00000000);
ebb945a9 1053#ifdef __BIG_ENDIAN
27f3d6cf 1054 nvkm_mo32(*pgpuobj, 0x08, 0x00080000, 0x00080000);
ebb945a9 1055#endif
27f3d6cf
BS
1056 nvkm_wo32(*pgpuobj, 0x0c, 0x00000000);
1057 nvkm_done(*pgpuobj);
1058 }
1059 return ret;
ebb945a9
BS
1060}
1061
27f3d6cf
BS
/* Object functions shared by every nv04 graph object class; only the
 * instmem bind step is needed.
 */
const struct nvkm_object_func
nv04_gr_object = {
	.bind = nv04_gr_object_bind,
};
1066
1067/*******************************************************************************
1068 * PGRAPH context
1069 ******************************************************************************/
1070
b8bf04e1 1071static struct nv04_gr_chan *
bfee3f3d 1072nv04_gr_channel(struct nv04_gr *gr)
ebb945a9 1073{
276836d4 1074 struct nvkm_device *device = gr->base.engine.subdev.device;
b8bf04e1 1075 struct nv04_gr_chan *chan = NULL;
276836d4
BS
1076 if (nvkm_rd32(device, NV04_PGRAPH_CTX_CONTROL) & 0x00010000) {
1077 int chid = nvkm_rd32(device, NV04_PGRAPH_CTX_USER) >> 24;
bfee3f3d
BS
1078 if (chid < ARRAY_SIZE(gr->chan))
1079 chan = gr->chan[chid];
ebb945a9
BS
1080 }
1081 return chan;
1082}
1083
1084static int
b8bf04e1 1085nv04_gr_load_context(struct nv04_gr_chan *chan, int chid)
ebb945a9 1086{
27f3d6cf 1087 struct nvkm_device *device = chan->gr->base.engine.subdev.device;
ebb945a9
BS
1088 int i;
1089
b8bf04e1 1090 for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++)
276836d4 1091 nvkm_wr32(device, nv04_gr_ctx_regs[i], chan->nv04[i]);
ebb945a9 1092
276836d4
BS
1093 nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
1094 nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, chid << 24);
1095 nvkm_mask(device, NV04_PGRAPH_FFINTFC_ST2, 0xfff00000, 0x00000000);
ebb945a9
BS
1096 return 0;
1097}
1098
1099static int
b8bf04e1 1100nv04_gr_unload_context(struct nv04_gr_chan *chan)
ebb945a9 1101{
27f3d6cf 1102 struct nvkm_device *device = chan->gr->base.engine.subdev.device;
ebb945a9
BS
1103 int i;
1104
b8bf04e1 1105 for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++)
276836d4 1106 chan->nv04[i] = nvkm_rd32(device, nv04_gr_ctx_regs[i]);
ebb945a9 1107
276836d4
BS
1108 nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
1109 nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);
ebb945a9
BS
1110 return 0;
1111}
1112
/* Perform a software context switch: wait for PGRAPH to idle, save the
 * outgoing channel's context (if any), then load the context of the
 * channel that trapped.  Called from the interrupt handler with
 * gr->lock held.
 */
static void
nv04_gr_context_switch(struct nv04_gr *gr)
{
	struct nvkm_device *device = gr->base.engine.subdev.device;
	struct nv04_gr_chan *prev = NULL;
	struct nv04_gr_chan *next = NULL;
	int chid;

	nv04_gr_idle(&gr->base);

	/* If previous context is valid, we need to save it */
	prev = nv04_gr_channel(gr);
	if (prev)
		nv04_gr_unload_context(prev);

	/* load context for next channel */
	chid = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 24) & 0x0f;
	next = gr->chan[chid];
	if (next)
		nv04_gr_load_context(next, chid);
}
1134
b8bf04e1 1135static u32 *ctx_reg(struct nv04_gr_chan *chan, u32 reg)
ebb945a9
BS
1136{
1137 int i;
1138
b8bf04e1
BS
1139 for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++) {
1140 if (nv04_gr_ctx_regs[i] == reg)
ebb945a9
BS
1141 return &chan->nv04[i];
1142 }
1143
1144 return NULL;
1145}
1146
27f3d6cf
BS
/* Channel destructor: remove the channel from the tracking table under
 * the lock so the IRQ handler can no longer see it, then return the
 * allocation for the core to kfree.
 */
static void *
nv04_gr_chan_dtor(struct nvkm_object *object)
{
	struct nv04_gr_chan *chan = nv04_gr_chan(object);
	struct nv04_gr *gr = chan->gr;
	unsigned long flags;

	spin_lock_irqsave(&gr->lock, flags);
	gr->chan[chan->chid] = NULL;
	spin_unlock_irqrestore(&gr->lock, flags);
	return chan;
}
1159
/* Channel fini: with PGRAPH fetches disabled, unload this channel's
 * context if it is the one currently resident, then re-enable fetches.
 * The whole sequence runs under gr->lock to exclude the IRQ handler.
 */
static int
nv04_gr_chan_fini(struct nvkm_object *object, bool suspend)
{
	struct nv04_gr_chan *chan = nv04_gr_chan(object);
	struct nv04_gr *gr = chan->gr;
	struct nvkm_device *device = gr->base.engine.subdev.device;
	unsigned long flags;

	spin_lock_irqsave(&gr->lock, flags);
	/* Halt PGRAPH FIFO access while the context is being torn down. */
	nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
	if (nv04_gr_channel(gr) == chan)
		nv04_gr_unload_context(chan);
	nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	spin_unlock_irqrestore(&gr->lock, flags);
	return 0;
}
1176
27f3d6cf
BS
/* Object functions for an nv04 PGRAPH channel context. */
static const struct nvkm_object_func
nv04_gr_chan = {
	.dtor = nv04_gr_chan_dtor,
	.fini = nv04_gr_chan_fini,
};
1182
27f3d6cf
BS
1183static int
1184nv04_gr_chan_new(struct nvkm_gr *base, struct nvkm_fifo_chan *fifoch,
1185 const struct nvkm_oclass *oclass, struct nvkm_object **pobject)
1186{
1187 struct nv04_gr *gr = nv04_gr(base);
1188 struct nv04_gr_chan *chan;
1189 unsigned long flags;
1190
1191 if (!(chan = kzalloc(sizeof(*chan), GFP_KERNEL)))
1192 return -ENOMEM;
1193 nvkm_object_ctor(&nv04_gr_chan, oclass, &chan->object);
1194 chan->gr = gr;
1195 chan->chid = fifoch->chid;
1196 *pobject = &chan->object;
1197
1198 *ctx_reg(chan, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;
1199
1200 spin_lock_irqsave(&gr->lock, flags);
1201 gr->chan[chan->chid] = chan;
1202 spin_unlock_irqrestore(&gr->lock, flags);
1203 return 0;
1204}
1205
ebb945a9
BS
1206/*******************************************************************************
1207 * PGRAPH engine/subdev functions
1208 ******************************************************************************/
1209
/* Poll PGRAPH_STATUS until the engine reports idle, with a 2ms timeout.
 * On NV40-class hardware the SYNC_STALL status bit is masked out, since
 * it does not indicate pending work there.
 *
 * Returns true when idle, false (after logging) on timeout.
 */
bool
nv04_gr_idle(struct nvkm_gr *gr)
{
	struct nvkm_subdev *subdev = &gr->engine.subdev;
	struct nvkm_device *device = subdev->device;
	u32 mask = 0xffffffff;

	if (device->card_type == NV_40)
		mask &= ~NV40_PGRAPH_STATUS_SYNC_STALL;

	/* nvkm_msec() re-evaluates its statement body until it executes
	 * 'break' or the timeout elapses (returns < 0 on timeout). */
	if (nvkm_msec(device, 2000,
		if (!(nvkm_rd32(device, NV04_PGRAPH_STATUS) & mask))
			break;
	) < 0) {
		nvkm_error(subdev, "idle timed out with status %08x\n",
			   nvkm_rd32(device, NV04_PGRAPH_STATUS));
		return false;
	}

	return true;
}
1231
/* Human-readable names for PGRAPH interrupt status bits. */
static const struct nvkm_bitfield
nv04_gr_intr_name[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{}
};
1237
/* Human-readable names for PGRAPH NSTATUS bits. */
static const struct nvkm_bitfield
nv04_gr_nstatus[] = {
	{ NV04_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
	{ NV04_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
	{ NV04_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
	{ NV04_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
	{}
};
1246
/* Human-readable names for PGRAPH NSOURCE (error source) bits; shared
 * with other nv[0-3]x gr implementations, hence non-static.
 */
const struct nvkm_bitfield
nv04_gr_nsource[] = {
	{ NV03_PGRAPH_NSOURCE_NOTIFICATION,       "NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DATA_ERROR,         "DATA_ERROR" },
	{ NV03_PGRAPH_NSOURCE_PROTECTION_ERROR,   "PROTECTION_ERROR" },
	{ NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION,    "RANGE_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_COLOR,        "LIMIT_COLOR" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_ZETA,         "LIMIT_ZETA" },
	{ NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD,       "ILLEGAL_MTHD" },
	{ NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION,   "DMA_R_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION,   "DMA_W_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION,   "FORMAT_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION,    "PATCH_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_STATE_INVALID,      "STATE_INVALID" },
	{ NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY,      "DOUBLE_NOTIFY" },
	{ NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE,      "NOTIFY_IN_USE" },
	{ NV03_PGRAPH_NSOURCE_METHOD_CNT,         "METHOD_CNT" },
	{ NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION,   "BFR_NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_A,        "DMA_WIDTH_A" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_B,        "DMA_WIDTH_B" },
	{}
};
1270
/* PGRAPH interrupt handler.
 *
 * Decodes the trapped channel/subchannel/method from TRAPPED_ADDR,
 * tries to resolve ILLEGAL_MTHD notifies in software via nv04_gr_mthd(),
 * services software context-switch requests, acknowledges all handled
 * bits, and logs anything left over.  Runs entirely under gr->lock.
 */
static void
nv04_gr_intr(struct nvkm_subdev *subdev)
{
	struct nv04_gr *gr = (void *)subdev;
	struct nv04_gr_chan *chan = NULL;
	struct nvkm_device *device = gr->base.engine.subdev.device;
	u32 stat = nvkm_rd32(device, NV03_PGRAPH_INTR);
	u32 nsource = nvkm_rd32(device, NV03_PGRAPH_NSOURCE);
	u32 nstatus = nvkm_rd32(device, NV03_PGRAPH_NSTATUS);
	u32 addr = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR);
	/* TRAPPED_ADDR packs channel id, subchannel and method offset. */
	u32 chid = (addr & 0x0f000000) >> 24;
	u32 subc = (addr & 0x0000e000) >> 13;
	u32 mthd = (addr & 0x00001ffc);
	u32 data = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_DATA);
	u32 class = nvkm_rd32(device, 0x400180 + subc * 4) & 0xff;
	u32 inst = (nvkm_rd32(device, 0x40016c) & 0xffff) << 4;
	u32 show = stat;
	char msg[128], src[128], sta[128];
	unsigned long flags;

	spin_lock_irqsave(&gr->lock, flags);
	chan = gr->chan[chid];

	if (stat & NV_PGRAPH_INTR_NOTIFY) {
		if (chan && (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD)) {
			/* Suppress the log if the method was emulated. */
			if (!nv04_gr_mthd(device, inst, mthd, data))
				show &= ~NV_PGRAPH_INTR_NOTIFY;
		}
	}

	if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
		nvkm_wr32(device, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
		stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		nv04_gr_context_switch(gr);
	}

	/* Acknowledge remaining interrupts and re-enable PGRAPH fetches. */
	nvkm_wr32(device, NV03_PGRAPH_INTR, stat);
	nvkm_wr32(device, NV04_PGRAPH_FIFO, 0x00000001);

	if (show) {
		nvkm_snprintbf(msg, sizeof(msg), nv04_gr_intr_name, show);
		nvkm_snprintbf(src, sizeof(src), nv04_gr_nsource, nsource);
		nvkm_snprintbf(sta, sizeof(sta), nv04_gr_nstatus, nstatus);
		nvkm_error(subdev, "intr %08x [%s] nsource %08x [%s] "
				   "nstatus %08x [%s] ch %d [%s] subc %d "
				   "class %04x mthd %04x data %08x\n",
			   show, msg, nsource, src, nstatus, sta, chid,
			   chan ? chan->object.client->name : "unknown",
			   subc, class, mthd, data);
	}

	spin_unlock_irqrestore(&gr->lock, flags);
}
1325
27f3d6cf
BS
/* nv04 GR engine functions and the object classes it exposes.  All
 * classes share nv04_gr_object (a plain 16-byte instmem bind).
 */
static const struct nvkm_gr_func
nv04_gr = {
	.chan_new = nv04_gr_chan_new,
	.sclass = {
		{ -1, -1, 0x0012, &nv04_gr_object }, /* beta1 */
		{ -1, -1, 0x0017, &nv04_gr_object }, /* chroma */
		{ -1, -1, 0x0018, &nv04_gr_object }, /* pattern (nv01) */
		{ -1, -1, 0x0019, &nv04_gr_object }, /* clip */
		{ -1, -1, 0x001c, &nv04_gr_object }, /* line */
		{ -1, -1, 0x001d, &nv04_gr_object }, /* tri */
		{ -1, -1, 0x001e, &nv04_gr_object }, /* rect */
		{ -1, -1, 0x001f, &nv04_gr_object },
		{ -1, -1, 0x0021, &nv04_gr_object },
		{ -1, -1, 0x0030, &nv04_gr_object }, /* null */
		{ -1, -1, 0x0036, &nv04_gr_object },
		{ -1, -1, 0x0037, &nv04_gr_object },
		{ -1, -1, 0x0038, &nv04_gr_object }, /* dvd subpicture */
		{ -1, -1, 0x0039, &nv04_gr_object }, /* m2mf */
		{ -1, -1, 0x0042, &nv04_gr_object }, /* surf2d */
		{ -1, -1, 0x0043, &nv04_gr_object }, /* rop */
		{ -1, -1, 0x0044, &nv04_gr_object }, /* pattern */
		{ -1, -1, 0x0048, &nv04_gr_object },
		{ -1, -1, 0x004a, &nv04_gr_object },
		{ -1, -1, 0x004b, &nv04_gr_object },
		{ -1, -1, 0x0052, &nv04_gr_object }, /* swzsurf */
		{ -1, -1, 0x0053, &nv04_gr_object },
		{ -1, -1, 0x0054, &nv04_gr_object }, /* ttri */
		{ -1, -1, 0x0055, &nv04_gr_object }, /* mtri */
		{ -1, -1, 0x0057, &nv04_gr_object }, /* chroma */
		{ -1, -1, 0x0058, &nv04_gr_object }, /* surf_dst */
		{ -1, -1, 0x0059, &nv04_gr_object }, /* surf_src */
		{ -1, -1, 0x005a, &nv04_gr_object }, /* surf_color */
		{ -1, -1, 0x005b, &nv04_gr_object }, /* surf_zeta */
		{ -1, -1, 0x005c, &nv04_gr_object }, /* line */
		{ -1, -1, 0x005d, &nv04_gr_object }, /* tri */
		{ -1, -1, 0x005e, &nv04_gr_object }, /* rect */
		{ -1, -1, 0x005f, &nv04_gr_object },
		{ -1, -1, 0x0060, &nv04_gr_object },
		{ -1, -1, 0x0061, &nv04_gr_object },
		{ -1, -1, 0x0064, &nv04_gr_object }, /* iifc (nv05) */
		{ -1, -1, 0x0065, &nv04_gr_object }, /* ifc (nv05) */
		{ -1, -1, 0x0066, &nv04_gr_object }, /* sifc (nv05) */
		{ -1, -1, 0x0072, &nv04_gr_object }, /* beta4 */
		{ -1, -1, 0x0076, &nv04_gr_object },
		{ -1, -1, 0x0077, &nv04_gr_object },
		{}
	}
};
1374
/* Legacy nvkm object constructor for the nv04 GR engine: allocate the
 * engine, hook up the function table and interrupt handler, and set the
 * PMC unit mask (bit 12 = PGRAPH).
 * NOTE(review): still uses the old-style nvkm_gr_create()/nv_subdev()
 * pattern that this series is converting away from.
 */
static int
nv04_gr_ctor(struct nvkm_object *parent, struct nvkm_object *engine,
	     struct nvkm_oclass *oclass, void *data, u32 size,
	     struct nvkm_object **pobject)
{
	struct nv04_gr *gr;
	int ret;

	ret = nvkm_gr_create(parent, engine, oclass, true, &gr);
	*pobject = nv_object(gr);
	if (ret)
		return ret;

	gr->base.func = &nv04_gr;
	nv_subdev(gr)->unit = 0x00001000;
	nv_subdev(gr)->intr = nv04_gr_intr;
	spin_lock_init(&gr->lock);
	return 0;
}
1394
/* One-time PGRAPH hardware initialisation: enable interrupts and program
 * the DEBUG registers with values matching the proprietary driver (the
 * inline comments record the blob/haiku reference values).
 */
static int
nv04_gr_init(struct nvkm_object *object)
{
	struct nvkm_engine *engine = nv_engine(object);
	struct nv04_gr *gr = (void *)engine;
	struct nvkm_device *device = gr->base.engine.subdev.device;
	int ret;

	ret = nvkm_gr_init(&gr->base);
	if (ret)
		return ret;

	/* Enable PGRAPH interrupts */
	nvkm_wr32(device, NV03_PGRAPH_INTR, 0xFFFFFFFF);
	nvkm_wr32(device, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nvkm_wr32(device, NV04_PGRAPH_VALID1, 0);
	nvkm_wr32(device, NV04_PGRAPH_VALID2, 0);
	/*nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x000001FF);
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x1231c000);
	/*1231C000 blob, 001 haiku*/
	/*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_1, 0x72111100);
	/*0x72111100 blob , 01 haiku*/
	/*nvkm_wr32(device, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
	/*haiku same*/

	/*nvkm_wr32(device, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
	/*haiku and blob 10d4*/

	nvkm_wr32(device, NV04_PGRAPH_STATE        , 0xFFFFFFFF);
	nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL  , 0x10000100);
	nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);

	/* These don't belong here, they're part of a per-channel context */
	nvkm_wr32(device, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
	nvkm_wr32(device, NV04_PGRAPH_BETA_AND     , 0xFFFFFFFF);
	return 0;
}
ebb945a9 1437
/* Legacy oclass descriptor registering the nv04 GR engine with the
 * device; dtor/fini fall through to the generic nvkm_gr helpers.
 */
struct nvkm_oclass
nv04_gr_oclass = {
	.handle = NV_ENGINE(GR, 0x04),
	.ofuncs = &(struct nvkm_ofuncs) {
		.ctor = nv04_gr_ctor,
		.dtor = _nvkm_gr_dtor,
		.init = nv04_gr_init,
		.fini = _nvkm_gr_fini,
	},
};
This page took 0.7558 seconds and 5 git commands to generate.