/* workbench/libs/mesa/src/gallium/drivers/nvc0/nvc0_state_validate.c */

#include "util/u_math.h"

#include "nvc0_context.h"

#ifdef USE_UNUSED_CODE
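/* Set up a ZCULL region behind the depth/stencil surface. This code is
 * compiled out unless USE_UNUSED_CODE is defined; several of the methods
 * below are addressed only by their numeric offsets, so the meaning of some
 * fields is presumably still guesswork. */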
static void
nvc0_validate_zcull(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;
   struct pipe_framebuffer_state *fb = &nvc0->framebuffer;
   struct nvc0_surface *sf = nvc0_surface(fb->zsbuf);
   struct nvc0_miptree *mt = nvc0_miptree(sf->base.texture);
   struct nouveau_bo *bo = mt->base.bo;
   uint32_t size;
   uint32_t offset = align(mt->total_size, 1 << 17);
   unsigned width, height;

   assert(mt->base.base.depth0 == 1 && mt->base.base.array_size < 2);

   size = mt->total_size * 2;

   height = align(fb->height, 32);
   width = fb->width % 224;
   if (width)
      width = fb->width + (224 - width);
   else
      width = fb->width;

   MARK_RING (chan, 23, 4);
   BEGIN_RING(chan, RING_3D_(0x1590), 1); /* ZCULL_REGION_INDEX (bits 0x3f) */
   OUT_RING  (chan, 0);
   BEGIN_RING(chan, RING_3D_(0x07e8), 2); /* ZCULL_ADDRESS_A_HIGH */
   OUT_RELOCh(chan, bo, offset, NOUVEAU_BO_VRAM | NOUVEAU_BO_RDWR);
   OUT_RELOCl(chan, bo, offset, NOUVEAU_BO_VRAM | NOUVEAU_BO_RDWR);
   offset += 1 << 17;
   BEGIN_RING(chan, RING_3D_(0x07f0), 2); /* ZCULL_ADDRESS_B_HIGH */
   OUT_RELOCh(chan, bo, offset, NOUVEAU_BO_VRAM | NOUVEAU_BO_RDWR);
   OUT_RELOCl(chan, bo, offset, NOUVEAU_BO_VRAM | NOUVEAU_BO_RDWR);
   BEGIN_RING(chan, RING_3D_(0x07e0), 2);
   OUT_RING  (chan, size);
   OUT_RING  (chan, size >> 16);
   BEGIN_RING(chan, RING_3D_(0x15c8), 1); /* bits 0x3 */
   OUT_RING  (chan, 2);
   BEGIN_RING(chan, RING_3D_(0x07c0), 4); /* ZCULL dimensions */
   OUT_RING  (chan, width);
   OUT_RING  (chan, height);
   OUT_RING  (chan, 1);
   OUT_RING  (chan, 0);
   BEGIN_RING(chan, RING_3D_(0x15fc), 2);
   OUT_RING  (chan, 0); /* bits 0xffff */
   OUT_RING  (chan, 0); /* bits 0xffff */
   BEGIN_RING(chan, RING_3D_(0x1958), 1);
   OUT_RING  (chan, 0); /* bits ~0 */
}
#endif

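/* Bind the colour and depth/stencil targets of the current framebuffer,
 * flag the backing miptrees as written by the GPU, and emit a SERIALIZE
 * if any of them was previously being read, so that outstanding reads
 * complete before rendering overwrites the data. */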
static void
nvc0_validate_fb(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;
   struct pipe_framebuffer_state *fb = &nvc0->framebuffer;
   unsigned i;
   boolean serialize = FALSE;

   nvc0_bufctx_reset(nvc0, NVC0_BUFCTX_FRAME);

   BEGIN_RING(chan, RING_3D(RT_CONTROL), 1);
   OUT_RING  (chan, (076543210 << 4) | fb->nr_cbufs);
   BEGIN_RING(chan, RING_3D(SCREEN_SCISSOR_HORIZ), 2);
   OUT_RING  (chan, fb->width << 16);
   OUT_RING  (chan, fb->height << 16);

   MARK_RING(chan, 9 * fb->nr_cbufs, 2 * fb->nr_cbufs);

   for (i = 0; i < fb->nr_cbufs; ++i) {
      struct nvc0_miptree *mt = nvc0_miptree(fb->cbufs[i]->texture);
      struct nvc0_surface *sf = nvc0_surface(fb->cbufs[i]);
      struct nouveau_bo *bo = mt->base.bo;
      uint32_t offset = sf->offset;

      BEGIN_RING(chan, RING_3D(RT_ADDRESS_HIGH(i)), 9);
      OUT_RELOCh(chan, bo, offset, NOUVEAU_BO_VRAM | NOUVEAU_BO_RDWR);
      OUT_RELOCl(chan, bo, offset, NOUVEAU_BO_VRAM | NOUVEAU_BO_RDWR);
      OUT_RING  (chan, sf->width);
      OUT_RING  (chan, sf->height);
      OUT_RING  (chan, nvc0_format_table[sf->base.format].rt);
      OUT_RING  (chan, (mt->layout_3d << 16) |
                 mt->level[sf->base.u.tex.level].tile_mode);
      OUT_RING  (chan, sf->base.u.tex.first_layer + sf->depth);
      OUT_RING  (chan, mt->layer_stride >> 2);
      OUT_RING  (chan, sf->base.u.tex.first_layer);

      if (mt->base.status & NOUVEAU_BUFFER_STATUS_GPU_READING)
         serialize = TRUE;
      mt->base.status |=  NOUVEAU_BUFFER_STATUS_GPU_WRITING;
      mt->base.status &= ~NOUVEAU_BUFFER_STATUS_GPU_READING;

      /* only register for writing, otherwise we'd always serialize here */
      nvc0_bufctx_add_resident(nvc0, NVC0_BUFCTX_FRAME, &mt->base,
                               NOUVEAU_BO_VRAM | NOUVEAU_BO_WR);
   }

   if (fb->zsbuf) {
      struct nvc0_miptree *mt = nvc0_miptree(fb->zsbuf->texture);
      struct nvc0_surface *sf = nvc0_surface(fb->zsbuf);
      struct nouveau_bo *bo = mt->base.bo;
      int unk = mt->base.base.target == PIPE_TEXTURE_2D;
      uint32_t offset = sf->offset;

      MARK_RING (chan, 12, 2);
      BEGIN_RING(chan, RING_3D(ZETA_ADDRESS_HIGH), 5);
      OUT_RELOCh(chan, bo, offset, NOUVEAU_BO_VRAM | NOUVEAU_BO_RDWR);
      OUT_RELOCl(chan, bo, offset, NOUVEAU_BO_VRAM | NOUVEAU_BO_RDWR);
      OUT_RING  (chan, nvc0_format_table[fb->zsbuf->format].rt);
      OUT_RING  (chan, mt->level[sf->base.u.tex.level].tile_mode);
      OUT_RING  (chan, mt->layer_stride >> 2);
      BEGIN_RING(chan, RING_3D(ZETA_ENABLE), 1);
      OUT_RING  (chan, 1);
      BEGIN_RING(chan, RING_3D(ZETA_HORIZ), 3);
      OUT_RING  (chan, sf->width);
      OUT_RING  (chan, sf->height);
      OUT_RING  (chan, (unk << 16) |
                 (sf->base.u.tex.first_layer + sf->depth));
      BEGIN_RING(chan, RING_3D(ZETA_BASE_LAYER), 1);
      OUT_RING  (chan, sf->base.u.tex.first_layer);

      if (mt->base.status & NOUVEAU_BUFFER_STATUS_GPU_READING)
         serialize = TRUE;
      mt->base.status |=  NOUVEAU_BUFFER_STATUS_GPU_WRITING;
      mt->base.status &= ~NOUVEAU_BUFFER_STATUS_GPU_READING;

      nvc0_bufctx_add_resident(nvc0, NVC0_BUFCTX_FRAME, &mt->base,
                               NOUVEAU_BO_VRAM | NOUVEAU_BO_WR);
   } else {
      BEGIN_RING(chan, RING_3D(ZETA_ENABLE), 1);
      OUT_RING  (chan, 0);
   }

   if (serialize) {
      BEGIN_RING(chan, RING_3D(SERIALIZE), 1);
      OUT_RING  (chan, 0);
   }
}

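/* Upload the constant blend colour (four RGBA floats). */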
static void
nvc0_validate_blend_colour(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;

   BEGIN_RING(chan, RING_3D(BLEND_COLOR(0)), 4);
   OUT_RINGf (chan, nvc0->blend_colour.color[0]);
   OUT_RINGf (chan, nvc0->blend_colour.color[1]);
   OUT_RINGf (chan, nvc0->blend_colour.color[2]);
   OUT_RINGf (chan, nvc0->blend_colour.color[3]);
}

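/* Set the front- and back-face stencil reference values. */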
static void
nvc0_validate_stencil_ref(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;
   const ubyte *ref = &nvc0->stencil_ref.ref_value[0];

   IMMED_RING(chan, RING_3D(STENCIL_FRONT_FUNC_REF), ref[0]);
   IMMED_RING(chan, RING_3D(STENCIL_BACK_FUNC_REF), ref[1]);
}

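/* Upload the 32x32 polygon stipple pattern; each row is byte-swapped
 * (util_bswap32), presumably to match the bit order the hardware expects. */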
static void
nvc0_validate_stipple(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;
   unsigned i;

   BEGIN_RING(chan, RING_3D(POLYGON_STIPPLE_PATTERN(0)), 32);
   for (i = 0; i < 32; ++i)
      OUT_RING(chan, util_bswap32(nvc0->stipple.stipple[i]));
}

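/* Emit the scissor rectangle. Skipped when neither the scissor state nor
 * the rasterizer's scissor enable changed; with the scissor test disabled,
 * the full 0..0xffff range is programmed instead. */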
static void
nvc0_validate_scissor(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;
   struct pipe_scissor_state *s = &nvc0->scissor;

   if (!(nvc0->dirty & NVC0_NEW_SCISSOR) &&
       nvc0->rast->pipe.scissor == nvc0->state.scissor)
      return;
   nvc0->state.scissor = nvc0->rast->pipe.scissor;

   BEGIN_RING(chan, RING_3D(SCISSOR_HORIZ(0)), 2);
   if (nvc0->rast->pipe.scissor) {
      OUT_RING(chan, (s->maxx << 16) | s->minx);
      OUT_RING(chan, (s->maxy << 16) | s->miny);
   } else {
      OUT_RING(chan, (0xffff << 16) | 0);
      OUT_RING(chan, (0xffff << 16) | 0);
   }
}

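/* Program the viewport transform, then derive an integer viewport rectangle
 * and a depth range from the translate/scale values for clipping. */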
static void
nvc0_validate_viewport(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;
   struct pipe_viewport_state *vp = &nvc0->viewport;
   int x, y, w, h;
   float zmin, zmax;

   BEGIN_RING(chan, RING_3D(VIEWPORT_TRANSLATE_X(0)), 3);
   OUT_RINGf (chan, vp->translate[0]);
   OUT_RINGf (chan, vp->translate[1]);
   OUT_RINGf (chan, vp->translate[2]);
   BEGIN_RING(chan, RING_3D(VIEWPORT_SCALE_X(0)), 3);
   OUT_RINGf (chan, vp->scale[0]);
   OUT_RINGf (chan, vp->scale[1]);
   OUT_RINGf (chan, vp->scale[2]);

   /* now set the viewport rectangle to viewport dimensions for clipping */

   x = util_iround(MAX2(0.0f, vp->translate[0] - fabsf(vp->scale[0])));
   y = util_iround(MAX2(0.0f, vp->translate[1] - fabsf(vp->scale[1])));
   w = util_iround(vp->translate[0] + fabsf(vp->scale[0])) - x;
   h = util_iround(vp->translate[1] + fabsf(vp->scale[1])) - y;

   zmin = vp->translate[2] - fabsf(vp->scale[2]);
   zmax = vp->translate[2] + fabsf(vp->scale[2]);

   BEGIN_RING(chan, RING_3D(VIEWPORT_HORIZ(0)), 2);
   OUT_RING  (chan, (w << 16) | x);
   OUT_RING  (chan, (h << 16) | y);
   BEGIN_RING(chan, RING_3D(DEPTH_RANGE_NEAR(0)), 2);
   OUT_RINGf (chan, zmin);
   OUT_RINGf (chan, zmax);
}

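/* Set the view-volume clip control (depth clamp on/off) and, if user clip
 * planes are active, upload them and enable the corresponding clip
 * distances. The 5 << 16 offset into the screen's uniform BO appears to be
 * the constant-buffer slot reserved for the clip planes. */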
static void
nvc0_validate_clip(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;
   uint32_t clip;

   if (nvc0->clip.depth_clamp) {
      clip =
         NVC0_3D_VIEW_VOLUME_CLIP_CTRL_UNK1_UNK1 |
         NVC0_3D_VIEW_VOLUME_CLIP_CTRL_DEPTH_CLAMP_NEAR |
         NVC0_3D_VIEW_VOLUME_CLIP_CTRL_DEPTH_CLAMP_FAR |
         NVC0_3D_VIEW_VOLUME_CLIP_CTRL_UNK12_UNK2;
   } else {
      clip = NVC0_3D_VIEW_VOLUME_CLIP_CTRL_UNK1_UNK1;
   }

   BEGIN_RING(chan, RING_3D(VIEW_VOLUME_CLIP_CTRL), 1);
   OUT_RING  (chan, clip);

   if (nvc0->clip.nr) {
      struct nouveau_bo *bo = nvc0->screen->uniforms;

      MARK_RING (chan, 6 + nvc0->clip.nr * 4, 2);
      BEGIN_RING(chan, RING_3D(CB_SIZE), 3);
      OUT_RING  (chan, 256);
      OUT_RELOCh(chan, bo, 5 << 16, NOUVEAU_BO_VRAM | NOUVEAU_BO_RD);
      OUT_RELOCl(chan, bo, 5 << 16, NOUVEAU_BO_VRAM | NOUVEAU_BO_RD);
      BEGIN_RING_1I(chan, RING_3D(CB_POS), nvc0->clip.nr * 4 + 1);
      OUT_RING  (chan, 0);
      OUT_RINGp (chan, &nvc0->clip.ucp[0][0], nvc0->clip.nr * 4);

      BEGIN_RING(chan, RING_3D(VP_CLIP_DISTANCE_ENABLE), 1);
      OUT_RING  (chan, (1 << nvc0->clip.nr) - 1);
   } else {
      IMMED_RING(chan, RING_3D(VP_CLIP_DISTANCE_ENABLE), 0);
   }
}

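/* The blend, zsa and rasterizer state objects are pre-packed command
 * streams; validating them just copies the stored words into the
 * channel's pushbuffer. */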
static void
nvc0_validate_blend(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;

   WAIT_RING(chan, nvc0->blend->size);
   OUT_RINGp(chan, nvc0->blend->state, nvc0->blend->size);
}

static void
nvc0_validate_zsa(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;

   WAIT_RING(chan, nvc0->zsa->size);
   OUT_RINGp(chan, nvc0->zsa->state, nvc0->zsa->size);
}

static void
nvc0_validate_rasterizer(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;

   WAIT_RING(chan, nvc0->rast->size);
   OUT_RINGp(chan, nvc0->rast->state, nvc0->rast->size);
}

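/* Re-validate dirty constant buffers for each of the 5 shader stages:
 * bind (or unbind) each buffer with CB_SIZE/CB_BIND and, for buffers that
 * are not mapped by the GPU, stream their contents through CB_POS in
 * chunks that fit the available pushbuffer space. */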
static void
nvc0_constbufs_validate(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;
   struct nouveau_bo *bo;
   unsigned s;

   for (s = 0; s < 5; ++s) {
      struct nv04_resource *res;
      int i;

      while (nvc0->constbuf_dirty[s]) {
         unsigned base = 0;
         unsigned offset = 0, words = 0;
         boolean rebind = TRUE;

         i = ffs(nvc0->constbuf_dirty[s]) - 1;
         nvc0->constbuf_dirty[s] &= ~(1 << i);

         res = nv04_resource(nvc0->constbuf[s][i]);
         if (!res) {
            BEGIN_RING(chan, RING_3D(CB_BIND(s)), 1);
            OUT_RING  (chan, (i << 4) | 0);
            if (i == 0)
               nvc0->state.uniform_buffer_bound[s] = 0;
            continue;
         }

         if (!nouveau_resource_mapped_by_gpu(&res->base)) {
            if (i == 0) {
               base = s << 16;
               bo = nvc0->screen->uniforms;

               if (nvc0->state.uniform_buffer_bound[s] >= res->base.width0)
                  rebind = FALSE;
               else
                  nvc0->state.uniform_buffer_bound[s] =
                     align(res->base.width0, 0x100);
            } else {
               bo = res->bo;
            }
#if 0
            nvc0_m2mf_push_linear(nvc0, bo, NOUVEAU_BO_VRAM,
                                  base, res->base.width0, res->data);
            BEGIN_RING(chan, RING_3D_(0x021c), 1);
            OUT_RING  (chan, 0x1111);
#else
            words = res->base.width0 / 4;
#endif
         } else {
            bo = res->bo;
            if (i == 0)
               nvc0->state.uniform_buffer_bound[s] = 0;
         }

         if (bo != nvc0->screen->uniforms)
            nvc0_bufctx_add_resident(nvc0, NVC0_BUFCTX_CONSTANT, res,
                                     NOUVEAU_BO_VRAM | NOUVEAU_BO_RD);

         if (rebind) {
            MARK_RING (chan, 4, 2);
            BEGIN_RING(chan, RING_3D(CB_SIZE), 3);
            OUT_RING  (chan, align(res->base.width0, 0x100));
            OUT_RELOCh(chan, bo, base, NOUVEAU_BO_VRAM | NOUVEAU_BO_RD);
            OUT_RELOCl(chan, bo, base, NOUVEAU_BO_VRAM | NOUVEAU_BO_RD);
            BEGIN_RING(chan, RING_3D(CB_BIND(s)), 1);
            OUT_RING  (chan, (i << 4) | 1);
         }

         while (words) {
            unsigned nr = AVAIL_RING(chan);

            if (nr < 16) {
               FIRE_RING(chan);
               continue;
            }
            nr = MIN2(MIN2(nr - 6, words), NV04_PFIFO_MAX_PACKET_LEN - 1);

            MARK_RING (chan, nr + 5, 2);
            BEGIN_RING(chan, RING_3D(CB_SIZE), 3);
            OUT_RING  (chan, align(res->base.width0, 0x100));
            OUT_RELOCh(chan, bo, base, NOUVEAU_BO_VRAM | NOUVEAU_BO_RD);
            OUT_RELOCl(chan, bo, base, NOUVEAU_BO_VRAM | NOUVEAU_BO_RD);
            BEGIN_RING_1I(chan, RING_3D(CB_POS), nr + 1);
            OUT_RING  (chan, offset);
            OUT_RINGp (chan, &res->data[offset], nr);

            offset += nr * 4;
            words -= nr;
         }
      }
   }
}

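/* State derived from more than one CSO: early fragment tests may only stay
 * enabled while the fragment program allows them and alpha test is off. */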
static void
nvc0_validate_derived_1(struct nvc0_context *nvc0)
{
   struct nouveau_channel *chan = nvc0->screen->base.channel;
   boolean early_z;

   early_z = nvc0->fragprog->fp.early_z && !nvc0->zsa->pipe.alpha.enabled;

   if (early_z != nvc0->state.early_z) {
      nvc0->state.early_z = early_z;
      IMMED_RING(chan, RING_3D(EARLY_FRAGMENT_TESTS), early_z);
   }
}

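/* Make ctx_to the channel's current context: inherit the hardware state
 * cache from the previous context and mark everything dirty, except state
 * that ctx_to has not bound yet. */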
static void
nvc0_switch_pipe_context(struct nvc0_context *ctx_to)
{
   struct nvc0_context *ctx_from = ctx_to->screen->cur_ctx;

   if (ctx_from)
      ctx_to->state = ctx_from->state;

   ctx_to->dirty = ~0;

   if (!ctx_to->vertex)
      ctx_to->dirty &= ~(NVC0_NEW_VERTEX | NVC0_NEW_ARRAYS);

   if (!ctx_to->vertprog)
      ctx_to->dirty &= ~NVC0_NEW_VERTPROG;
   if (!ctx_to->fragprog)
      ctx_to->dirty &= ~NVC0_NEW_FRAGPROG;

   if (!ctx_to->blend)
      ctx_to->dirty &= ~NVC0_NEW_BLEND;
   if (!ctx_to->rast)
      ctx_to->dirty &= ~NVC0_NEW_RASTERIZER;
   if (!ctx_to->zsa)
      ctx_to->dirty &= ~NVC0_NEW_ZSA;

   ctx_to->screen->base.channel->user_private = ctx_to->screen->cur_ctx =
      ctx_to;
}

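/* Table mapping each validation function to the NVC0_NEW_* dirty bits that
 * trigger it; nvc0_state_validate() walks the list in this order. */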
static struct state_validate {
    void (*func)(struct nvc0_context *);
    uint32_t states;
} validate_list[] = {
    { nvc0_validate_fb,            NVC0_NEW_FRAMEBUFFER },
    { nvc0_validate_blend,         NVC0_NEW_BLEND },
    { nvc0_validate_zsa,           NVC0_NEW_ZSA },
    { nvc0_validate_rasterizer,    NVC0_NEW_RASTERIZER },
    { nvc0_validate_blend_colour,  NVC0_NEW_BLEND_COLOUR },
    { nvc0_validate_stencil_ref,   NVC0_NEW_STENCIL_REF },
    { nvc0_validate_stipple,       NVC0_NEW_STIPPLE },
    { nvc0_validate_scissor,       NVC0_NEW_SCISSOR | NVC0_NEW_RASTERIZER },
    { nvc0_validate_viewport,      NVC0_NEW_VIEWPORT },
    { nvc0_validate_clip,          NVC0_NEW_CLIP },
    { nvc0_vertprog_validate,      NVC0_NEW_VERTPROG },
    { nvc0_tctlprog_validate,      NVC0_NEW_TCTLPROG },
    { nvc0_tevlprog_validate,      NVC0_NEW_TEVLPROG },
    { nvc0_gmtyprog_validate,      NVC0_NEW_GMTYPROG },
    { nvc0_fragprog_validate,      NVC0_NEW_FRAGPROG },
    { nvc0_validate_derived_1,     NVC0_NEW_FRAGPROG | NVC0_NEW_ZSA },
    { nvc0_constbufs_validate,     NVC0_NEW_CONSTBUF },
    { nvc0_validate_textures,      NVC0_NEW_TEXTURES },
    { nvc0_validate_samplers,      NVC0_NEW_SAMPLERS },
    { nvc0_vertex_arrays_validate, NVC0_NEW_VERTEX | NVC0_NEW_ARRAYS },
    { nvc0_tfb_validate,           NVC0_NEW_TFB | NVC0_NEW_TFB_BUFFERS }
};
#define validate_list_len (sizeof(validate_list) / sizeof(validate_list[0]))

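/* Run every validation handler whose dirty bits are set, then re-emit the
 * buffer relocations collected along the way. The expected usage (a sketch,
 * not the literal call site) is roughly:
 *
 *    nvc0->dirty |= NVC0_NEW_FRAMEBUFFER;   // set by the pipe state setters
 *    ...
 *    nvc0_state_validate(nvc0);             // before emitting a draw
 */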
boolean
nvc0_state_validate(struct nvc0_context *nvc0)
{
   unsigned i;

   if (nvc0->screen->cur_ctx != nvc0)
      nvc0_switch_pipe_context(nvc0);

   if (nvc0->dirty) {
      for (i = 0; i < validate_list_len; ++i) {
         struct state_validate *validate = &validate_list[i];

         if (nvc0->dirty & validate->states)
            validate->func(nvc0);
      }
      nvc0->dirty = 0;
   }

   nvc0_bufctx_emit_relocs(nvc0);

   return TRUE;
}