/**************************************************************************
 *
 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/
/*
 * Authors:
 *   Keith Whitwell <keith@tungstengraphics.com>
 */
#include "util/u_memory.h"
#include "pipe/p_state.h"
#include "translate.h"

typedef void (*fetch_func)(const void *ptr, float *attrib);
typedef void (*emit_func)(const float *attrib, void *ptr);

struct translate_generic {
   struct translate translate;

   struct {
      enum translate_element_type type;

      /* input side */
      fetch_func fetch;
      unsigned buffer;
      unsigned input_offset;
      unsigned instance_divisor;

      /* output side */
      emit_func emit;
      unsigned output_offset;

      /* set by generic_set_buffer() */
      char *input_ptr;
      unsigned input_stride;

   } attrib[PIPE_MAX_ATTRIBS];

   unsigned nr_attrib;
};

static struct translate_generic *translate_generic( struct translate *translate )
{
   return (struct translate_generic *)translate;
}

/**
 * Fetch a float[4] vertex attribute from memory, doing format/type
 * conversion as needed.
 *
 * This is probably needed/duplicated elsewhere, e.g. format
 * conversion, texture sampling etc.
 */
#define ATTRIB( NAME, SZ, TYPE, FROM, TO )                      \
static void                                                     \
fetch_##NAME(const void *ptr, float *attrib)                    \
{                                                               \
   const float defaults[4] = { 0.0f, 0.0f, 0.0f, 1.0f };        \
   unsigned i;                                                  \
                                                                \
   for (i = 0; i < SZ; i++) {                                   \
      attrib[i] = FROM(i);                                      \
   }                                                            \
                                                                \
   for (; i < 4; i++) {                                         \
      attrib[i] = defaults[i];                                  \
   }                                                            \
}                                                               \
                                                                \
static void                                                     \
emit_##NAME(const float *attrib, void *ptr)                     \
{                                                               \
   unsigned i;                                                  \
   TYPE *out = (TYPE *)ptr;                                     \
                                                                \
   for (i = 0; i < SZ; i++) {                                   \
      out[i] = TO(attrib[i]);                                   \
   }                                                            \
}
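
/*
 * For illustration, ATTRIB( R32_FLOAT, 1, float, FROM_32_FLOAT, TO_32_FLOAT )
 * expands to roughly this pair of helpers (simplified sketch):
 *
 *    static void fetch_R32_FLOAT(const void *ptr, float *attrib)
 *    {
 *       attrib[0] = ((float *) ptr)[0];
 *       attrib[1] = 0.0f;
 *       attrib[2] = 0.0f;
 *       attrib[3] = 1.0f;
 *    }
 *
 *    static void emit_R32_FLOAT(const float *attrib, void *ptr)
 *    {
 *       ((float *) ptr)[0] = attrib[0];
 *    }
 */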

#define FROM_64_FLOAT(i)   ((float) ((double *) ptr)[i])
#define FROM_32_FLOAT(i)   (((float *) ptr)[i])

#define FROM_8_USCALED(i)  ((float) ((unsigned char *) ptr)[i])
#define FROM_16_USCALED(i) ((float) ((unsigned short *) ptr)[i])
#define FROM_32_USCALED(i) ((float) ((unsigned int *) ptr)[i])

#define FROM_8_SSCALED(i)  ((float) ((char *) ptr)[i])
#define FROM_16_SSCALED(i) ((float) ((short *) ptr)[i])
#define FROM_32_SSCALED(i) ((float) ((int *) ptr)[i])

#define FROM_8_UNORM(i)    ((float) ((unsigned char *) ptr)[i] / 255.0f)
#define FROM_16_UNORM(i)   ((float) ((unsigned short *) ptr)[i] / 65535.0f)
#define FROM_32_UNORM(i)   ((float) ((unsigned int *) ptr)[i] / 4294967295.0f)

#define FROM_8_SNORM(i)    ((float) ((char *) ptr)[i] / 127.0f)
#define FROM_16_SNORM(i)   ((float) ((short *) ptr)[i] / 32767.0f)
#define FROM_32_SNORM(i)   ((float) ((int *) ptr)[i] / 2147483647.0f)

#define FROM_32_FIXED(i)   (((int *) ptr)[i] / 65536.0f)

#define TO_64_FLOAT(x)     ((double) x)
#define TO_32_FLOAT(x)     (x)

#define TO_8_USCALED(x)    ((unsigned char) x)
#define TO_16_USCALED(x)   ((unsigned short) x)
#define TO_32_USCALED(x)   ((unsigned int) x)

#define TO_8_SSCALED(x)    ((char) x)
#define TO_16_SSCALED(x)   ((short) x)
#define TO_32_SSCALED(x)   ((int) x)

#define TO_8_UNORM(x)      ((unsigned char) (x * 255.0f))
#define TO_16_UNORM(x)     ((unsigned short) (x * 65535.0f))
#define TO_32_UNORM(x)     ((unsigned int) (x * 4294967295.0f))

#define TO_8_SNORM(x)      ((char) (x * 127.0f))
#define TO_16_SNORM(x)     ((short) (x * 32767.0f))
#define TO_32_SNORM(x)     ((int) (x * 2147483647.0f))

#define TO_32_FIXED(x)     ((int) (x * 65536.0f))

ATTRIB( R64G64B64A64_FLOAT, 4, double, FROM_64_FLOAT, TO_64_FLOAT )
ATTRIB( R64G64B64_FLOAT,    3, double, FROM_64_FLOAT, TO_64_FLOAT )
ATTRIB( R64G64_FLOAT,       2, double, FROM_64_FLOAT, TO_64_FLOAT )
ATTRIB( R64_FLOAT,          1, double, FROM_64_FLOAT, TO_64_FLOAT )

ATTRIB( R32G32B32A32_FLOAT, 4, float, FROM_32_FLOAT, TO_32_FLOAT )
ATTRIB( R32G32B32_FLOAT,    3, float, FROM_32_FLOAT, TO_32_FLOAT )
ATTRIB( R32G32_FLOAT,       2, float, FROM_32_FLOAT, TO_32_FLOAT )
ATTRIB( R32_FLOAT,          1, float, FROM_32_FLOAT, TO_32_FLOAT )

ATTRIB( R32G32B32A32_USCALED, 4, unsigned, FROM_32_USCALED, TO_32_USCALED )
ATTRIB( R32G32B32_USCALED,    3, unsigned, FROM_32_USCALED, TO_32_USCALED )
ATTRIB( R32G32_USCALED,       2, unsigned, FROM_32_USCALED, TO_32_USCALED )
ATTRIB( R32_USCALED,          1, unsigned, FROM_32_USCALED, TO_32_USCALED )

ATTRIB( R32G32B32A32_SSCALED, 4, int, FROM_32_SSCALED, TO_32_SSCALED )
ATTRIB( R32G32B32_SSCALED,    3, int, FROM_32_SSCALED, TO_32_SSCALED )
ATTRIB( R32G32_SSCALED,       2, int, FROM_32_SSCALED, TO_32_SSCALED )
ATTRIB( R32_SSCALED,          1, int, FROM_32_SSCALED, TO_32_SSCALED )

ATTRIB( R32G32B32A32_UNORM, 4, unsigned, FROM_32_UNORM, TO_32_UNORM )
ATTRIB( R32G32B32_UNORM,    3, unsigned, FROM_32_UNORM, TO_32_UNORM )
ATTRIB( R32G32_UNORM,       2, unsigned, FROM_32_UNORM, TO_32_UNORM )
ATTRIB( R32_UNORM,          1, unsigned, FROM_32_UNORM, TO_32_UNORM )

ATTRIB( R32G32B32A32_SNORM, 4, int, FROM_32_SNORM, TO_32_SNORM )
ATTRIB( R32G32B32_SNORM,    3, int, FROM_32_SNORM, TO_32_SNORM )
ATTRIB( R32G32_SNORM,       2, int, FROM_32_SNORM, TO_32_SNORM )
ATTRIB( R32_SNORM,          1, int, FROM_32_SNORM, TO_32_SNORM )

ATTRIB( R16G16B16A16_USCALED, 4, ushort, FROM_16_USCALED, TO_16_USCALED )
ATTRIB( R16G16B16_USCALED,    3, ushort, FROM_16_USCALED, TO_16_USCALED )
ATTRIB( R16G16_USCALED,       2, ushort, FROM_16_USCALED, TO_16_USCALED )
ATTRIB( R16_USCALED,          1, ushort, FROM_16_USCALED, TO_16_USCALED )

ATTRIB( R16G16B16A16_SSCALED, 4, short, FROM_16_SSCALED, TO_16_SSCALED )
ATTRIB( R16G16B16_SSCALED,    3, short, FROM_16_SSCALED, TO_16_SSCALED )
ATTRIB( R16G16_SSCALED,       2, short, FROM_16_SSCALED, TO_16_SSCALED )
ATTRIB( R16_SSCALED,          1, short, FROM_16_SSCALED, TO_16_SSCALED )

ATTRIB( R16G16B16A16_UNORM, 4, ushort, FROM_16_UNORM, TO_16_UNORM )
ATTRIB( R16G16B16_UNORM,    3, ushort, FROM_16_UNORM, TO_16_UNORM )
ATTRIB( R16G16_UNORM,       2, ushort, FROM_16_UNORM, TO_16_UNORM )
ATTRIB( R16_UNORM,          1, ushort, FROM_16_UNORM, TO_16_UNORM )

ATTRIB( R16G16B16A16_SNORM, 4, short, FROM_16_SNORM, TO_16_SNORM )
ATTRIB( R16G16B16_SNORM,    3, short, FROM_16_SNORM, TO_16_SNORM )
ATTRIB( R16G16_SNORM,       2, short, FROM_16_SNORM, TO_16_SNORM )
ATTRIB( R16_SNORM,          1, short, FROM_16_SNORM, TO_16_SNORM )

ATTRIB( R8G8B8A8_USCALED, 4, ubyte, FROM_8_USCALED, TO_8_USCALED )
ATTRIB( R8G8B8_USCALED,   3, ubyte, FROM_8_USCALED, TO_8_USCALED )
ATTRIB( R8G8_USCALED,     2, ubyte, FROM_8_USCALED, TO_8_USCALED )
ATTRIB( R8_USCALED,       1, ubyte, FROM_8_USCALED, TO_8_USCALED )

ATTRIB( R8G8B8A8_SSCALED, 4, char, FROM_8_SSCALED, TO_8_SSCALED )
ATTRIB( R8G8B8_SSCALED,   3, char, FROM_8_SSCALED, TO_8_SSCALED )
ATTRIB( R8G8_SSCALED,     2, char, FROM_8_SSCALED, TO_8_SSCALED )
ATTRIB( R8_SSCALED,       1, char, FROM_8_SSCALED, TO_8_SSCALED )

ATTRIB( R8G8B8A8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )
ATTRIB( R8G8B8_UNORM,   3, ubyte, FROM_8_UNORM, TO_8_UNORM )
ATTRIB( R8G8_UNORM,     2, ubyte, FROM_8_UNORM, TO_8_UNORM )
ATTRIB( R8_UNORM,       1, ubyte, FROM_8_UNORM, TO_8_UNORM )

ATTRIB( R8G8B8A8_SNORM, 4, char, FROM_8_SNORM, TO_8_SNORM )
ATTRIB( R8G8B8_SNORM,   3, char, FROM_8_SNORM, TO_8_SNORM )
ATTRIB( R8G8_SNORM,     2, char, FROM_8_SNORM, TO_8_SNORM )
ATTRIB( R8_SNORM,       1, char, FROM_8_SNORM, TO_8_SNORM )

ATTRIB( A8R8G8B8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )
/*ATTRIB( R8G8B8A8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )*/

ATTRIB( R32G32B32A32_FIXED, 4, int, FROM_32_FIXED, TO_32_FIXED )
ATTRIB( R32G32B32_FIXED,    3, int, FROM_32_FIXED, TO_32_FIXED )
ATTRIB( R32G32_FIXED,       2, int, FROM_32_FIXED, TO_32_FIXED )
ATTRIB( R32_FIXED,          1, int, FROM_32_FIXED, TO_32_FIXED )

static void
fetch_B8G8R8A8_UNORM(const void *ptr, float *attrib)
{
   attrib[2] = FROM_8_UNORM(0);
   attrib[1] = FROM_8_UNORM(1);
   attrib[0] = FROM_8_UNORM(2);
   attrib[3] = FROM_8_UNORM(3);
}

static void
emit_B8G8R8A8_UNORM( const float *attrib, void *ptr )
{
   ubyte *out = (ubyte *)ptr;
   out[2] = TO_8_UNORM(attrib[0]);
   out[1] = TO_8_UNORM(attrib[1]);
   out[0] = TO_8_UNORM(attrib[2]);
   out[3] = TO_8_UNORM(attrib[3]);
}

static void
fetch_NULL( const void *ptr, float *attrib )
{
   attrib[0] = 0.0f;
   attrib[1] = 0.0f;
   attrib[2] = 0.0f;
   attrib[3] = 1.0f;
}

static void
emit_NULL( const float *attrib, void *ptr )
{
   /* do nothing is the only sensible option */
}

static fetch_func get_fetch_func( enum pipe_format format )
{
   switch (format) {
   case PIPE_FORMAT_R64_FLOAT:
      return &fetch_R64_FLOAT;
   case PIPE_FORMAT_R64G64_FLOAT:
      return &fetch_R64G64_FLOAT;
   case PIPE_FORMAT_R64G64B64_FLOAT:
      return &fetch_R64G64B64_FLOAT;
   case PIPE_FORMAT_R64G64B64A64_FLOAT:
      return &fetch_R64G64B64A64_FLOAT;

   case PIPE_FORMAT_R32_FLOAT:
      return &fetch_R32_FLOAT;
   case PIPE_FORMAT_R32G32_FLOAT:
      return &fetch_R32G32_FLOAT;
   case PIPE_FORMAT_R32G32B32_FLOAT:
      return &fetch_R32G32B32_FLOAT;
   case PIPE_FORMAT_R32G32B32A32_FLOAT:
      return &fetch_R32G32B32A32_FLOAT;

   case PIPE_FORMAT_R32_UNORM:
      return &fetch_R32_UNORM;
   case PIPE_FORMAT_R32G32_UNORM:
      return &fetch_R32G32_UNORM;
   case PIPE_FORMAT_R32G32B32_UNORM:
      return &fetch_R32G32B32_UNORM;
   case PIPE_FORMAT_R32G32B32A32_UNORM:
      return &fetch_R32G32B32A32_UNORM;

   case PIPE_FORMAT_R32_USCALED:
      return &fetch_R32_USCALED;
   case PIPE_FORMAT_R32G32_USCALED:
      return &fetch_R32G32_USCALED;
   case PIPE_FORMAT_R32G32B32_USCALED:
      return &fetch_R32G32B32_USCALED;
   case PIPE_FORMAT_R32G32B32A32_USCALED:
      return &fetch_R32G32B32A32_USCALED;

   case PIPE_FORMAT_R32_SNORM:
      return &fetch_R32_SNORM;
   case PIPE_FORMAT_R32G32_SNORM:
      return &fetch_R32G32_SNORM;
   case PIPE_FORMAT_R32G32B32_SNORM:
      return &fetch_R32G32B32_SNORM;
   case PIPE_FORMAT_R32G32B32A32_SNORM:
      return &fetch_R32G32B32A32_SNORM;

   case PIPE_FORMAT_R32_SSCALED:
      return &fetch_R32_SSCALED;
   case PIPE_FORMAT_R32G32_SSCALED:
      return &fetch_R32G32_SSCALED;
   case PIPE_FORMAT_R32G32B32_SSCALED:
      return &fetch_R32G32B32_SSCALED;
   case PIPE_FORMAT_R32G32B32A32_SSCALED:
      return &fetch_R32G32B32A32_SSCALED;

   case PIPE_FORMAT_R16_UNORM:
      return &fetch_R16_UNORM;
   case PIPE_FORMAT_R16G16_UNORM:
      return &fetch_R16G16_UNORM;
   case PIPE_FORMAT_R16G16B16_UNORM:
      return &fetch_R16G16B16_UNORM;
   case PIPE_FORMAT_R16G16B16A16_UNORM:
      return &fetch_R16G16B16A16_UNORM;

   case PIPE_FORMAT_R16_USCALED:
      return &fetch_R16_USCALED;
   case PIPE_FORMAT_R16G16_USCALED:
      return &fetch_R16G16_USCALED;
   case PIPE_FORMAT_R16G16B16_USCALED:
      return &fetch_R16G16B16_USCALED;
   case PIPE_FORMAT_R16G16B16A16_USCALED:
      return &fetch_R16G16B16A16_USCALED;

   case PIPE_FORMAT_R16_SNORM:
      return &fetch_R16_SNORM;
   case PIPE_FORMAT_R16G16_SNORM:
      return &fetch_R16G16_SNORM;
   case PIPE_FORMAT_R16G16B16_SNORM:
      return &fetch_R16G16B16_SNORM;
   case PIPE_FORMAT_R16G16B16A16_SNORM:
      return &fetch_R16G16B16A16_SNORM;

   case PIPE_FORMAT_R16_SSCALED:
      return &fetch_R16_SSCALED;
   case PIPE_FORMAT_R16G16_SSCALED:
      return &fetch_R16G16_SSCALED;
   case PIPE_FORMAT_R16G16B16_SSCALED:
      return &fetch_R16G16B16_SSCALED;
   case PIPE_FORMAT_R16G16B16A16_SSCALED:
      return &fetch_R16G16B16A16_SSCALED;

   case PIPE_FORMAT_R8_UNORM:
      return &fetch_R8_UNORM;
   case PIPE_FORMAT_R8G8_UNORM:
      return &fetch_R8G8_UNORM;
   case PIPE_FORMAT_R8G8B8_UNORM:
      return &fetch_R8G8B8_UNORM;
   case PIPE_FORMAT_R8G8B8A8_UNORM:
      return &fetch_R8G8B8A8_UNORM;

   case PIPE_FORMAT_R8_USCALED:
      return &fetch_R8_USCALED;
   case PIPE_FORMAT_R8G8_USCALED:
      return &fetch_R8G8_USCALED;
   case PIPE_FORMAT_R8G8B8_USCALED:
      return &fetch_R8G8B8_USCALED;
   case PIPE_FORMAT_R8G8B8A8_USCALED:
      return &fetch_R8G8B8A8_USCALED;

   case PIPE_FORMAT_R8_SNORM:
      return &fetch_R8_SNORM;
   case PIPE_FORMAT_R8G8_SNORM:
      return &fetch_R8G8_SNORM;
   case PIPE_FORMAT_R8G8B8_SNORM:
      return &fetch_R8G8B8_SNORM;
   case PIPE_FORMAT_R8G8B8A8_SNORM:
      return &fetch_R8G8B8A8_SNORM;

   case PIPE_FORMAT_R8_SSCALED:
      return &fetch_R8_SSCALED;
   case PIPE_FORMAT_R8G8_SSCALED:
      return &fetch_R8G8_SSCALED;
   case PIPE_FORMAT_R8G8B8_SSCALED:
      return &fetch_R8G8B8_SSCALED;
   case PIPE_FORMAT_R8G8B8A8_SSCALED:
      return &fetch_R8G8B8A8_SSCALED;

   case PIPE_FORMAT_B8G8R8A8_UNORM:
      return &fetch_B8G8R8A8_UNORM;

   case PIPE_FORMAT_A8R8G8B8_UNORM:
      return &fetch_A8R8G8B8_UNORM;

   case PIPE_FORMAT_R32_FIXED:
      return &fetch_R32_FIXED;
   case PIPE_FORMAT_R32G32_FIXED:
      return &fetch_R32G32_FIXED;
   case PIPE_FORMAT_R32G32B32_FIXED:
      return &fetch_R32G32B32_FIXED;
   case PIPE_FORMAT_R32G32B32A32_FIXED:
      return &fetch_R32G32B32A32_FIXED;

   default:
      assert(0);
      return &fetch_NULL;
   }
}

static emit_func get_emit_func( enum pipe_format format )
{
   /* silence warnings */
   (void) emit_R32G32B32A32_FIXED;
   (void) emit_R32G32B32_FIXED;
   (void) emit_R32G32_FIXED;
   (void) emit_R32_FIXED;

   switch (format) {
   case PIPE_FORMAT_R64_FLOAT:
      return &emit_R64_FLOAT;
   case PIPE_FORMAT_R64G64_FLOAT:
      return &emit_R64G64_FLOAT;
   case PIPE_FORMAT_R64G64B64_FLOAT:
      return &emit_R64G64B64_FLOAT;
   case PIPE_FORMAT_R64G64B64A64_FLOAT:
      return &emit_R64G64B64A64_FLOAT;

   case PIPE_FORMAT_R32_FLOAT:
      return &emit_R32_FLOAT;
   case PIPE_FORMAT_R32G32_FLOAT:
      return &emit_R32G32_FLOAT;
   case PIPE_FORMAT_R32G32B32_FLOAT:
      return &emit_R32G32B32_FLOAT;
   case PIPE_FORMAT_R32G32B32A32_FLOAT:
      return &emit_R32G32B32A32_FLOAT;

   case PIPE_FORMAT_R32_UNORM:
      return &emit_R32_UNORM;
   case PIPE_FORMAT_R32G32_UNORM:
      return &emit_R32G32_UNORM;
   case PIPE_FORMAT_R32G32B32_UNORM:
      return &emit_R32G32B32_UNORM;
   case PIPE_FORMAT_R32G32B32A32_UNORM:
      return &emit_R32G32B32A32_UNORM;

   case PIPE_FORMAT_R32_USCALED:
      return &emit_R32_USCALED;
   case PIPE_FORMAT_R32G32_USCALED:
      return &emit_R32G32_USCALED;
   case PIPE_FORMAT_R32G32B32_USCALED:
      return &emit_R32G32B32_USCALED;
   case PIPE_FORMAT_R32G32B32A32_USCALED:
      return &emit_R32G32B32A32_USCALED;

   case PIPE_FORMAT_R32_SNORM:
      return &emit_R32_SNORM;
   case PIPE_FORMAT_R32G32_SNORM:
      return &emit_R32G32_SNORM;
   case PIPE_FORMAT_R32G32B32_SNORM:
      return &emit_R32G32B32_SNORM;
   case PIPE_FORMAT_R32G32B32A32_SNORM:
      return &emit_R32G32B32A32_SNORM;

   case PIPE_FORMAT_R32_SSCALED:
      return &emit_R32_SSCALED;
   case PIPE_FORMAT_R32G32_SSCALED:
      return &emit_R32G32_SSCALED;
   case PIPE_FORMAT_R32G32B32_SSCALED:
      return &emit_R32G32B32_SSCALED;
   case PIPE_FORMAT_R32G32B32A32_SSCALED:
      return &emit_R32G32B32A32_SSCALED;

   case PIPE_FORMAT_R16_UNORM:
      return &emit_R16_UNORM;
   case PIPE_FORMAT_R16G16_UNORM:
      return &emit_R16G16_UNORM;
   case PIPE_FORMAT_R16G16B16_UNORM:
      return &emit_R16G16B16_UNORM;
   case PIPE_FORMAT_R16G16B16A16_UNORM:
      return &emit_R16G16B16A16_UNORM;

   case PIPE_FORMAT_R16_USCALED:
      return &emit_R16_USCALED;
   case PIPE_FORMAT_R16G16_USCALED:
      return &emit_R16G16_USCALED;
   case PIPE_FORMAT_R16G16B16_USCALED:
      return &emit_R16G16B16_USCALED;
   case PIPE_FORMAT_R16G16B16A16_USCALED:
      return &emit_R16G16B16A16_USCALED;

   case PIPE_FORMAT_R16_SNORM:
      return &emit_R16_SNORM;
   case PIPE_FORMAT_R16G16_SNORM:
      return &emit_R16G16_SNORM;
   case PIPE_FORMAT_R16G16B16_SNORM:
      return &emit_R16G16B16_SNORM;
   case PIPE_FORMAT_R16G16B16A16_SNORM:
      return &emit_R16G16B16A16_SNORM;

   case PIPE_FORMAT_R16_SSCALED:
      return &emit_R16_SSCALED;
   case PIPE_FORMAT_R16G16_SSCALED:
      return &emit_R16G16_SSCALED;
   case PIPE_FORMAT_R16G16B16_SSCALED:
      return &emit_R16G16B16_SSCALED;
   case PIPE_FORMAT_R16G16B16A16_SSCALED:
      return &emit_R16G16B16A16_SSCALED;

   case PIPE_FORMAT_R8_UNORM:
      return &emit_R8_UNORM;
   case PIPE_FORMAT_R8G8_UNORM:
      return &emit_R8G8_UNORM;
   case PIPE_FORMAT_R8G8B8_UNORM:
      return &emit_R8G8B8_UNORM;
   case PIPE_FORMAT_R8G8B8A8_UNORM:
      return &emit_R8G8B8A8_UNORM;

   case PIPE_FORMAT_R8_USCALED:
      return &emit_R8_USCALED;
   case PIPE_FORMAT_R8G8_USCALED:
      return &emit_R8G8_USCALED;
   case PIPE_FORMAT_R8G8B8_USCALED:
      return &emit_R8G8B8_USCALED;
   case PIPE_FORMAT_R8G8B8A8_USCALED:
      return &emit_R8G8B8A8_USCALED;

   case PIPE_FORMAT_R8_SNORM:
      return &emit_R8_SNORM;
   case PIPE_FORMAT_R8G8_SNORM:
      return &emit_R8G8_SNORM;
   case PIPE_FORMAT_R8G8B8_SNORM:
      return &emit_R8G8B8_SNORM;
   case PIPE_FORMAT_R8G8B8A8_SNORM:
      return &emit_R8G8B8A8_SNORM;

   case PIPE_FORMAT_R8_SSCALED:
      return &emit_R8_SSCALED;
   case PIPE_FORMAT_R8G8_SSCALED:
      return &emit_R8G8_SSCALED;
   case PIPE_FORMAT_R8G8B8_SSCALED:
      return &emit_R8G8B8_SSCALED;
   case PIPE_FORMAT_R8G8B8A8_SSCALED:
      return &emit_R8G8B8A8_SSCALED;

   case PIPE_FORMAT_B8G8R8A8_UNORM:
      return &emit_B8G8R8A8_UNORM;

   case PIPE_FORMAT_A8R8G8B8_UNORM:
      return &emit_A8R8G8B8_UNORM;

   default:
      assert(0);
      return &emit_NULL;
   }
}

/**
 * Fetch vertex attributes for 'count' vertices.
 */
static void PIPE_CDECL generic_run_elts( struct translate *translate,
                                         const unsigned *elts,
                                         unsigned count,
                                         unsigned instance_id,
                                         void *output_buffer )
{
   struct translate_generic *tg = translate_generic(translate);
   char *vert = output_buffer;
   unsigned nr_attrs = tg->nr_attrib;
   unsigned attr;
   unsigned i;

   /* loop over vertex attributes (vertex shader inputs)
    */
   for (i = 0; i < count; i++) {
      unsigned elt = *elts++;

      for (attr = 0; attr < nr_attrs; attr++) {
         float data[4];
         const char *src;
         char *dst = (vert +
                      tg->attrib[attr].output_offset);

         if (tg->attrib[attr].instance_divisor) {
            src = tg->attrib[attr].input_ptr +
                  tg->attrib[attr].input_stride *
                  (instance_id / tg->attrib[attr].instance_divisor);
         } else {
            src = tg->attrib[attr].input_ptr +
                  tg->attrib[attr].input_stride * elt;
         }

         tg->attrib[attr].fetch( src, data );

         if (0) debug_printf("vert %d/%d attr %d: %f %f %f %f\n",
                             i, elt, attr, data[0], data[1], data[2], data[3]);

         tg->attrib[attr].emit( data, dst );
      }

      vert += tg->translate.key.output_stride;
   }
}
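
/*
 * Illustrative sketch (hypothetical numbers, not taken from any particular
 * translate key): with two elements -- float3 positions from buffer 0
 * emitted at output_offset 0, and ubyte4 colors from buffer 1 emitted at
 * output_offset 12 -- and output_stride 16, each iteration above gathers
 * both attributes for one index, converts them through float[4], and packs
 * them into a single 16-byte interleaved output vertex.
 */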

static void PIPE_CDECL generic_run( struct translate *translate,
                                    unsigned start,
                                    unsigned count,
                                    unsigned instance_id,
                                    void *output_buffer )
{
   struct translate_generic *tg = translate_generic(translate);
   char *vert = output_buffer;
   unsigned nr_attrs = tg->nr_attrib;
   unsigned attr;
   unsigned i;

   /* loop over vertex attributes (vertex shader inputs)
    */
   for (i = 0; i < count; i++) {
      unsigned elt = start + i;

      for (attr = 0; attr < nr_attrs; attr++) {
         float data[4];
         char *dst = (vert +
                      tg->attrib[attr].output_offset);

         if (tg->attrib[attr].type == TRANSLATE_ELEMENT_NORMAL) {
            const char *src;

            if (tg->attrib[attr].instance_divisor) {
               src = tg->attrib[attr].input_ptr +
                     tg->attrib[attr].input_stride *
                     (instance_id / tg->attrib[attr].instance_divisor);
            } else {
               src = tg->attrib[attr].input_ptr +
                     tg->attrib[attr].input_stride * elt;
            }

            tg->attrib[attr].fetch( src, data );
         } else {
            /* element carries the instance id rather than buffer data */
            data[0] = (float)instance_id;
         }

         if (0) debug_printf("vert %d attr %d: %f %f %f %f\n",
                             i, attr, data[0], data[1], data[2], data[3]);

         tg->attrib[attr].emit( data, dst );
      }

      vert += tg->translate.key.output_stride;
   }
}

static void generic_set_buffer( struct translate *translate,
                                unsigned buf,
                                const void *ptr,
                                unsigned stride )
{
   struct translate_generic *tg = translate_generic(translate);
   unsigned i;

   for (i = 0; i < tg->nr_attrib; i++) {
      if (tg->attrib[i].buffer == buf) {
         tg->attrib[i].input_ptr = ((char *)ptr +
                                    tg->attrib[i].input_offset);
         tg->attrib[i].input_stride = stride;
      }
   }
}

static void generic_release( struct translate *translate )
{
   FREE(translate);
}

struct translate *translate_generic_create( const struct translate_key *key )
{
   struct translate_generic *tg = CALLOC_STRUCT(translate_generic);
   unsigned i;

   if (tg == NULL)
      return NULL;

   tg->translate.key = *key;
   tg->translate.release = generic_release;
   tg->translate.set_buffer = generic_set_buffer;
   tg->translate.run_elts = generic_run_elts;
   tg->translate.run = generic_run;

   for (i = 0; i < key->nr_elements; i++) {
      tg->attrib[i].type = key->element[i].type;

      tg->attrib[i].fetch = get_fetch_func(key->element[i].input_format);
      tg->attrib[i].buffer = key->element[i].input_buffer;
      tg->attrib[i].input_offset = key->element[i].input_offset;
      tg->attrib[i].instance_divisor = key->element[i].instance_divisor;

      tg->attrib[i].emit = get_emit_func(key->element[i].output_format);
      tg->attrib[i].output_offset = key->element[i].output_offset;
   }

   tg->nr_attrib = key->nr_elements;

   return &tg->translate;
}