translate_generic.c revision 4a49301e
/**************************************************************************
 *
 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

/*
 * Authors:
 *   Keith Whitwell <keith@tungstengraphics.com>
 */

#include "util/u_memory.h"
#include "pipe/p_state.h"
#include "translate.h"


#define DRAW_DBG 0

typedef void (*fetch_func)(const void *ptr, float *attrib);
typedef void (*emit_func)(const float *attrib, void *ptr);



struct translate_generic {
   struct translate translate;

   struct {
      fetch_func fetch;
      unsigned buffer;
      unsigned input_offset;

      emit_func emit;
      unsigned output_offset;

      char *input_ptr;
      unsigned input_stride;

   } attrib[PIPE_MAX_ATTRIBS];

   unsigned nr_attrib;
};


static struct translate_generic *translate_generic( struct translate *translate )
{
   return (struct translate_generic *)translate;
}

/**
 * Fetch a float[4] vertex attribute from memory, doing format/type
 * conversion as needed.
 *
 * This is probably needed/duplicated elsewhere, e.g. format
 * conversion, texture sampling etc.
 */
#define ATTRIB( NAME, SZ, TYPE, FROM, TO )                    \
static void                                                   \
fetch_##NAME(const void *ptr, float *attrib)                  \
{                                                             \
   const float defaults[4] = { 0.0f,0.0f,0.0f,1.0f };         \
   unsigned i;                                                 \
                                                              \
   for (i = 0; i < SZ; i++) {                                 \
      attrib[i] = FROM(i);                                    \
   }                                                          \
                                                              \
   for (; i < 4; i++) {                                       \
      attrib[i] = defaults[i];                                \
   }                                                          \
}                                                             \
                                                              \
static void                                                   \
emit_##NAME(const float *attrib, void *ptr)                   \
{                                                             \
   unsigned i;                                                 \
   TYPE *out = (TYPE *)ptr;                                   \
                                                              \
   for (i = 0; i < SZ; i++) {                                 \
      out[i] = TO(attrib[i]);                                 \
   }                                                          \
}


#define FROM_64_FLOAT(i)   ((float) ((double *) ptr)[i])
#define FROM_32_FLOAT(i)   (((float *) ptr)[i])

#define FROM_8_USCALED(i)  ((float) ((unsigned char *) ptr)[i])
#define FROM_16_USCALED(i) ((float) ((unsigned short *) ptr)[i])
#define FROM_32_USCALED(i) ((float) ((unsigned int *) ptr)[i])

#define FROM_8_SSCALED(i)  ((float) ((char *) ptr)[i])
#define FROM_16_SSCALED(i) ((float) ((short *) ptr)[i])
#define FROM_32_SSCALED(i) ((float) ((int *) ptr)[i])

#define FROM_8_UNORM(i)    ((float) ((unsigned char *) ptr)[i] / 255.0f)
#define FROM_16_UNORM(i)   ((float) ((unsigned short *) ptr)[i] / 65535.0f)
#define FROM_32_UNORM(i)   ((float) ((unsigned int *) ptr)[i] / 4294967295.0f)

#define FROM_8_SNORM(i)    ((float) ((char *) ptr)[i] / 127.0f)
#define FROM_16_SNORM(i)   ((float) ((short *) ptr)[i] / 32767.0f)
#define FROM_32_SNORM(i)   ((float) ((int *) ptr)[i] / 2147483647.0f)

#define FROM_32_FIXED(i)   (((int *) ptr)[i] / 65536.0f)

#define TO_64_FLOAT(x)   ((double) x)
#define TO_32_FLOAT(x)   (x)

#define TO_8_USCALED(x)  ((unsigned char) x)
#define TO_16_USCALED(x) ((unsigned short) x)
#define TO_32_USCALED(x) ((unsigned int) x)

#define TO_8_SSCALED(x)  ((char) x)
#define TO_16_SSCALED(x) ((short) x)
#define TO_32_SSCALED(x) ((int) x)

#define TO_8_UNORM(x)    ((unsigned char) (x * 255.0f))
#define TO_16_UNORM(x)   ((unsigned short) (x * 65535.0f))
#define TO_32_UNORM(x)   ((unsigned int) (x * 4294967295.0f))

#define TO_8_SNORM(x)    ((char) (x * 127.0f))
#define TO_16_SNORM(x)   ((short) (x * 32767.0f))
#define TO_32_SNORM(x)   ((int) (x * 2147483647.0f))

#define TO_32_FIXED(x)   ((int) (x * 65536.0f))

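
/* For reference, ATTRIB( R32G32_FLOAT, 2, float, FROM_32_FLOAT, TO_32_FLOAT )
 * expands to fetch_R32G32_FLOAT(), which reads two floats and fills the
 * remaining components from the (0, 0, 0, 1) defaults, and to
 * emit_R32G32_FLOAT(), which casts the output pointer to float and stores
 * the first two components.
 */
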
ATTRIB( R64G64B64A64_FLOAT, 4, double, FROM_64_FLOAT, TO_64_FLOAT )
ATTRIB( R64G64B64_FLOAT, 3, double, FROM_64_FLOAT, TO_64_FLOAT )
ATTRIB( R64G64_FLOAT, 2, double, FROM_64_FLOAT, TO_64_FLOAT )
ATTRIB( R64_FLOAT, 1, double, FROM_64_FLOAT, TO_64_FLOAT )

ATTRIB( R32G32B32A32_FLOAT, 4, float, FROM_32_FLOAT, TO_32_FLOAT )
ATTRIB( R32G32B32_FLOAT, 3, float, FROM_32_FLOAT, TO_32_FLOAT )
ATTRIB( R32G32_FLOAT, 2, float, FROM_32_FLOAT, TO_32_FLOAT )
ATTRIB( R32_FLOAT, 1, float, FROM_32_FLOAT, TO_32_FLOAT )

ATTRIB( R32G32B32A32_USCALED, 4, unsigned, FROM_32_USCALED, TO_32_USCALED )
ATTRIB( R32G32B32_USCALED, 3, unsigned, FROM_32_USCALED, TO_32_USCALED )
ATTRIB( R32G32_USCALED, 2, unsigned, FROM_32_USCALED, TO_32_USCALED )
ATTRIB( R32_USCALED, 1, unsigned, FROM_32_USCALED, TO_32_USCALED )

ATTRIB( R32G32B32A32_SSCALED, 4, int, FROM_32_SSCALED, TO_32_SSCALED )
ATTRIB( R32G32B32_SSCALED, 3, int, FROM_32_SSCALED, TO_32_SSCALED )
ATTRIB( R32G32_SSCALED, 2, int, FROM_32_SSCALED, TO_32_SSCALED )
ATTRIB( R32_SSCALED, 1, int, FROM_32_SSCALED, TO_32_SSCALED )

ATTRIB( R32G32B32A32_UNORM, 4, unsigned, FROM_32_UNORM, TO_32_UNORM )
ATTRIB( R32G32B32_UNORM, 3, unsigned, FROM_32_UNORM, TO_32_UNORM )
ATTRIB( R32G32_UNORM, 2, unsigned, FROM_32_UNORM, TO_32_UNORM )
ATTRIB( R32_UNORM, 1, unsigned, FROM_32_UNORM, TO_32_UNORM )

ATTRIB( R32G32B32A32_SNORM, 4, int, FROM_32_SNORM, TO_32_SNORM )
ATTRIB( R32G32B32_SNORM, 3, int, FROM_32_SNORM, TO_32_SNORM )
ATTRIB( R32G32_SNORM, 2, int, FROM_32_SNORM, TO_32_SNORM )
ATTRIB( R32_SNORM, 1, int, FROM_32_SNORM, TO_32_SNORM )

ATTRIB( R16G16B16A16_USCALED, 4, ushort, FROM_16_USCALED, TO_16_USCALED )
ATTRIB( R16G16B16_USCALED, 3, ushort, FROM_16_USCALED, TO_16_USCALED )
ATTRIB( R16G16_USCALED, 2, ushort, FROM_16_USCALED, TO_16_USCALED )
ATTRIB( R16_USCALED, 1, ushort, FROM_16_USCALED, TO_16_USCALED )

ATTRIB( R16G16B16A16_SSCALED, 4, short, FROM_16_SSCALED, TO_16_SSCALED )
ATTRIB( R16G16B16_SSCALED, 3, short, FROM_16_SSCALED, TO_16_SSCALED )
ATTRIB( R16G16_SSCALED, 2, short, FROM_16_SSCALED, TO_16_SSCALED )
ATTRIB( R16_SSCALED, 1, short, FROM_16_SSCALED, TO_16_SSCALED )

ATTRIB( R16G16B16A16_UNORM, 4, ushort, FROM_16_UNORM, TO_16_UNORM )
ATTRIB( R16G16B16_UNORM, 3, ushort, FROM_16_UNORM, TO_16_UNORM )
ATTRIB( R16G16_UNORM, 2, ushort, FROM_16_UNORM, TO_16_UNORM )
ATTRIB( R16_UNORM, 1, ushort, FROM_16_UNORM, TO_16_UNORM )

ATTRIB( R16G16B16A16_SNORM, 4, short, FROM_16_SNORM, TO_16_SNORM )
ATTRIB( R16G16B16_SNORM, 3, short, FROM_16_SNORM, TO_16_SNORM )
ATTRIB( R16G16_SNORM, 2, short, FROM_16_SNORM, TO_16_SNORM )
ATTRIB( R16_SNORM, 1, short, FROM_16_SNORM, TO_16_SNORM )

ATTRIB( R8G8B8A8_USCALED, 4, ubyte, FROM_8_USCALED, TO_8_USCALED )
ATTRIB( R8G8B8_USCALED, 3, ubyte, FROM_8_USCALED, TO_8_USCALED )
ATTRIB( R8G8_USCALED, 2, ubyte, FROM_8_USCALED, TO_8_USCALED )
ATTRIB( R8_USCALED, 1, ubyte, FROM_8_USCALED, TO_8_USCALED )

ATTRIB( R8G8B8A8_SSCALED, 4, char, FROM_8_SSCALED, TO_8_SSCALED )
ATTRIB( R8G8B8_SSCALED, 3, char, FROM_8_SSCALED, TO_8_SSCALED )
ATTRIB( R8G8_SSCALED, 2, char, FROM_8_SSCALED, TO_8_SSCALED )
ATTRIB( R8_SSCALED, 1, char, FROM_8_SSCALED, TO_8_SSCALED )

ATTRIB( R8G8B8A8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )
ATTRIB( R8G8B8_UNORM, 3, ubyte, FROM_8_UNORM, TO_8_UNORM )
ATTRIB( R8G8_UNORM, 2, ubyte, FROM_8_UNORM, TO_8_UNORM )
ATTRIB( R8_UNORM, 1, ubyte, FROM_8_UNORM, TO_8_UNORM )

ATTRIB( R8G8B8A8_SNORM, 4, char, FROM_8_SNORM, TO_8_SNORM )
ATTRIB( R8G8B8_SNORM, 3, char, FROM_8_SNORM, TO_8_SNORM )
ATTRIB( R8G8_SNORM, 2, char, FROM_8_SNORM, TO_8_SNORM )
ATTRIB( R8_SNORM, 1, char, FROM_8_SNORM, TO_8_SNORM )

ATTRIB( A8R8G8B8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )
/*ATTRIB( R8G8B8A8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )*/

ATTRIB( R32G32B32A32_FIXED, 4, int, FROM_32_FIXED, TO_32_FIXED )
ATTRIB( R32G32B32_FIXED, 3, int, FROM_32_FIXED, TO_32_FIXED )
ATTRIB( R32G32_FIXED, 2, int, FROM_32_FIXED, TO_32_FIXED )
ATTRIB( R32_FIXED, 1, int, FROM_32_FIXED, TO_32_FIXED )



/* B8G8R8A8 stores blue in the first byte, so swizzle into RGBA order. */
static void
fetch_B8G8R8A8_UNORM(const void *ptr, float *attrib)
{
   attrib[2] = FROM_8_UNORM(0);
   attrib[1] = FROM_8_UNORM(1);
   attrib[0] = FROM_8_UNORM(2);
   attrib[3] = FROM_8_UNORM(3);
}

static void
emit_B8G8R8A8_UNORM( const float *attrib, void *ptr)
{
   ubyte *out = (ubyte *)ptr;
   out[2] = TO_8_UNORM(attrib[0]);
   out[1] = TO_8_UNORM(attrib[1]);
   out[0] = TO_8_UNORM(attrib[2]);
   out[3] = TO_8_UNORM(attrib[3]);
}

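/* Fallback handlers returned by the format switches below for unrecognized
 * formats: fetch_NULL produces the default (0, 0, 0, 1) attribute and
 * emit_NULL writes nothing.
 */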
static void
fetch_NULL( const void *ptr, float *attrib )
{
   attrib[0] = 0;
   attrib[1] = 0;
   attrib[2] = 0;
   attrib[3] = 1;
}

static void
emit_NULL( const float *attrib, void *ptr )
{
   /* do nothing is the only sensible option */
}

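/**
 * Look up the fetch function for a vertex format.  Unsupported formats
 * assert and fall back to fetch_NULL.
 */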
static fetch_func get_fetch_func( enum pipe_format format )
{
   switch (format) {
   case PIPE_FORMAT_R64_FLOAT:
      return &fetch_R64_FLOAT;
   case PIPE_FORMAT_R64G64_FLOAT:
      return &fetch_R64G64_FLOAT;
   case PIPE_FORMAT_R64G64B64_FLOAT:
      return &fetch_R64G64B64_FLOAT;
   case PIPE_FORMAT_R64G64B64A64_FLOAT:
      return &fetch_R64G64B64A64_FLOAT;

   case PIPE_FORMAT_R32_FLOAT:
      return &fetch_R32_FLOAT;
   case PIPE_FORMAT_R32G32_FLOAT:
      return &fetch_R32G32_FLOAT;
   case PIPE_FORMAT_R32G32B32_FLOAT:
      return &fetch_R32G32B32_FLOAT;
   case PIPE_FORMAT_R32G32B32A32_FLOAT:
      return &fetch_R32G32B32A32_FLOAT;

   case PIPE_FORMAT_R32_UNORM:
      return &fetch_R32_UNORM;
   case PIPE_FORMAT_R32G32_UNORM:
      return &fetch_R32G32_UNORM;
   case PIPE_FORMAT_R32G32B32_UNORM:
      return &fetch_R32G32B32_UNORM;
   case PIPE_FORMAT_R32G32B32A32_UNORM:
      return &fetch_R32G32B32A32_UNORM;

   case PIPE_FORMAT_R32_USCALED:
      return &fetch_R32_USCALED;
   case PIPE_FORMAT_R32G32_USCALED:
      return &fetch_R32G32_USCALED;
   case PIPE_FORMAT_R32G32B32_USCALED:
      return &fetch_R32G32B32_USCALED;
   case PIPE_FORMAT_R32G32B32A32_USCALED:
      return &fetch_R32G32B32A32_USCALED;

   case PIPE_FORMAT_R32_SNORM:
      return &fetch_R32_SNORM;
   case PIPE_FORMAT_R32G32_SNORM:
      return &fetch_R32G32_SNORM;
   case PIPE_FORMAT_R32G32B32_SNORM:
      return &fetch_R32G32B32_SNORM;
   case PIPE_FORMAT_R32G32B32A32_SNORM:
      return &fetch_R32G32B32A32_SNORM;

   case PIPE_FORMAT_R32_SSCALED:
      return &fetch_R32_SSCALED;
   case PIPE_FORMAT_R32G32_SSCALED:
      return &fetch_R32G32_SSCALED;
   case PIPE_FORMAT_R32G32B32_SSCALED:
      return &fetch_R32G32B32_SSCALED;
   case PIPE_FORMAT_R32G32B32A32_SSCALED:
      return &fetch_R32G32B32A32_SSCALED;

   case PIPE_FORMAT_R16_UNORM:
      return &fetch_R16_UNORM;
   case PIPE_FORMAT_R16G16_UNORM:
      return &fetch_R16G16_UNORM;
   case PIPE_FORMAT_R16G16B16_UNORM:
      return &fetch_R16G16B16_UNORM;
   case PIPE_FORMAT_R16G16B16A16_UNORM:
      return &fetch_R16G16B16A16_UNORM;

   case PIPE_FORMAT_R16_USCALED:
      return &fetch_R16_USCALED;
   case PIPE_FORMAT_R16G16_USCALED:
      return &fetch_R16G16_USCALED;
   case PIPE_FORMAT_R16G16B16_USCALED:
      return &fetch_R16G16B16_USCALED;
   case PIPE_FORMAT_R16G16B16A16_USCALED:
      return &fetch_R16G16B16A16_USCALED;

   case PIPE_FORMAT_R16_SNORM:
      return &fetch_R16_SNORM;
   case PIPE_FORMAT_R16G16_SNORM:
      return &fetch_R16G16_SNORM;
   case PIPE_FORMAT_R16G16B16_SNORM:
      return &fetch_R16G16B16_SNORM;
   case PIPE_FORMAT_R16G16B16A16_SNORM:
      return &fetch_R16G16B16A16_SNORM;

   case PIPE_FORMAT_R16_SSCALED:
      return &fetch_R16_SSCALED;
   case PIPE_FORMAT_R16G16_SSCALED:
      return &fetch_R16G16_SSCALED;
   case PIPE_FORMAT_R16G16B16_SSCALED:
      return &fetch_R16G16B16_SSCALED;
   case PIPE_FORMAT_R16G16B16A16_SSCALED:
      return &fetch_R16G16B16A16_SSCALED;

   case PIPE_FORMAT_R8_UNORM:
      return &fetch_R8_UNORM;
   case PIPE_FORMAT_R8G8_UNORM:
      return &fetch_R8G8_UNORM;
   case PIPE_FORMAT_R8G8B8_UNORM:
      return &fetch_R8G8B8_UNORM;
   case PIPE_FORMAT_R8G8B8A8_UNORM:
      return &fetch_R8G8B8A8_UNORM;

   case PIPE_FORMAT_R8_USCALED:
      return &fetch_R8_USCALED;
   case PIPE_FORMAT_R8G8_USCALED:
      return &fetch_R8G8_USCALED;
   case PIPE_FORMAT_R8G8B8_USCALED:
      return &fetch_R8G8B8_USCALED;
   case PIPE_FORMAT_R8G8B8A8_USCALED:
      return &fetch_R8G8B8A8_USCALED;

   case PIPE_FORMAT_R8_SNORM:
      return &fetch_R8_SNORM;
   case PIPE_FORMAT_R8G8_SNORM:
      return &fetch_R8G8_SNORM;
   case PIPE_FORMAT_R8G8B8_SNORM:
      return &fetch_R8G8B8_SNORM;
   case PIPE_FORMAT_R8G8B8A8_SNORM:
      return &fetch_R8G8B8A8_SNORM;

   case PIPE_FORMAT_R8_SSCALED:
      return &fetch_R8_SSCALED;
   case PIPE_FORMAT_R8G8_SSCALED:
      return &fetch_R8G8_SSCALED;
   case PIPE_FORMAT_R8G8B8_SSCALED:
      return &fetch_R8G8B8_SSCALED;
   case PIPE_FORMAT_R8G8B8A8_SSCALED:
      return &fetch_R8G8B8A8_SSCALED;

   case PIPE_FORMAT_A8R8G8B8_UNORM:
      return &fetch_A8R8G8B8_UNORM;

   case PIPE_FORMAT_B8G8R8A8_UNORM:
      return &fetch_B8G8R8A8_UNORM;

   case PIPE_FORMAT_R32_FIXED:
      return &fetch_R32_FIXED;
   case PIPE_FORMAT_R32G32_FIXED:
      return &fetch_R32G32_FIXED;
   case PIPE_FORMAT_R32G32B32_FIXED:
      return &fetch_R32G32B32_FIXED;
   case PIPE_FORMAT_R32G32B32A32_FIXED:
      return &fetch_R32G32B32A32_FIXED;

   default:
      assert(0);
      return &fetch_NULL;
   }
}


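/**
 * Look up the emit function for a vertex format.  Unsupported formats
 * assert and fall back to emit_NULL.
 */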
static emit_func get_emit_func( enum pipe_format format )
{
   /* silence warnings */
   (void) emit_R32G32B32A32_FIXED;
   (void) emit_R32G32B32_FIXED;
   (void) emit_R32G32_FIXED;
   (void) emit_R32_FIXED;

   switch (format) {
   case PIPE_FORMAT_R64_FLOAT:
      return &emit_R64_FLOAT;
   case PIPE_FORMAT_R64G64_FLOAT:
      return &emit_R64G64_FLOAT;
   case PIPE_FORMAT_R64G64B64_FLOAT:
      return &emit_R64G64B64_FLOAT;
   case PIPE_FORMAT_R64G64B64A64_FLOAT:
      return &emit_R64G64B64A64_FLOAT;

   case PIPE_FORMAT_R32_FLOAT:
      return &emit_R32_FLOAT;
   case PIPE_FORMAT_R32G32_FLOAT:
      return &emit_R32G32_FLOAT;
   case PIPE_FORMAT_R32G32B32_FLOAT:
      return &emit_R32G32B32_FLOAT;
   case PIPE_FORMAT_R32G32B32A32_FLOAT:
      return &emit_R32G32B32A32_FLOAT;

   case PIPE_FORMAT_R32_UNORM:
      return &emit_R32_UNORM;
   case PIPE_FORMAT_R32G32_UNORM:
      return &emit_R32G32_UNORM;
   case PIPE_FORMAT_R32G32B32_UNORM:
      return &emit_R32G32B32_UNORM;
   case PIPE_FORMAT_R32G32B32A32_UNORM:
      return &emit_R32G32B32A32_UNORM;

   case PIPE_FORMAT_R32_USCALED:
      return &emit_R32_USCALED;
   case PIPE_FORMAT_R32G32_USCALED:
      return &emit_R32G32_USCALED;
   case PIPE_FORMAT_R32G32B32_USCALED:
      return &emit_R32G32B32_USCALED;
   case PIPE_FORMAT_R32G32B32A32_USCALED:
      return &emit_R32G32B32A32_USCALED;

   case PIPE_FORMAT_R32_SNORM:
      return &emit_R32_SNORM;
   case PIPE_FORMAT_R32G32_SNORM:
      return &emit_R32G32_SNORM;
   case PIPE_FORMAT_R32G32B32_SNORM:
      return &emit_R32G32B32_SNORM;
   case PIPE_FORMAT_R32G32B32A32_SNORM:
      return &emit_R32G32B32A32_SNORM;

   case PIPE_FORMAT_R32_SSCALED:
      return &emit_R32_SSCALED;
   case PIPE_FORMAT_R32G32_SSCALED:
      return &emit_R32G32_SSCALED;
   case PIPE_FORMAT_R32G32B32_SSCALED:
      return &emit_R32G32B32_SSCALED;
   case PIPE_FORMAT_R32G32B32A32_SSCALED:
      return &emit_R32G32B32A32_SSCALED;

   case PIPE_FORMAT_R16_UNORM:
      return &emit_R16_UNORM;
   case PIPE_FORMAT_R16G16_UNORM:
      return &emit_R16G16_UNORM;
   case PIPE_FORMAT_R16G16B16_UNORM:
      return &emit_R16G16B16_UNORM;
   case PIPE_FORMAT_R16G16B16A16_UNORM:
      return &emit_R16G16B16A16_UNORM;

   case PIPE_FORMAT_R16_USCALED:
      return &emit_R16_USCALED;
   case PIPE_FORMAT_R16G16_USCALED:
      return &emit_R16G16_USCALED;
   case PIPE_FORMAT_R16G16B16_USCALED:
      return &emit_R16G16B16_USCALED;
   case PIPE_FORMAT_R16G16B16A16_USCALED:
      return &emit_R16G16B16A16_USCALED;

   case PIPE_FORMAT_R16_SNORM:
      return &emit_R16_SNORM;
   case PIPE_FORMAT_R16G16_SNORM:
      return &emit_R16G16_SNORM;
   case PIPE_FORMAT_R16G16B16_SNORM:
      return &emit_R16G16B16_SNORM;
   case PIPE_FORMAT_R16G16B16A16_SNORM:
      return &emit_R16G16B16A16_SNORM;

   case PIPE_FORMAT_R16_SSCALED:
      return &emit_R16_SSCALED;
   case PIPE_FORMAT_R16G16_SSCALED:
      return &emit_R16G16_SSCALED;
   case PIPE_FORMAT_R16G16B16_SSCALED:
      return &emit_R16G16B16_SSCALED;
   case PIPE_FORMAT_R16G16B16A16_SSCALED:
      return &emit_R16G16B16A16_SSCALED;

   case PIPE_FORMAT_R8_UNORM:
      return &emit_R8_UNORM;
   case PIPE_FORMAT_R8G8_UNORM:
      return &emit_R8G8_UNORM;
   case PIPE_FORMAT_R8G8B8_UNORM:
      return &emit_R8G8B8_UNORM;
   case PIPE_FORMAT_R8G8B8A8_UNORM:
      return &emit_R8G8B8A8_UNORM;

   case PIPE_FORMAT_R8_USCALED:
      return &emit_R8_USCALED;
   case PIPE_FORMAT_R8G8_USCALED:
      return &emit_R8G8_USCALED;
   case PIPE_FORMAT_R8G8B8_USCALED:
      return &emit_R8G8B8_USCALED;
   case PIPE_FORMAT_R8G8B8A8_USCALED:
      return &emit_R8G8B8A8_USCALED;

   case PIPE_FORMAT_R8_SNORM:
      return &emit_R8_SNORM;
   case PIPE_FORMAT_R8G8_SNORM:
      return &emit_R8G8_SNORM;
   case PIPE_FORMAT_R8G8B8_SNORM:
      return &emit_R8G8B8_SNORM;
   case PIPE_FORMAT_R8G8B8A8_SNORM:
      return &emit_R8G8B8A8_SNORM;

   case PIPE_FORMAT_R8_SSCALED:
      return &emit_R8_SSCALED;
   case PIPE_FORMAT_R8G8_SSCALED:
      return &emit_R8G8_SSCALED;
   case PIPE_FORMAT_R8G8B8_SSCALED:
      return &emit_R8G8B8_SSCALED;
   case PIPE_FORMAT_R8G8B8A8_SSCALED:
      return &emit_R8G8B8A8_SSCALED;

   case PIPE_FORMAT_A8R8G8B8_UNORM:
      return &emit_A8R8G8B8_UNORM;

   case PIPE_FORMAT_B8G8R8A8_UNORM:
      return &emit_B8G8R8A8_UNORM;

   default:
      assert(0);
      return &emit_NULL;
   }
}


/**
 * Fetch, convert and emit vertex attributes for the 'count' vertices
 * indexed by 'elts'.
 */
static void PIPE_CDECL generic_run_elts( struct translate *translate,
                                         const unsigned *elts,
                                         unsigned count,
                                         void *output_buffer )
{
   struct translate_generic *tg = translate_generic(translate);
   char *vert = output_buffer;
   unsigned nr_attrs = tg->nr_attrib;
   unsigned attr;
   unsigned i;

   /* loop over vertices, converting each attribute (vertex shader input)
    * from its input format to the requested output format
    */
   for (i = 0; i < count; i++) {
      unsigned elt = *elts++;

      for (attr = 0; attr < nr_attrs; attr++) {
         float data[4];

         const char *src = (tg->attrib[attr].input_ptr +
                            tg->attrib[attr].input_stride * elt);

         char *dst = (vert +
                      tg->attrib[attr].output_offset);

         tg->attrib[attr].fetch( src, data );

         if (0) debug_printf("vert %d/%d attr %d: %f %f %f %f\n",
                             i, elt, attr, data[0], data[1], data[2], data[3]);

         tg->attrib[attr].emit( data, dst );
      }

      vert += tg->translate.key.output_stride;
   }
}


/**
 * As above, but for a linear run of vertices [start, start + count).
 */
static void PIPE_CDECL generic_run( struct translate *translate,
                                    unsigned start,
                                    unsigned count,
                                    void *output_buffer )
{
   struct translate_generic *tg = translate_generic(translate);
   char *vert = output_buffer;
   unsigned nr_attrs = tg->nr_attrib;
   unsigned attr;
   unsigned i;

   /* loop over vertices, converting each attribute (vertex shader input)
    * from its input format to the requested output format
    */
   for (i = 0; i < count; i++) {
      unsigned elt = start + i;

      for (attr = 0; attr < nr_attrs; attr++) {
         float data[4];

         const char *src = (tg->attrib[attr].input_ptr +
                            tg->attrib[attr].input_stride * elt);

         char *dst = (vert +
                      tg->attrib[attr].output_offset);

         tg->attrib[attr].fetch( src, data );

         if (0) debug_printf("vert %d attr %d: %f %f %f %f\n",
                             i, attr, data[0], data[1], data[2], data[3]);

         tg->attrib[attr].emit( data, dst );
      }

      vert += tg->translate.key.output_stride;
   }
}


/**
 * Record the base pointer and stride of an input vertex buffer for every
 * attribute sourced from that buffer.
 */
static void generic_set_buffer( struct translate *translate,
                                unsigned buf,
                                const void *ptr,
                                unsigned stride )
{
   struct translate_generic *tg = translate_generic(translate);
   unsigned i;

   for (i = 0; i < tg->nr_attrib; i++) {
      if (tg->attrib[i].buffer == buf) {
         tg->attrib[i].input_ptr = ((char *)ptr +
                                    tg->attrib[i].input_offset);
         tg->attrib[i].input_stride = stride;
      }
   }
}


static void generic_release( struct translate *translate )
{
   /* Refcount?
    */
   FREE(translate);
}


/**
 * Create a translate object that converts vertices as described by 'key',
 * resolving a fetch and emit function for each element.
 */
struct translate *translate_generic_create( const struct translate_key *key )
{
   struct translate_generic *tg = CALLOC_STRUCT(translate_generic);
   unsigned i;

   if (tg == NULL)
      return NULL;

   tg->translate.key = *key;
   tg->translate.release = generic_release;
   tg->translate.set_buffer = generic_set_buffer;
   tg->translate.run_elts = generic_run_elts;
   tg->translate.run = generic_run;

   for (i = 0; i < key->nr_elements; i++) {

      tg->attrib[i].fetch = get_fetch_func(key->element[i].input_format);
      tg->attrib[i].buffer = key->element[i].input_buffer;
      tg->attrib[i].input_offset = key->element[i].input_offset;

      tg->attrib[i].emit = get_emit_func(key->element[i].output_format);
      tg->attrib[i].output_offset = key->element[i].output_offset;

   }

   tg->nr_attrib = key->nr_elements;


   return &tg->translate;
}
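

/*
 * A minimal usage sketch, compiled out.  It assumes only the translate_key /
 * translate_element fields and translate callbacks referenced above; the
 * example_usage name and buffer layout are illustrative, not part of any API.
 * It interleaves a float3 position stream and a ubyte4 color stream into a
 * float4 + float4 output vertex.
 */
#if 0
static void example_usage( const void *positions, const void *colors,
                           unsigned vertex_count, void *out_verts )
{
   struct translate_key key;
   struct translate *t;

   memset(&key, 0, sizeof(key));

   /* element 0: float3 position from buffer 0, emitted as float4 at offset 0 */
   key.element[0].input_format = PIPE_FORMAT_R32G32B32_FLOAT;
   key.element[0].input_buffer = 0;
   key.element[0].input_offset = 0;
   key.element[0].output_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
   key.element[0].output_offset = 0;

   /* element 1: ubyte4 color from buffer 1, emitted as float4 at offset 16 */
   key.element[1].input_format = PIPE_FORMAT_R8G8B8A8_UNORM;
   key.element[1].input_buffer = 1;
   key.element[1].input_offset = 0;
   key.element[1].output_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
   key.element[1].output_offset = 16;

   key.nr_elements = 2;
   key.output_stride = 32;

   t = translate_generic_create(&key);
   if (t == NULL)
      return;

   t->set_buffer(t, 0, positions, 3 * sizeof(float));
   t->set_buffer(t, 1, colors, 4 * sizeof(ubyte));

   /* convert vertices [0, vertex_count) into the interleaved output */
   t->run(t, 0, vertex_count, out_verts);

   t->release(t);
}
#endif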