/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */

#ifndef _VTN_PRIVATE_H_
#define _VTN_PRIVATE_H_

#include <setjmp.h>

#include "nir/nir.h"
#include "nir/nir_builder.h"
#include "util/u_dynarray.h"
#include "nir_spirv.h"
#include "spirv.h"

struct vtn_builder;
struct vtn_decoration;

void vtn_log(struct vtn_builder *b, enum nir_spirv_debug_level level,
             size_t spirv_offset, const char *message);

void vtn_logf(struct vtn_builder *b, enum nir_spirv_debug_level level,
              size_t spirv_offset, const char *fmt, ...) PRINTFLIKE(4, 5);

#define vtn_info(...) vtn_logf(b, NIR_SPIRV_DEBUG_LEVEL_INFO, 0, __VA_ARGS__)

void _vtn_warn(struct vtn_builder *b, const char *file, unsigned line,
               const char *fmt, ...) PRINTFLIKE(4, 5);
#define vtn_warn(...) _vtn_warn(b, __FILE__, __LINE__, __VA_ARGS__)

void _vtn_err(struct vtn_builder *b, const char *file, unsigned line,
              const char *fmt, ...) PRINTFLIKE(4, 5);
#define vtn_err(...) _vtn_err(b, __FILE__, __LINE__, __VA_ARGS__)
/** Fail SPIR-V parsing
 *
 * This function logs an error and then bails out of the shader compile using
 * longjmp.  For this to be safe, three things must hold:
 *
 *  1) We must guarantee that setjmp is called after allocating the builder
 *     and setting up b->debug (so that logging works) but before any errors
 *     have a chance to occur.
 *
 *  2) While doing the SPIR-V -> NIR conversion, we need to be careful to
 *     ensure that all heap allocations happen through ralloc and are parented
 *     to the builder.  This way they will get properly cleaned up on error.
 *
 *  3) We must ensure that _vtn_fail is never called while a mutex lock or a
 *     reference to any other resource is held, with the exception of ralloc
 *     objects which are parented to the builder.
 *
 * So long as these three things continue to hold, we can easily longjmp back
 * to spirv_to_nir(), clean up the builder, and return NULL.
 */
NORETURN void
_vtn_fail(struct vtn_builder *b, const char *file, unsigned line,
          const char *fmt, ...) PRINTFLIKE(4, 5);

#define vtn_fail(...) _vtn_fail(b, __FILE__, __LINE__, __VA_ARGS__)

/** Fail if the given expression evaluates to true */
#define vtn_fail_if(expr, ...) \
   do { \
      if (unlikely(expr)) \
         vtn_fail(__VA_ARGS__); \
   } while (0)
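
/* A hedged usage sketch (illustrative only, not a declaration in this
 * header): instruction handlers typically validate operands with vtn_fail_if
 * rather than assert, e.g.
 *
 *    vtn_fail_if(w[1] >= b->value_id_bound,
 *                "Result type id %u is out of bounds", w[1]);
 *
 * On failure this logs through the builder's debug callback and longjmps
 * back to spirv_to_nir() via b->fail_jump, so the rest of the handler never
 * runs and everything ralloc'd off the builder is released with it.
 */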

#define _vtn_fail_with(t, msg, v) \
   vtn_fail("%s: %s (%u)\n", msg, spirv_ ## t ## _to_string(v), v)

#define vtn_fail_with_decoration(msg, v) _vtn_fail_with(decoration, msg, v)
#define vtn_fail_with_opcode(msg, v)     _vtn_fail_with(op, msg, v)

/** Assert that a condition is true and, if it isn't, vtn_fail
 *
 * This macro is transitional only and should not be used in new code.  Use
 * vtn_fail_if and provide a real message instead.
 */
#define vtn_assert(expr) \
   do { \
      if (!likely(expr)) \
         vtn_fail("%s", #expr); \
   } while (0)

enum vtn_value_type {
   vtn_value_type_invalid = 0,
   vtn_value_type_undef,
   vtn_value_type_string,
   vtn_value_type_decoration_group,
   vtn_value_type_type,
   vtn_value_type_constant,
   vtn_value_type_pointer,
   vtn_value_type_function,
   vtn_value_type_block,
   vtn_value_type_ssa,
   vtn_value_type_extension,
   vtn_value_type_image_pointer,
   vtn_value_type_sampled_image,
};

enum vtn_branch_type {
   vtn_branch_type_none,
   vtn_branch_type_switch_break,
   vtn_branch_type_switch_fallthrough,
   vtn_branch_type_loop_break,
   vtn_branch_type_loop_continue,
   vtn_branch_type_discard,
   vtn_branch_type_return,
};

enum vtn_cf_node_type {
   vtn_cf_node_type_block,
   vtn_cf_node_type_if,
   vtn_cf_node_type_loop,
   vtn_cf_node_type_switch,
};

struct vtn_cf_node {
   struct list_head link;
   enum vtn_cf_node_type type;
};

struct vtn_loop {
   struct vtn_cf_node node;

   /* The main body of the loop */
   struct list_head body;

   /* The "continue" part of the loop.  This gets executed after the body
    * and is where you go when you hit a continue.
    */
   struct list_head cont_body;

   SpvLoopControlMask control;
};

struct vtn_if {
   struct vtn_cf_node node;

   uint32_t condition;

   enum vtn_branch_type then_type;
   struct list_head then_body;

   enum vtn_branch_type else_type;
   struct list_head else_body;

   SpvSelectionControlMask control;
};

struct vtn_case {
   struct list_head link;

   struct list_head body;

   /* The block that starts this case */
   struct vtn_block *start_block;

   /* The fallthrough case, if any */
   struct vtn_case *fallthrough;

   /* The uint32_t values that map to this case */
   struct util_dynarray values;

   /* True if this is the default case */
   bool is_default;

   /* Initialized to false; used when sorting the list of cases */
   bool visited;
};

struct vtn_switch {
   struct vtn_cf_node node;

   uint32_t selector;

   struct list_head cases;
};

struct vtn_block {
   struct vtn_cf_node node;

   /** A pointer to the label instruction */
   const uint32_t *label;

   /** A pointer to the merge instruction (or NULL if none exists) */
   const uint32_t *merge;

   /** A pointer to the branch instruction that ends this block */
   const uint32_t *branch;

   enum vtn_branch_type branch_type;

   /** Points to the loop that this block starts (if it starts a loop) */
   struct vtn_loop *loop;

   /** Points to the switch case started by this block (if any) */
   struct vtn_case *switch_case;

   /** Every block ends in a nop intrinsic so that we can find it again */
   nir_intrinsic_instr *end_nop;
};

struct vtn_function {
   struct exec_node node;

   struct vtn_type *type;

   bool referenced;
   bool emitted;

   nir_function_impl *impl;
   struct vtn_block *start_block;

   struct list_head body;

   const uint32_t *end;

   SpvFunctionControlMask control;
};

typedef bool (*vtn_instruction_handler)(struct vtn_builder *, SpvOp,
                                        const uint32_t *, unsigned);

void vtn_build_cfg(struct vtn_builder *b, const uint32_t *words,
                   const uint32_t *end);
void vtn_function_emit(struct vtn_builder *b, struct vtn_function *func,
                       vtn_instruction_handler instruction_handler);
void vtn_handle_function_call(struct vtn_builder *b, SpvOp opcode,
                              const uint32_t *w, unsigned count);

const uint32_t *
vtn_foreach_instruction(struct vtn_builder *b, const uint32_t *start,
                        const uint32_t *end, vtn_instruction_handler handler);

struct vtn_ssa_value {
   union {
      nir_ssa_def *def;
      struct vtn_ssa_value **elems;
   };

   /* For matrices, if this is non-NULL, then this value is actually the
    * transpose of some other value.  The value that `transposed` points to
    * always dominates this value.
    */
   struct vtn_ssa_value *transposed;

   const struct glsl_type *type;

   /* Access qualifiers */
   enum gl_access_qualifier access;
};
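
/* A note on the union in vtn_ssa_value above: which member is valid follows
 * from the type.  Scalar and vector types are backed by a single nir_ssa_def
 * in `def`, while matrices, arrays, and structs use `elems`, with one
 * vtn_ssa_value per column, array element, or struct member (this is the
 * layout that vtn_create_ssa_value(), declared below, builds).
 */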

enum vtn_base_type {
   vtn_base_type_void,
   vtn_base_type_scalar,
   vtn_base_type_vector,
   vtn_base_type_matrix,
   vtn_base_type_array,
   vtn_base_type_struct,
   vtn_base_type_pointer,
   vtn_base_type_image,
   vtn_base_type_sampler,
   vtn_base_type_sampled_image,
   vtn_base_type_function,
};

struct vtn_type {
   enum vtn_base_type base_type;

   const struct glsl_type *type;

   /* The SPIR-V id of the given type. */
   uint32_t id;

   /* Specifies the length of complex types.
    *
    * For Workgroup pointers, this is the size of the referenced type.
    */
   unsigned length;

   /* for arrays, matrices and pointers, the array stride */
   unsigned stride;

   /* Access qualifiers */
   enum gl_access_qualifier access;

   union {
      /* Members for scalar, vector, and array-like types */
      struct {
         /* for arrays, the vtn_type for the elements of the array */
         struct vtn_type *array_element;

         /* for matrices, whether the matrix is stored row-major */
         bool row_major:1;

         /* Whether this type, or a parent type, has been decorated as a
          * builtin
          */
         bool is_builtin:1;

         /* Which built-in to use */
         SpvBuiltIn builtin;
      };

      /* Members for struct types */
      struct {
         /* for structures, the vtn_type for each member */
         struct vtn_type **members;

         /* for structs, the offset of each member */
         unsigned *offsets;

         /* for structs, whether it was decorated as a "non-SSBO-like" block */
         bool block:1;

         /* for structs, whether it was decorated as an "SSBO-like" block */
         bool buffer_block:1;

         /* for structs with block == true, whether this is a builtin block
          * (i.e. a block that contains only builtins).
          */
         bool builtin_block:1;

         /* for structs and unions, whether the members are tightly packed
          * with no implicit alignment padding between them.
          *
          * Set by the CPacked decoration in kernels.
          */
         bool packed:1;
      };

      /* Members for pointer types */
      struct {
         /* For pointers, the vtn_type for the dereferenced type */
         struct vtn_type *deref;

         /* Storage class for pointers */
         SpvStorageClass storage_class;

         /* Required alignment for pointers */
         uint32_t align;
      };

      /* Members for image types */
      struct {
         /* For images, indicates whether it's sampled or storage */
         bool sampled;

         /* Image format for image_load_store type images */
         unsigned image_format;

         /* Access qualifier for storage images */
         SpvAccessQualifier access_qualifier;
      };

      /* Members for sampled image types */
      struct {
         /* For sampled images, the image type */
         struct vtn_type *image;
      };

      /* Members for function types */
      struct {
         /* For functions, the vtn_type for each parameter */
         struct vtn_type **params;

         /* Return type for functions */
         struct vtn_type *return_type;
      };
   };
};

bool vtn_type_contains_block(struct vtn_builder *b, struct vtn_type *type);

bool vtn_types_compatible(struct vtn_builder *b,
                          struct vtn_type *t1, struct vtn_type *t2);

struct vtn_variable;

enum vtn_access_mode {
   vtn_access_mode_id,
   vtn_access_mode_literal,
};

struct vtn_access_link {
   enum vtn_access_mode mode;
   int64_t id;
};

struct vtn_access_chain {
   uint32_t length;

   /** Whether or not to treat the base pointer as an array.  This is only
    * true if this access chain came from an OpPtrAccessChain.
    */
   bool ptr_as_array;

   /* Access qualifiers */
   enum gl_access_qualifier access;

   /** Struct elements and array offsets.
    *
    * This is an array of 1 so that it can conveniently be created on the
    * stack but the real length is given by the length field.
    */
   struct vtn_access_link link[1];
};
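
/* A hedged sketch (not a declaration in this header) of how a chain with
 * more than one link is typically allocated: because link[] is declared with
 * length 1, a chain of N links needs room for N - 1 extra links, parented to
 * the builder so vtn_fail can clean it up:
 *
 *    struct vtn_access_chain *chain =
 *       rzalloc_size(b, sizeof(*chain) +
 *                       (length - 1) * sizeof(chain->link[0]));
 *    chain->length = length;
 */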

enum vtn_variable_mode {
   vtn_variable_mode_function,
   vtn_variable_mode_private,
   vtn_variable_mode_uniform,
   vtn_variable_mode_ubo,
   vtn_variable_mode_ssbo,
   vtn_variable_mode_phys_ssbo,
   vtn_variable_mode_push_constant,
   vtn_variable_mode_workgroup,
   vtn_variable_mode_cross_workgroup,
   vtn_variable_mode_input,
   vtn_variable_mode_output,
};

struct vtn_pointer {
   /** The variable mode for the referenced data */
   enum vtn_variable_mode mode;

   /** The dereferenced type of this pointer */
   struct vtn_type *type;

   /** The pointer type of this pointer
    *
    * This may be NULL for some temporary pointers constructed as part of a
    * large load, store, or copy.  It MUST be valid for all pointers which are
    * stored as SPIR-V SSA values.
    */
   struct vtn_type *ptr_type;

   /** The referenced variable, if known
    *
    * This field may be NULL if the pointer uses a (block_index, offset) pair
    * instead of an access chain or if the access chain starts at a deref.
    */
   struct vtn_variable *var;

   /** The NIR deref corresponding to this pointer */
   nir_deref_instr *deref;

   /** A (block_index, offset) pair representing a UBO or SSBO position. */
   struct nir_ssa_def *block_index;
   struct nir_ssa_def *offset;

   /* Access qualifiers */
   enum gl_access_qualifier access;
};

bool vtn_pointer_uses_ssa_offset(struct vtn_builder *b,
                                 struct vtn_pointer *ptr);
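
/* An explanatory note: a vtn_pointer is carried either as a NIR deref chain
 * (the deref field) or, for UBO/SSBO-style access, as the (block_index,
 * offset) pair; vtn_pointer_uses_ssa_offset() above reports which of the two
 * representations a given pointer uses.
 */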

struct vtn_variable {
   enum vtn_variable_mode mode;

   struct vtn_type *type;

   unsigned descriptor_set;
   unsigned binding;
   bool explicit_binding;
   unsigned offset;
   unsigned input_attachment_index;
   bool patch;

   nir_variable *var;

   /* If the variable is a struct with a location set on it then this will be
    * stored here. This will be used to calculate locations for members that
    * don’t have their own explicit location.
    */
   int base_location;

   int shared_location;

   /**
    * Some early released versions of GLSLang implemented all function calls
    * by making copies of all parameters into temporary variables and passing
    * those variables into the function.  It even did so for samplers and
    * images, which violates the SPIR-V spec.  Unfortunately, two games
    * (Talos Principle and Doom) shipped with this old version of GLSLang and
    * also happen to pass samplers into functions.  Talos Principle received
    * an update fairly shortly after release with an updated GLSLang.  Doom,
    * on the other hand, has never received an update, so we need to work
    * around this GLSLang issue in SPIR-V -> NIR.  Hopefully, we can drop this
    * hack at some point in the future.
    */
   struct vtn_pointer *copy_prop_sampler;

   /* Access qualifiers. */
   enum gl_access_qualifier access;
};

struct vtn_image_pointer {
   struct vtn_pointer *image;
   nir_ssa_def *coord;
   nir_ssa_def *sample;
};

struct vtn_sampled_image {
   struct vtn_type *type;
   struct vtn_pointer *image; /* Image or array of images */
   struct vtn_pointer *sampler; /* Sampler */
};

struct vtn_value {
   enum vtn_value_type value_type;
   const char *name;
   struct vtn_decoration *decoration;
   struct vtn_type *type;
   union {
      void *ptr;
      char *str;
      nir_constant *constant;
      struct vtn_pointer *pointer;
      struct vtn_image_pointer *image;
      struct vtn_sampled_image *sampled_image;
      struct vtn_function *func;
      struct vtn_block *block;
      struct vtn_ssa_value *ssa;
      vtn_instruction_handler ext_handler;
   };
};

#define VTN_DEC_DECORATION -1
#define VTN_DEC_EXECUTION_MODE -2
#define VTN_DEC_STRUCT_MEMBER0 0

struct vtn_decoration {
   struct vtn_decoration *next;

   /* Specifies how to apply this decoration.  Negative values represent a
    * decoration or execution mode. (See the VTN_DEC_ #defines above.)
    * Non-negative values specify that it applies to a structure member.
    */
   int scope;

   const uint32_t *operands;
   struct vtn_value *group;

   union {
      SpvDecoration decoration;
      SpvExecutionMode exec_mode;
   };
};
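
/* For example, a decoration applied by OpDecorate is recorded with
 * scope == VTN_DEC_DECORATION, an execution mode with
 * scope == VTN_DEC_EXECUTION_MODE, and an OpMemberDecorate on struct member
 * 2 with scope == VTN_DEC_STRUCT_MEMBER0 + 2.
 */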

struct vtn_builder {
   nir_builder nb;

   /* Used by vtn_fail to jump back to the beginning of SPIR-V compilation */
   jmp_buf fail_jump;

   const uint32_t *spirv;
   size_t spirv_word_count;

   nir_shader *shader;
   struct spirv_to_nir_options *options;
   struct vtn_block *block;

   /* Current offset, file, line, and column.  Useful for debugging.  Set
    * automatically by vtn_foreach_instruction.
    */
   size_t spirv_offset;
   char *file;
   int line, col;

   /*
    * In SPIR-V, constants are global, whereas in NIR, the load_const
    * instruction we use is per-function.  So while we parse each function, we
    * keep a hash table of the constants we've resolved to SSA values so far,
    * and we lazily resolve them when we see them used in a function.
    */
   struct hash_table *const_table;

   /*
    * Map from phi instructions (pointer to the start of the instruction)
    * to the variable corresponding to it.
    */
   struct hash_table *phi_table;

   unsigned num_specializations;
   struct nir_spirv_specialization *specializations;

   unsigned value_id_bound;
   struct vtn_value *values;

   /* True if we should watch out for GLSLang issue #179 */
   bool wa_glslang_179;

   gl_shader_stage entry_point_stage;
   const char *entry_point_name;
   struct vtn_value *entry_point;
   struct vtn_value *workgroup_size_builtin;
   bool variable_pointers;

   struct vtn_function *func;
   struct exec_list functions;

   /* Current function parameter index */
   unsigned func_param_idx;

   bool has_loop_continue;

   /* false by default, set to true by the ContractionOff execution mode */
   bool exact;

   /* when a physical memory model is chosen */
   bool physical_ptrs;
};

nir_ssa_def *
vtn_pointer_to_ssa(struct vtn_builder *b, struct vtn_pointer *ptr);
struct vtn_pointer *
vtn_pointer_from_ssa(struct vtn_builder *b, nir_ssa_def *ssa,
                     struct vtn_type *ptr_type);

static inline struct vtn_value *
vtn_untyped_value(struct vtn_builder *b, uint32_t value_id)
{
   vtn_fail_if(value_id >= b->value_id_bound,
               "SPIR-V id %u is out-of-bounds", value_id);
   return &b->values[value_id];
}

/* Consider not using this function directly and instead use
 * vtn_push_ssa/vtn_push_value_pointer so that applying decorations is
 * handled appropriately by common code.
 */
static inline struct vtn_value *
vtn_push_value(struct vtn_builder *b, uint32_t value_id,
               enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);

   vtn_fail_if(val->value_type != vtn_value_type_invalid,
               "SPIR-V id %u has already been written by another instruction",
               value_id);

   val->value_type = value_type;

   return &b->values[value_id];
}

static inline struct vtn_value *
vtn_value(struct vtn_builder *b, uint32_t value_id,
          enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);
   vtn_fail_if(val->value_type != value_type,
               "SPIR-V id %u is the wrong kind of value", value_id);
   return val;
}

bool
vtn_set_instruction_result_type(struct vtn_builder *b, SpvOp opcode,
                                const uint32_t *w, unsigned count);

static inline uint64_t
vtn_constant_uint(struct vtn_builder *b, uint32_t value_id)
{
   struct vtn_value *val = vtn_value(b, value_id, vtn_value_type_constant);

   vtn_fail_if(val->type->base_type != vtn_base_type_scalar ||
               !glsl_type_is_integer(val->type->type),
               "Expected id %u to be an integer constant", value_id);

   switch (glsl_get_bit_size(val->type->type)) {
   case 8:  return val->constant->values[0][0].u8;
   case 16: return val->constant->values[0][0].u16;
   case 32: return val->constant->values[0][0].u32;
   case 64: return val->constant->values[0][0].u64;
   default: unreachable("Invalid bit size");
   }
}

static inline enum gl_access_qualifier vtn_value_access(struct vtn_value *value)
{
   switch (value->value_type) {
   case vtn_value_type_invalid:
   case vtn_value_type_undef:
   case vtn_value_type_string:
   case vtn_value_type_decoration_group:
   case vtn_value_type_constant:
   case vtn_value_type_function:
   case vtn_value_type_block:
   case vtn_value_type_extension:
      return 0;
   case vtn_value_type_type:
      return value->type->access;
   case vtn_value_type_pointer:
      return value->pointer->access;
   case vtn_value_type_ssa:
      return value->ssa->access;
   case vtn_value_type_image_pointer:
      return value->image->image->access;
   case vtn_value_type_sampled_image:
      return value->sampled_image->image->access |
         value->sampled_image->sampler->access;
   }

   unreachable("invalid type");
}

struct vtn_ssa_value *vtn_ssa_value(struct vtn_builder *b, uint32_t value_id);

struct vtn_value *vtn_push_value_pointer(struct vtn_builder *b,
                                         uint32_t value_id,
                                         struct vtn_pointer *ptr);

struct vtn_value *vtn_push_ssa(struct vtn_builder *b, uint32_t value_id,
                               struct vtn_type *type, struct vtn_ssa_value *ssa);

struct vtn_ssa_value *vtn_create_ssa_value(struct vtn_builder *b,
                                           const struct glsl_type *type);

struct vtn_ssa_value *vtn_ssa_transpose(struct vtn_builder *b,
                                        struct vtn_ssa_value *src);

nir_ssa_def *vtn_vector_extract(struct vtn_builder *b, nir_ssa_def *src,
                                unsigned index);
nir_ssa_def *vtn_vector_extract_dynamic(struct vtn_builder *b, nir_ssa_def *src,
                                        nir_ssa_def *index);
nir_ssa_def *vtn_vector_insert(struct vtn_builder *b, nir_ssa_def *src,
                               nir_ssa_def *insert, unsigned index);
nir_ssa_def *vtn_vector_insert_dynamic(struct vtn_builder *b, nir_ssa_def *src,
                                       nir_ssa_def *insert, nir_ssa_def *index);

nir_deref_instr *vtn_nir_deref(struct vtn_builder *b, uint32_t id);

struct vtn_pointer *vtn_pointer_for_variable(struct vtn_builder *b,
                                             struct vtn_variable *var,
                                             struct vtn_type *ptr_type);

nir_deref_instr *vtn_pointer_to_deref(struct vtn_builder *b,
                                      struct vtn_pointer *ptr);
nir_ssa_def *
vtn_pointer_to_offset(struct vtn_builder *b, struct vtn_pointer *ptr,
                      nir_ssa_def **index_out);

struct vtn_ssa_value *
vtn_local_load(struct vtn_builder *b, nir_deref_instr *src,
               enum gl_access_qualifier access);

void vtn_local_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                     nir_deref_instr *dest,
                     enum gl_access_qualifier access);

struct vtn_ssa_value *
vtn_variable_load(struct vtn_builder *b, struct vtn_pointer *src);

void vtn_variable_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                        struct vtn_pointer *dest);

void vtn_handle_variables(struct vtn_builder *b, SpvOp opcode,
                          const uint32_t *w, unsigned count);


typedef void (*vtn_decoration_foreach_cb)(struct vtn_builder *,
                                          struct vtn_value *,
                                          int member,
                                          const struct vtn_decoration *,
                                          void *);

void vtn_foreach_decoration(struct vtn_builder *b, struct vtn_value *value,
                            vtn_decoration_foreach_cb cb, void *data);

typedef void (*vtn_execution_mode_foreach_cb)(struct vtn_builder *,
                                              struct vtn_value *,
                                              const struct vtn_decoration *,
                                              void *);

void vtn_foreach_execution_mode(struct vtn_builder *b, struct vtn_value *value,
                                vtn_execution_mode_foreach_cb cb, void *data);

nir_op vtn_nir_alu_op_for_spirv_opcode(struct vtn_builder *b,
                                       SpvOp opcode, bool *swap,
                                       unsigned src_bit_size, unsigned dst_bit_size);

void vtn_handle_alu(struct vtn_builder *b, SpvOp opcode,
                    const uint32_t *w, unsigned count);

void vtn_handle_bitcast(struct vtn_builder *b, const uint32_t *w,
                        unsigned count);

void vtn_handle_subgroup(struct vtn_builder *b, SpvOp opcode,
                         const uint32_t *w, unsigned count);

bool vtn_handle_glsl450_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                    const uint32_t *words, unsigned count);

bool vtn_handle_opencl_instruction(struct vtn_builder *b, uint32_t ext_opcode,
                                   const uint32_t *words, unsigned count);

struct vtn_builder* vtn_create_builder(const uint32_t *words, size_t word_count,
                                       gl_shader_stage stage, const char *entry_point_name,
                                       const struct spirv_to_nir_options *options);

void vtn_handle_entry_point(struct vtn_builder *b, const uint32_t *w,
                            unsigned count);

void vtn_handle_decoration(struct vtn_builder *b, SpvOp opcode,
                           const uint32_t *w, unsigned count);

static inline uint32_t
vtn_align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -((int32_t) a)));
   return (v + a - 1) & ~(a - 1);
}
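
/* The assert above requires `a` to be a nonzero power of two; rounding up is
 * then a matter of adding a - 1 and masking off the low bits, e.g.
 * vtn_align_u32(13, 8) == 16 and vtn_align_u32(16, 8) == 16.
 */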

static inline uint64_t
vtn_u64_literal(const uint32_t *w)
{
   return (uint64_t)w[1] << 32 | w[0];
}
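
/* SPIR-V stores 64-bit literals low-order word first, so for the literal
 * 0x0123456789abcdef, w[0] == 0x89abcdef and w[1] == 0x01234567.
 */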

bool vtn_handle_amd_gcn_shader_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                           const uint32_t *words, unsigned count);

bool vtn_handle_amd_shader_trinary_minmax_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                                      const uint32_t *words, unsigned count);
#endif /* _VTN_PRIVATE_H_ */