vars_tests.cpp revision 01e04c3f
/*
 * Copyright © 2018 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <gtest/gtest.h>

#include "nir.h"
#include "nir_builder.h"

namespace {

class nir_vars_test : public ::testing::Test {
protected:
   nir_vars_test();
   ~nir_vars_test();

   nir_variable *create_int(nir_variable_mode mode, const char *name) {
      if (mode == nir_var_local)
         return nir_local_variable_create(b->impl, glsl_int_type(), name);
      return nir_variable_create(b->shader, mode, glsl_int_type(), name);
   }

   nir_variable *create_ivec2(nir_variable_mode mode, const char *name) {
      const glsl_type *var_type = glsl_vector_type(GLSL_TYPE_INT, 2);
      if (mode == nir_var_local)
         return nir_local_variable_create(b->impl, var_type, name);
      return nir_variable_create(b->shader, mode, var_type, name);
   }

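   /* The *_int and *_ivec2 helpers below create `count` variables named
    * "<prefix>0", "<prefix>1", ...; the returned array is allocated from the
    * test's linear context.
    */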
   nir_variable **create_many_int(nir_variable_mode mode, const char *prefix, unsigned count) {
      nir_variable **result = (nir_variable **)linear_alloc_child(lin_ctx, sizeof(nir_variable *) * count);
      for (unsigned i = 0; i < count; i++)
         result[i] = create_int(mode, linear_asprintf(lin_ctx, "%s%u", prefix, i));
      return result;
   }

   nir_variable **create_many_ivec2(nir_variable_mode mode, const char *prefix, unsigned count) {
      nir_variable **result = (nir_variable **)linear_alloc_child(lin_ctx, sizeof(nir_variable *) * count);
      for (unsigned i = 0; i < count; i++)
         result[i] = create_ivec2(mode, linear_asprintf(lin_ctx, "%s%u", prefix, i));
      return result;
   }

   unsigned count_intrinsics(nir_intrinsic_op intrinsic);

   nir_intrinsic_instr *find_next_intrinsic(nir_intrinsic_op intrinsic,
                                            nir_intrinsic_instr *after);

   void *mem_ctx;
   void *lin_ctx;

   nir_builder *b;
};

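/* The fixture builds an empty fragment shader with nir_builder; when a test
 * fails, the destructor prints the resulting shader to ease debugging.
 */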
nir_vars_test::nir_vars_test()
{
   mem_ctx = ralloc_context(NULL);
   lin_ctx = linear_alloc_parent(mem_ctx, 0);
   static const nir_shader_compiler_options options = { };
   b = rzalloc(mem_ctx, nir_builder);
   nir_builder_init_simple_shader(b, mem_ctx, MESA_SHADER_FRAGMENT, &options);
}

nir_vars_test::~nir_vars_test()
{
   if (HasFailure()) {
      printf("\nShader from the failed test:\n\n");
      nir_print_shader(b->shader, stdout);
   }

   ralloc_free(mem_ctx);
}

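/* Count the intrinsics of the given type in the whole shader. */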
unsigned
nir_vars_test::count_intrinsics(nir_intrinsic_op intrinsic)
{
   unsigned count = 0;
   nir_foreach_block(block, b->impl) {
      nir_foreach_instr(instr, block) {
         if (instr->type != nir_instr_type_intrinsic)
            continue;
         nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
         if (intrin->intrinsic == intrinsic)
            count++;
      }
   }
   return count;
}

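/* Return the first intrinsic of the given type that appears after `after`
 * (in block order), or the first one in the shader when `after` is NULL.
 */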
nir_intrinsic_instr *
nir_vars_test::find_next_intrinsic(nir_intrinsic_op intrinsic,
                                   nir_intrinsic_instr *after)
{
   bool seen = after == NULL;
   nir_foreach_block(block, b->impl) {
      /* Skip blocks before the 'after' instruction. */
      if (!seen && block != after->instr.block)
         continue;
      nir_foreach_instr(instr, block) {
         if (instr->type != nir_instr_type_intrinsic)
            continue;
         nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
         if (!seen) {
            seen = (after == intrin);
            continue;
         }
         if (intrin->intrinsic == intrinsic)
            return intrin;
      }
   }
   return NULL;
}

/* Allow grouping the tests while still sharing the helpers. */
class nir_redundant_load_vars_test : public nir_vars_test {};
class nir_copy_prop_vars_test : public nir_vars_test {};
class nir_dead_write_vars_test : public nir_vars_test {};

} // namespace

TEST_F(nir_redundant_load_vars_test, duplicated_load)
{
   /* Load a variable twice in the same block.  One should be removed. */

   nir_variable *in = create_int(nir_var_shader_in, "in");
   nir_variable **out = create_many_int(nir_var_shader_out, "out", 2);

   nir_store_var(b, out[0], nir_load_var(b, in), 1);
   nir_store_var(b, out[1], nir_load_var(b, in), 1);

   nir_validate_shader(b->shader, NULL);

   ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref), 2);

   bool progress = nir_opt_copy_prop_vars(b->shader);
   EXPECT_TRUE(progress);

   nir_validate_shader(b->shader, NULL);

   ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref), 1);
}

TEST_F(nir_redundant_load_vars_test, duplicated_load_in_two_blocks)
{
   /* Load a variable twice in different blocks.  One should be removed. */

   nir_variable *in = create_int(nir_var_shader_in, "in");
   nir_variable **out = create_many_int(nir_var_shader_out, "out", 2);

   nir_store_var(b, out[0], nir_load_var(b, in), 1);

   /* Forces the stores to be in different blocks. */
   nir_pop_if(b, nir_push_if(b, nir_imm_int(b, 0)));

   nir_store_var(b, out[1], nir_load_var(b, in), 1);

   nir_validate_shader(b->shader, NULL);

   ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref), 2);

   bool progress = nir_opt_copy_prop_vars(b->shader);
   EXPECT_TRUE(progress);

   nir_validate_shader(b->shader, NULL);

   ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref), 1);
}

TEST_F(nir_redundant_load_vars_test, invalidate_inside_if_block)
{
   /* Load variables, then write to some of them in different branches of an
    * if statement.  The written variables should be invalidated accordingly.
    */

   nir_variable **g = create_many_int(nir_var_global, "g", 3);
   nir_variable **out = create_many_int(nir_var_shader_out, "out", 3);

   nir_load_var(b, g[0]);
   nir_load_var(b, g[1]);
   nir_load_var(b, g[2]);

   nir_if *if_stmt = nir_push_if(b, nir_imm_int(b, 0));
   nir_store_var(b, g[0], nir_imm_int(b, 10), 1);

   nir_push_else(b, if_stmt);
   nir_store_var(b, g[1], nir_imm_int(b, 20), 1);

   nir_pop_if(b, if_stmt);

   nir_store_var(b, out[0], nir_load_var(b, g[0]), 1);
   nir_store_var(b, out[1], nir_load_var(b, g[1]), 1);
   nir_store_var(b, out[2], nir_load_var(b, g[2]), 1);

   nir_validate_shader(b->shader, NULL);

   bool progress = nir_opt_copy_prop_vars(b->shader);
   EXPECT_TRUE(progress);

   /* There are 3 initial loads, plus 2 loads for the values invalidated
    * inside the if statement.
    */
   ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref), 5);

   /* We only load g[2] once. */
   unsigned g2_load_count = 0;
   nir_intrinsic_instr *load = NULL;
   for (int i = 0; i < 5; i++) {
      load = find_next_intrinsic(nir_intrinsic_load_deref, load);
      if (nir_intrinsic_get_var(load, 0) == g[2])
         g2_load_count++;
   }
   EXPECT_EQ(g2_load_count, 1);
}

TEST_F(nir_redundant_load_vars_test, invalidate_live_load_in_the_end_of_loop)
{
   /* Invalidating a load at the end of the loop body applies to the whole
    * loop body.
    */

   nir_variable *v = create_int(nir_var_shader_storage, "v");

   nir_load_var(b, v);

   nir_loop *loop = nir_push_loop(b);

   nir_if *if_stmt = nir_push_if(b, nir_imm_int(b, 0));
   nir_jump(b, nir_jump_break);
   nir_pop_if(b, if_stmt);

   nir_load_var(b, v);
   nir_store_var(b, v, nir_imm_int(b, 10), 1);

   nir_pop_loop(b, loop);

   bool progress = nir_opt_copy_prop_vars(b->shader);
   ASSERT_FALSE(progress);
}

TEST_F(nir_copy_prop_vars_test, simple_copies)
{
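   /* Propagate `in` through `temp`: after the pass both copies should read
    * from the same source deref.
    */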
   nir_variable *in   = create_int(nir_var_shader_in,  "in");
   nir_variable *temp = create_int(nir_var_local,      "temp");
   nir_variable *out  = create_int(nir_var_shader_out, "out");

   nir_copy_var(b, temp, in);
   nir_copy_var(b, out, temp);

   nir_validate_shader(b->shader, NULL);

   bool progress = nir_opt_copy_prop_vars(b->shader);
   EXPECT_TRUE(progress);

   nir_validate_shader(b->shader, NULL);

   nir_intrinsic_instr *copy = NULL;
   copy = find_next_intrinsic(nir_intrinsic_copy_deref, copy);
   ASSERT_TRUE(copy->src[1].is_ssa);
   nir_ssa_def *first_src = copy->src[1].ssa;

   copy = find_next_intrinsic(nir_intrinsic_copy_deref, copy);
   ASSERT_TRUE(copy->src[1].is_ssa);
   EXPECT_EQ(copy->src[1].ssa, first_src);
}

TEST_F(nir_copy_prop_vars_test, simple_store_load)
{
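   /* Store a value to v[0], load it back, and store it to v[1].  After the
    * pass both stores should use the original SSA value directly.
    */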
   nir_variable **v = create_many_ivec2(nir_var_local, "v", 2);
   unsigned mask = 1 | 2;

   nir_ssa_def *stored_value = nir_imm_ivec2(b, 10, 20);
   nir_store_var(b, v[0], stored_value, mask);

   nir_ssa_def *read_value = nir_load_var(b, v[0]);
   nir_store_var(b, v[1], read_value, mask);

   nir_validate_shader(b->shader, NULL);

   bool progress = nir_opt_copy_prop_vars(b->shader);
   EXPECT_TRUE(progress);

   nir_validate_shader(b->shader, NULL);

   ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref), 2);

   nir_intrinsic_instr *store = NULL;
   for (int i = 0; i < 2; i++) {
      store = find_next_intrinsic(nir_intrinsic_store_deref, store);
      ASSERT_TRUE(store->src[1].is_ssa);
      EXPECT_EQ(store->src[1].ssa, stored_value);
   }
}

TEST_F(nir_copy_prop_vars_test, store_store_load)
{
   nir_variable **v = create_many_ivec2(nir_var_local, "v", 2);
   unsigned mask = 1 | 2;

   nir_ssa_def *first_value = nir_imm_ivec2(b, 10, 20);
   nir_store_var(b, v[0], first_value, mask);

   nir_ssa_def *second_value = nir_imm_ivec2(b, 30, 40);
   nir_store_var(b, v[0], second_value, mask);

   nir_ssa_def *read_value = nir_load_var(b, v[0]);
   nir_store_var(b, v[1], read_value, mask);

   nir_validate_shader(b->shader, NULL);

   bool progress = nir_opt_copy_prop_vars(b->shader);
   EXPECT_TRUE(progress);

   nir_validate_shader(b->shader, NULL);

   /* Store to v[1] should use second_value directly. */
   nir_intrinsic_instr *store_to_v1 = NULL;
   while ((store_to_v1 = find_next_intrinsic(nir_intrinsic_store_deref, store_to_v1)) != NULL) {
      if (nir_intrinsic_get_var(store_to_v1, 0) == v[1]) {
         ASSERT_TRUE(store_to_v1->src[1].is_ssa);
         EXPECT_EQ(store_to_v1->src[1].ssa, second_value);
         break;
      }
   }
   EXPECT_TRUE(store_to_v1);
}

TEST_F(nir_copy_prop_vars_test, store_store_load_different_components)
{
   nir_variable **v = create_many_ivec2(nir_var_local, "v", 2);

   nir_ssa_def *first_value = nir_imm_ivec2(b, 10, 20);
   nir_store_var(b, v[0], first_value, 1 << 1);

   nir_ssa_def *second_value = nir_imm_ivec2(b, 30, 40);
   nir_store_var(b, v[0], second_value, 1 << 0);

   nir_ssa_def *read_value = nir_load_var(b, v[0]);
   nir_store_var(b, v[1], read_value, 1 << 1);

   nir_validate_shader(b->shader, NULL);

   bool progress = nir_opt_copy_prop_vars(b->shader);
   EXPECT_TRUE(progress);

   nir_validate_shader(b->shader, NULL);

   nir_opt_constant_folding(b->shader);
   nir_validate_shader(b->shader, NULL);

   /* Store to v[1] should use first_value directly.  The write of
    * second_value did not overwrite the component it uses.
    */
   nir_intrinsic_instr *store_to_v1 = NULL;
   while ((store_to_v1 = find_next_intrinsic(nir_intrinsic_store_deref, store_to_v1)) != NULL) {
      if (nir_intrinsic_get_var(store_to_v1, 0) == v[1]) {
         ASSERT_TRUE(store_to_v1->src[1].is_ssa);

         ASSERT_TRUE(nir_src_is_const(store_to_v1->src[1]));
         ASSERT_EQ(nir_src_comp_as_uint(store_to_v1->src[1], 1), 20);
         break;
      }
   }
   EXPECT_TRUE(store_to_v1);
}

TEST_F(nir_copy_prop_vars_test, store_store_load_different_components_in_many_blocks)
{
   nir_variable **v = create_many_ivec2(nir_var_local, "v", 2);

   nir_ssa_def *first_value = nir_imm_ivec2(b, 10, 20);
   nir_store_var(b, v[0], first_value, 1 << 1);

   /* Adding an if statement will cause blocks to be created. */
   nir_pop_if(b, nir_push_if(b, nir_imm_int(b, 0)));

   nir_ssa_def *second_value = nir_imm_ivec2(b, 30, 40);
   nir_store_var(b, v[0], second_value, 1 << 0);

   /* Adding an if statement will cause blocks to be created. */
   nir_pop_if(b, nir_push_if(b, nir_imm_int(b, 0)));

   nir_ssa_def *read_value = nir_load_var(b, v[0]);
   nir_store_var(b, v[1], read_value, 1 << 1);

   nir_validate_shader(b->shader, NULL);

   nir_print_shader(b->shader, stdout);

   bool progress = nir_opt_copy_prop_vars(b->shader);
   EXPECT_TRUE(progress);

   nir_print_shader(b->shader, stdout);

   nir_validate_shader(b->shader, NULL);

   nir_opt_constant_folding(b->shader);
   nir_validate_shader(b->shader, NULL);

   /* Store to v[1] should use first_value directly.  The write of
    * second_value did not overwrite the component it uses.
    */
   nir_intrinsic_instr *store_to_v1 = NULL;
   while ((store_to_v1 = find_next_intrinsic(nir_intrinsic_store_deref, store_to_v1)) != NULL) {
      if (nir_intrinsic_get_var(store_to_v1, 0) == v[1]) {
         ASSERT_TRUE(store_to_v1->src[1].is_ssa);

         ASSERT_TRUE(nir_src_is_const(store_to_v1->src[1]));
         ASSERT_EQ(nir_src_comp_as_uint(store_to_v1->src[1], 1), 20);
         break;
      }
   }
   EXPECT_TRUE(store_to_v1);
}

TEST_F(nir_copy_prop_vars_test, memory_barrier_in_two_blocks)
{
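   /* The load of v[0] happens before the barrier, so the stored value can be
    * propagated into it.  The barrier invalidates the cached values, so the
    * load of v[1] afterwards must remain.
    */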
   nir_variable **v = create_many_int(nir_var_shader_storage, "v", 4);

   nir_store_var(b, v[0], nir_imm_int(b, 1), 1);
   nir_store_var(b, v[1], nir_imm_int(b, 2), 1);

   /* Split into many blocks. */
   nir_pop_if(b, nir_push_if(b, nir_imm_int(b, 0)));

   nir_store_var(b, v[2], nir_load_var(b, v[0]), 1);

   nir_builder_instr_insert(b, &nir_intrinsic_instr_create(b->shader, nir_intrinsic_memory_barrier)->instr);

   nir_store_var(b, v[3], nir_load_var(b, v[1]), 1);

   bool progress = nir_opt_copy_prop_vars(b->shader);
   ASSERT_TRUE(progress);

   /* Only the second load will remain after the optimization. */
   ASSERT_EQ(1, count_intrinsics(nir_intrinsic_load_deref));
   nir_intrinsic_instr *load = NULL;
   load = find_next_intrinsic(nir_intrinsic_load_deref, load);
   ASSERT_EQ(nir_intrinsic_get_var(load, 0), v[1]);
}

TEST_F(nir_copy_prop_vars_test, simple_store_load_in_two_blocks)
{
   nir_variable **v = create_many_ivec2(nir_var_local, "v", 2);
   unsigned mask = 1 | 2;

   nir_ssa_def *stored_value = nir_imm_ivec2(b, 10, 20);
   nir_store_var(b, v[0], stored_value, mask);

   /* Adding an if statement will cause blocks to be created. */
   nir_pop_if(b, nir_push_if(b, nir_imm_int(b, 0)));

   nir_ssa_def *read_value = nir_load_var(b, v[0]);
   nir_store_var(b, v[1], read_value, mask);

   nir_validate_shader(b->shader, NULL);

   bool progress = nir_opt_copy_prop_vars(b->shader);
   EXPECT_TRUE(progress);

   nir_validate_shader(b->shader, NULL);

   ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref), 2);

   nir_intrinsic_instr *store = NULL;
   for (int i = 0; i < 2; i++) {
      store = find_next_intrinsic(nir_intrinsic_store_deref, store);
      ASSERT_TRUE(store->src[1].is_ssa);
      EXPECT_EQ(store->src[1].ssa, stored_value);
   }
}

TEST_F(nir_dead_write_vars_test, no_dead_writes_in_block)
{
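   /* The only write to v[0] is never overwritten, so nothing is dead. */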
   nir_variable **v = create_many_int(nir_var_shader_storage, "v", 2);

   nir_store_var(b, v[0], nir_load_var(b, v[1]), 1);

   bool progress = nir_opt_dead_write_vars(b->shader);
   ASSERT_FALSE(progress);
}

TEST_F(nir_dead_write_vars_test, no_dead_writes_different_components_in_block)
{
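   /* The two writes touch different components of v[0], so neither is dead. */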
   nir_variable **v = create_many_ivec2(nir_var_shader_storage, "v", 3);

   nir_store_var(b, v[0], nir_load_var(b, v[1]), 1 << 0);
   nir_store_var(b, v[0], nir_load_var(b, v[2]), 1 << 1);

   bool progress = nir_opt_dead_write_vars(b->shader);
   ASSERT_FALSE(progress);
}

TEST_F(nir_dead_write_vars_test, no_dead_writes_in_if_statement)
{
   nir_variable **v = create_many_int(nir_var_shader_storage, "v", 6);

   nir_store_var(b, v[2], nir_load_var(b, v[0]), 1);
   nir_store_var(b, v[3], nir_load_var(b, v[1]), 1);

   /* Each arm of the if statement will overwrite one store. */
   nir_if *if_stmt = nir_push_if(b, nir_imm_int(b, 0));
   nir_store_var(b, v[2], nir_load_var(b, v[4]), 1);

   nir_push_else(b, if_stmt);
   nir_store_var(b, v[3], nir_load_var(b, v[5]), 1);

   nir_pop_if(b, if_stmt);

   bool progress = nir_opt_dead_write_vars(b->shader);
   ASSERT_FALSE(progress);
}

TEST_F(nir_dead_write_vars_test, no_dead_writes_in_loop_statement)
{
   nir_variable **v = create_many_int(nir_var_shader_storage, "v", 3);

   nir_store_var(b, v[0], nir_load_var(b, v[1]), 1);

   /* The loop writes another value to v[0].  Since the loop body might not be
    * executed, it doesn't kill the first write.
    */
   nir_loop *loop = nir_push_loop(b);

   nir_if *if_stmt = nir_push_if(b, nir_imm_int(b, 0));
   nir_jump(b, nir_jump_break);
   nir_pop_if(b, if_stmt);

   nir_store_var(b, v[0], nir_load_var(b, v[2]), 1);
   nir_pop_loop(b, loop);

   bool progress = nir_opt_dead_write_vars(b->shader);
   ASSERT_FALSE(progress);
}

TEST_F(nir_dead_write_vars_test, dead_write_in_block)
{
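   /* The second write to v[0] overwrites the first one before any read, so
    * the first store is dead and should be removed.
    */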
   nir_variable **v = create_many_int(nir_var_shader_storage, "v", 3);

   nir_store_var(b, v[0], nir_load_var(b, v[1]), 1);
   nir_ssa_def *load_v2 = nir_load_var(b, v[2]);
   nir_store_var(b, v[0], load_v2, 1);

   bool progress = nir_opt_dead_write_vars(b->shader);
   ASSERT_TRUE(progress);

   EXPECT_EQ(1, count_intrinsics(nir_intrinsic_store_deref));

   nir_intrinsic_instr *store = find_next_intrinsic(nir_intrinsic_store_deref, NULL);
   ASSERT_TRUE(store->src[1].is_ssa);
   EXPECT_EQ(store->src[1].ssa, load_v2);
}

TEST_F(nir_dead_write_vars_test, dead_write_components_in_block)
{
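   /* Both writes hit the same component of v[0], so the first one is dead. */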
   nir_variable **v = create_many_ivec2(nir_var_shader_storage, "v", 3);

   nir_store_var(b, v[0], nir_load_var(b, v[1]), 1 << 0);
   nir_ssa_def *load_v2 = nir_load_var(b, v[2]);
   nir_store_var(b, v[0], load_v2, 1 << 0);

   bool progress = nir_opt_dead_write_vars(b->shader);
   ASSERT_TRUE(progress);

   EXPECT_EQ(1, count_intrinsics(nir_intrinsic_store_deref));

   nir_intrinsic_instr *store = find_next_intrinsic(nir_intrinsic_store_deref, NULL);
   ASSERT_TRUE(store->src[1].is_ssa);
   EXPECT_EQ(store->src[1].ssa, load_v2);
}


/* TODO: The DISABLED tests below depend on the dead write removal being able
 * to identify dead writes across multiple blocks.  This is not implemented
 * yet.
 */

TEST_F(nir_dead_write_vars_test, DISABLED_dead_write_in_two_blocks)
{
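   /* Same as dead_write_in_block, but the overwriting store happens in a
    * later block.
    */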
   nir_variable **v = create_many_int(nir_var_shader_storage, "v", 3);

   nir_store_var(b, v[0], nir_load_var(b, v[1]), 1);
   nir_ssa_def *load_v2 = nir_load_var(b, v[2]);

   /* Causes the stores to be in different blocks. */
   nir_pop_if(b, nir_push_if(b, nir_imm_int(b, 0)));

   nir_store_var(b, v[0], load_v2, 1);

   bool progress = nir_opt_dead_write_vars(b->shader);
   ASSERT_TRUE(progress);

   EXPECT_EQ(1, count_intrinsics(nir_intrinsic_store_deref));

   nir_intrinsic_instr *store = find_next_intrinsic(nir_intrinsic_store_deref, NULL);
   ASSERT_TRUE(store->src[1].is_ssa);
   EXPECT_EQ(store->src[1].ssa, load_v2);
}

TEST_F(nir_dead_write_vars_test, DISABLED_dead_write_components_in_two_blocks)
{
   nir_variable **v = create_many_ivec2(nir_var_shader_storage, "v", 3);

   nir_store_var(b, v[0], nir_load_var(b, v[1]), 1 << 0);

   /* Causes the stores to be in different blocks. */
   nir_pop_if(b, nir_push_if(b, nir_imm_int(b, 0)));

   nir_ssa_def *load_v2 = nir_load_var(b, v[2]);
   nir_store_var(b, v[0], load_v2, 1 << 0);

   bool progress = nir_opt_dead_write_vars(b->shader);
   ASSERT_TRUE(progress);

   EXPECT_EQ(1, count_intrinsics(nir_intrinsic_store_deref));

   nir_intrinsic_instr *store = find_next_intrinsic(nir_intrinsic_store_deref, NULL);
   ASSERT_TRUE(store->src[1].is_ssa);
   EXPECT_EQ(store->src[1].ssa, load_v2);
}

TEST_F(nir_dead_write_vars_test, DISABLED_dead_writes_in_if_statement)
{
   nir_variable **v = create_many_int(nir_var_shader_storage, "v", 4);

   /* Both branches will overwrite, making the previous store dead. */
   nir_store_var(b, v[0], nir_load_var(b, v[1]), 1);

   nir_if *if_stmt = nir_push_if(b, nir_imm_int(b, 0));
   nir_ssa_def *load_v2 = nir_load_var(b, v[2]);
   nir_store_var(b, v[0], load_v2, 1);

   nir_push_else(b, if_stmt);
   nir_ssa_def *load_v3 = nir_load_var(b, v[3]);
   nir_store_var(b, v[0], load_v3, 1);

   nir_pop_if(b, if_stmt);

   bool progress = nir_opt_dead_write_vars(b->shader);
   ASSERT_TRUE(progress);
   EXPECT_EQ(2, count_intrinsics(nir_intrinsic_store_deref));

   nir_intrinsic_instr *store = NULL;
   store = find_next_intrinsic(nir_intrinsic_store_deref, store);
   ASSERT_TRUE(store->src[1].is_ssa);
   EXPECT_EQ(store->src[1].ssa, load_v2);

   store = find_next_intrinsic(nir_intrinsic_store_deref, store);
   ASSERT_TRUE(store->src[1].is_ssa);
   EXPECT_EQ(store->src[1].ssa, load_v3);
}

TEST_F(nir_dead_write_vars_test, DISABLED_memory_barrier_in_two_blocks)
{
   nir_variable **v = create_many_int(nir_var_shader_storage, "v", 2);

   nir_store_var(b, v[0], nir_imm_int(b, 1), 1);
   nir_store_var(b, v[1], nir_imm_int(b, 2), 1);

   /* Split into many blocks. */
   nir_pop_if(b, nir_push_if(b, nir_imm_int(b, 0)));

   /* Because it is before the barrier, this will kill the previous store to that target. */
   nir_store_var(b, v[0], nir_imm_int(b, 3), 1);

   nir_builder_instr_insert(b, &nir_intrinsic_instr_create(b->shader, nir_intrinsic_memory_barrier)->instr);

   nir_store_var(b, v[1], nir_imm_int(b, 4), 1);

   bool progress = nir_opt_dead_write_vars(b->shader);
   ASSERT_TRUE(progress);

   EXPECT_EQ(3, count_intrinsics(nir_intrinsic_store_deref));
}

TEST_F(nir_dead_write_vars_test, DISABLED_unrelated_barrier_in_two_blocks)
{
   nir_variable **v = create_many_int(nir_var_shader_storage, "v", 3);
   nir_variable *out = create_int(nir_var_shader_out, "out");

   nir_store_var(b, out, nir_load_var(b, v[1]), 1);
   nir_store_var(b, v[0], nir_load_var(b, v[1]), 1);

   /* Split into many blocks. */
   nir_pop_if(b, nir_push_if(b, nir_imm_int(b, 0)));

   /* The emit_vertex intrinsic ensures writes to output variables are
    * considered used, but it should not affect variables of other modes. */

   nir_builder_instr_insert(b, &nir_intrinsic_instr_create(b->shader, nir_intrinsic_emit_vertex)->instr);

   nir_store_var(b, out, nir_load_var(b, v[2]), 1);
   nir_store_var(b, v[0], nir_load_var(b, v[2]), 1);

   bool progress = nir_opt_dead_write_vars(b->shader);
   ASSERT_TRUE(progress);

   /* Verify the first write to v[0] was removed. */
   EXPECT_EQ(3, count_intrinsics(nir_intrinsic_store_deref));

   nir_intrinsic_instr *store = NULL;
   store = find_next_intrinsic(nir_intrinsic_store_deref, store);
   EXPECT_EQ(nir_intrinsic_get_var(store, 0), out);
   store = find_next_intrinsic(nir_intrinsic_store_deref, store);
   EXPECT_EQ(nir_intrinsic_get_var(store, 0), out);
   store = find_next_intrinsic(nir_intrinsic_store_deref, store);
   EXPECT_EQ(nir_intrinsic_get_var(store, 0), v[0]);
}
727