1b8e80941Smrg/*
2b8e80941Smrg * Copyright © 2010 Luca Barbieri
3b8e80941Smrg *
4b8e80941Smrg * Permission is hereby granted, free of charge, to any person obtaining a
5b8e80941Smrg * copy of this software and associated documentation files (the "Software"),
6b8e80941Smrg * to deal in the Software without restriction, including without limitation
7b8e80941Smrg * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8b8e80941Smrg * and/or sell copies of the Software, and to permit persons to whom the
9b8e80941Smrg * Software is furnished to do so, subject to the following conditions:
10b8e80941Smrg *
11b8e80941Smrg * The above copyright notice and this permission notice (including the next
12b8e80941Smrg * paragraph) shall be included in all copies or substantial portions of the
13b8e80941Smrg * Software.
14b8e80941Smrg *
15b8e80941Smrg * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16b8e80941Smrg * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17b8e80941Smrg * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18b8e80941Smrg * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19b8e80941Smrg * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20b8e80941Smrg * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21b8e80941Smrg * DEALINGS IN THE SOFTWARE.
22b8e80941Smrg */
23b8e80941Smrg
24b8e80941Smrg/**
25b8e80941Smrg * \file lower_jumps.cpp
26b8e80941Smrg *
27b8e80941Smrg * This pass lowers jumps (break, continue, and return) to if/else structures.
28b8e80941Smrg *
29b8e80941Smrg * It can be asked to:
30b8e80941Smrg * 1. Pull jumps out of ifs where possible
31b8e80941Smrg * 2. Remove all "continue"s, replacing them with an "execute flag"
32b8e80941Smrg * 3. Replace all "break" with a single conditional one at the end of the loop
33b8e80941Smrg * 4. Replace all "return"s with a single return at the end of the function,
34b8e80941Smrg *    for the main function and/or other functions
35b8e80941Smrg *
36b8e80941Smrg * Applying this pass gives several benefits:
37b8e80941Smrg * 1. All functions can be inlined.
38b8e80941Smrg * 2. nv40 and other pre-DX10 chips without "continue" can be supported
39b8e80941Smrg * 3. nv30 and other pre-DX10 chips with no control flow at all are better
40b8e80941Smrg *    supported
41b8e80941Smrg *
42b8e80941Smrg * Continues are lowered by adding a per-loop "execute flag", initialized to
43b8e80941Smrg * true, that when cleared inhibits all execution until the end of the loop.
44b8e80941Smrg *
 * Breaks are lowered to continues, plus setting a "break flag" that is checked
 * at the end of the loop and triggers the unique "break".
47b8e80941Smrg *
48b8e80941Smrg * Returns are lowered to breaks/continues, plus adding a "return flag" that
49b8e80941Smrg * causes loops to break again out of their enclosing loops until all the
50b8e80941Smrg * loops are exited: then the "execute flag" logic will ignore everything
51b8e80941Smrg * until the end of the function.
52b8e80941Smrg *
53b8e80941Smrg * Note that "continue" and "return" can also be implemented by adding
54b8e80941Smrg * a dummy loop and using break.
55b8e80941Smrg * However, this is bad for hardware with limited nesting depth, and
56b8e80941Smrg * prevents further optimization, and thus is not currently performed.
57b8e80941Smrg */
58b8e80941Smrg
59b8e80941Smrg#include "compiler/glsl_types.h"
60b8e80941Smrg#include <string.h>
61b8e80941Smrg#include "ir.h"
62b8e80941Smrg
63b8e80941Smrg/**
64b8e80941Smrg * Enum recording the result of analyzing how control flow might exit
65b8e80941Smrg * an IR node.
66b8e80941Smrg *
67b8e80941Smrg * Each possible value of jump_strength indicates a strictly stronger
68b8e80941Smrg * guarantee on control flow than the previous value.
69b8e80941Smrg *
70b8e80941Smrg * The ordering of strengths roughly reflects the way jumps are
71b8e80941Smrg * lowered: jumps with higher strength tend to be lowered to jumps of
72b8e80941Smrg * lower strength.  Accordingly, strength is used as a heuristic to
73b8e80941Smrg * determine which lowering to perform first.
74b8e80941Smrg *
75b8e80941Smrg * This enum is also used by get_jump_strength() to categorize
76b8e80941Smrg * instructions as either break, continue, return, or other.  When
77b8e80941Smrg * used in this fashion, strength_always_clears_execute_flag is not
78b8e80941Smrg * used.
79b8e80941Smrg *
80b8e80941Smrg * The control flow analysis made by this optimization pass makes two
81b8e80941Smrg * simplifying assumptions:
82b8e80941Smrg *
83b8e80941Smrg * - It ignores discard instructions, since they are lowered by a
84b8e80941Smrg *   separate pass (lower_discard.cpp).
85b8e80941Smrg *
86b8e80941Smrg * - It assumes it is always possible for control to flow from a loop
87b8e80941Smrg *   to the instruction immediately following it.  Technically, this
88b8e80941Smrg *   is not true (since all execution paths through the loop might
89b8e80941Smrg *   jump back to the top, or return from the function).
90b8e80941Smrg *
 * Both of these simplifying assumptions are safe, since they can never
92b8e80941Smrg * cause reachable code to be incorrectly classified as unreachable;
93b8e80941Smrg * they can only do the opposite.
94b8e80941Smrg */
enum jump_strength
{
   /**
    * Analysis has produced no guarantee on how control flow might
    * exit this IR node.  It might fall out the bottom (with or
    * without clearing the execute flag, if present), or it might
    * continue to the top of the innermost enclosing loop, break out
    * of it, or return from the function.
    */
   strength_none,

   /**
    * The only way control can fall out the bottom of this node is
    * through a code path that clears the execute flag.  It might also
    * continue to the top of the innermost enclosing loop, break out
    * of it, or return from the function.
    *
    * Note: per the file comment above, get_jump_strength() never
    * classifies an instruction with this value; it only arises from
    * block-level analysis.
    */
   strength_always_clears_execute_flag,

   /**
    * Control cannot fall out the bottom of this node.  It might
    * continue to the top of the innermost enclosing loop, break out
    * of it, or return from the function.
    */
   strength_continue,

   /**
    * Control cannot fall out the bottom of this node, or continue to
    * the top of the innermost enclosing loop.  It can only break out
    * of it or return from the function.
    */
   strength_break,

   /**
    * Control cannot fall out the bottom of this node, continue to the
    * top of the innermost enclosing loop, or break out of it.  It can
    * only return from the function.  This is the strongest guarantee.
    */
   strength_return
};
135b8e80941Smrg
136b8e80941Smrgnamespace {
137b8e80941Smrg
138b8e80941Smrgstruct block_record
139b8e80941Smrg{
140b8e80941Smrg   /* minimum jump strength (of lowered IR, not pre-lowering IR)
141b8e80941Smrg    *
142b8e80941Smrg    * If the block ends with a jump, must be the strength of the jump.
143b8e80941Smrg    * Otherwise, the jump would be dead and have been deleted before)
144b8e80941Smrg    *
145b8e80941Smrg    * If the block doesn't end with a jump, it can be different than strength_none if all paths before it lead to some jump
146b8e80941Smrg    * (e.g. an if with a return in one branch, and a break in the other, while not lowering them)
147b8e80941Smrg    * Note that identical jumps are usually unified though.
148b8e80941Smrg    */
149b8e80941Smrg   jump_strength min_strength;
150b8e80941Smrg
151b8e80941Smrg   /* can anything clear the execute flag? */
152b8e80941Smrg   bool may_clear_execute_flag;
153b8e80941Smrg
154b8e80941Smrg   block_record()
155b8e80941Smrg   {
156b8e80941Smrg      this->min_strength = strength_none;
157b8e80941Smrg      this->may_clear_execute_flag = false;
158b8e80941Smrg   }
159b8e80941Smrg};
160b8e80941Smrg
161b8e80941Smrgstruct loop_record
162b8e80941Smrg{
163b8e80941Smrg   ir_function_signature* signature;
164b8e80941Smrg   ir_loop* loop;
165b8e80941Smrg
166b8e80941Smrg   /* used to avoid lowering the break used to represent lowered breaks */
167b8e80941Smrg   unsigned nesting_depth;
168b8e80941Smrg   bool in_if_at_the_end_of_the_loop;
169b8e80941Smrg
170b8e80941Smrg   bool may_set_return_flag;
171b8e80941Smrg
172b8e80941Smrg   ir_variable* break_flag;
173b8e80941Smrg   ir_variable* execute_flag; /* cleared to emulate continue */
174b8e80941Smrg
175b8e80941Smrg   loop_record(ir_function_signature* p_signature = 0, ir_loop* p_loop = 0)
176b8e80941Smrg   {
177b8e80941Smrg      this->signature = p_signature;
178b8e80941Smrg      this->loop = p_loop;
179b8e80941Smrg      this->nesting_depth = 0;
180b8e80941Smrg      this->in_if_at_the_end_of_the_loop = false;
181b8e80941Smrg      this->may_set_return_flag = false;
182b8e80941Smrg      this->break_flag = 0;
183b8e80941Smrg      this->execute_flag = 0;
184b8e80941Smrg   }
185b8e80941Smrg
186b8e80941Smrg   ir_variable* get_execute_flag()
187b8e80941Smrg   {
188b8e80941Smrg      /* also supported for the "function loop" */
189b8e80941Smrg      if(!this->execute_flag) {
190b8e80941Smrg         exec_list& list = this->loop ? this->loop->body_instructions : signature->body;
191b8e80941Smrg         this->execute_flag = new(this->signature) ir_variable(glsl_type::bool_type, "execute_flag", ir_var_temporary);
192b8e80941Smrg         list.push_head(new(this->signature) ir_assignment(new(this->signature) ir_dereference_variable(execute_flag), new(this->signature) ir_constant(true)));
193b8e80941Smrg         list.push_head(this->execute_flag);
194b8e80941Smrg      }
195b8e80941Smrg      return this->execute_flag;
196b8e80941Smrg   }
197b8e80941Smrg
198b8e80941Smrg   ir_variable* get_break_flag()
199b8e80941Smrg   {
200b8e80941Smrg      assert(this->loop);
201b8e80941Smrg      if(!this->break_flag) {
202b8e80941Smrg         this->break_flag = new(this->signature) ir_variable(glsl_type::bool_type, "break_flag", ir_var_temporary);
203b8e80941Smrg         this->loop->insert_before(this->break_flag);
204b8e80941Smrg         this->loop->insert_before(new(this->signature) ir_assignment(new(this->signature) ir_dereference_variable(break_flag), new(this->signature) ir_constant(false)));
205b8e80941Smrg      }
206b8e80941Smrg      return this->break_flag;
207b8e80941Smrg   }
208b8e80941Smrg};
209b8e80941Smrg
210b8e80941Smrgstruct function_record
211b8e80941Smrg{
212b8e80941Smrg   ir_function_signature* signature;
213b8e80941Smrg   ir_variable* return_flag; /* used to break out of all loops and then jump to the return instruction */
214b8e80941Smrg   ir_variable* return_value;
215b8e80941Smrg   bool lower_return;
216b8e80941Smrg   unsigned nesting_depth;
217b8e80941Smrg
218b8e80941Smrg   function_record(ir_function_signature* p_signature = 0,
219b8e80941Smrg                   bool lower_return = false)
220b8e80941Smrg   {
221b8e80941Smrg      this->signature = p_signature;
222b8e80941Smrg      this->return_flag = 0;
223b8e80941Smrg      this->return_value = 0;
224b8e80941Smrg      this->nesting_depth = 0;
225b8e80941Smrg      this->lower_return = lower_return;
226b8e80941Smrg   }
227b8e80941Smrg
228b8e80941Smrg   ir_variable* get_return_flag()
229b8e80941Smrg   {
230b8e80941Smrg      if(!this->return_flag) {
231b8e80941Smrg         this->return_flag = new(this->signature) ir_variable(glsl_type::bool_type, "return_flag", ir_var_temporary);
232b8e80941Smrg         this->signature->body.push_head(new(this->signature) ir_assignment(new(this->signature) ir_dereference_variable(return_flag), new(this->signature) ir_constant(false)));
233b8e80941Smrg         this->signature->body.push_head(this->return_flag);
234b8e80941Smrg      }
235b8e80941Smrg      return this->return_flag;
236b8e80941Smrg   }
237b8e80941Smrg
238b8e80941Smrg   ir_variable* get_return_value()
239b8e80941Smrg   {
240b8e80941Smrg      if(!this->return_value) {
241b8e80941Smrg         assert(!this->signature->return_type->is_void());
242b8e80941Smrg         return_value = new(this->signature) ir_variable(this->signature->return_type, "return_value", ir_var_temporary);
243b8e80941Smrg         this->signature->body.push_head(this->return_value);
244b8e80941Smrg      }
245b8e80941Smrg      return this->return_value;
246b8e80941Smrg   }
247b8e80941Smrg};
248b8e80941Smrg
249b8e80941Smrgstruct ir_lower_jumps_visitor : public ir_control_flow_visitor {
250b8e80941Smrg   /* Postconditions: on exit of any visit() function:
251b8e80941Smrg    *
252b8e80941Smrg    * ANALYSIS: this->block.min_strength,
253b8e80941Smrg    * this->block.may_clear_execute_flag, and
254b8e80941Smrg    * this->loop.may_set_return_flag are updated to reflect the
255b8e80941Smrg    * characteristics of the visited statement.
256b8e80941Smrg    *
257b8e80941Smrg    * DEAD_CODE_ELIMINATION: If this->block.min_strength is not
258b8e80941Smrg    * strength_none, the visited node is at the end of its exec_list.
259b8e80941Smrg    * In other words, any unreachable statements that follow the
260b8e80941Smrg    * visited statement in its exec_list have been removed.
261b8e80941Smrg    *
262b8e80941Smrg    * CONTAINED_JUMPS_LOWERED: If the visited statement contains other
263b8e80941Smrg    * statements, then should_lower_jump() is false for all of the
264b8e80941Smrg    * return, break, or continue statements it contains.
265b8e80941Smrg    *
266b8e80941Smrg    * Note that visiting a jump does not lower it.  That is the
267b8e80941Smrg    * responsibility of the statement (or function signature) that
268b8e80941Smrg    * contains the jump.
269b8e80941Smrg    */
270b8e80941Smrg
   /* Set whenever this pass changes the IR in any way. */
   bool progress;

   /* Per-function, per-loop, and per-block analysis state for the
    * construct currently being visited.  Saved and restored as the
    * visitor descends (see e.g. visit_block()).
    */
   struct function_record function;
   struct loop_record loop;
   struct block_record block;

   /* Pass configuration: which lowerings are enabled.  All default to
    * false; the caller of the pass is expected to set them.
    */
   bool pull_out_jumps;
   bool lower_continue;
   bool lower_break;
   bool lower_sub_return;
   bool lower_main_return;

   ir_lower_jumps_visitor()
      : progress(false),
        pull_out_jumps(false),
        lower_continue(false),
        lower_break(false),
        lower_sub_return(false),
        lower_main_return(false)
   {
   }
292b8e80941Smrg
293b8e80941Smrg   void truncate_after_instruction(exec_node *ir)
294b8e80941Smrg   {
295b8e80941Smrg      if (!ir)
296b8e80941Smrg         return;
297b8e80941Smrg
298b8e80941Smrg      while (!ir->get_next()->is_tail_sentinel()) {
299b8e80941Smrg         ((ir_instruction *)ir->get_next())->remove();
300b8e80941Smrg         this->progress = true;
301b8e80941Smrg      }
302b8e80941Smrg   }
303b8e80941Smrg
304b8e80941Smrg   void move_outer_block_inside(ir_instruction *ir, exec_list *inner_block)
305b8e80941Smrg   {
306b8e80941Smrg      while (!ir->get_next()->is_tail_sentinel()) {
307b8e80941Smrg         ir_instruction *move_ir = (ir_instruction *)ir->get_next();
308b8e80941Smrg
309b8e80941Smrg         move_ir->remove();
310b8e80941Smrg         inner_block->push_tail(move_ir);
311b8e80941Smrg      }
312b8e80941Smrg   }
313b8e80941Smrg
   /**
    * Insert the instructions necessary to lower a return statement,
    * before the given return instruction: store the returned value
    * (for non-void functions) and set the return flag.  The return
    * instruction itself is NOT removed here; the caller is
    * responsible for replacing it.
    */
   void insert_lowered_return(ir_return *ir)
   {
      ir_variable* return_flag = this->function.get_return_flag();
      if(!this->function.signature->return_type->is_void()) {
         /* return_value = <returned expression>; so the single final
          * return at the end of the function can read it.
          */
         ir_variable* return_value = this->function.get_return_value();
         ir->insert_before(
            new(ir) ir_assignment(
               new (ir) ir_dereference_variable(return_value),
               ir->value));
      }
      /* return_flag = true; */
      ir->insert_before(
         new(ir) ir_assignment(
            new (ir) ir_dereference_variable(return_flag),
            new (ir) ir_constant(true)));
      /* ANALYSIS bookkeeping: the enclosing loop now contains a path
       * that sets the return flag.
       */
      this->loop.may_set_return_flag = true;
   }
334b8e80941Smrg
335b8e80941Smrg   /**
336b8e80941Smrg    * If the given instruction is a return, lower it to instructions
337b8e80941Smrg    * that store the return value (if there is one), set the return
338b8e80941Smrg    * flag, and then break.
339b8e80941Smrg    *
340b8e80941Smrg    * It is safe to pass NULL to this function.
341b8e80941Smrg    */
342b8e80941Smrg   void lower_return_unconditionally(ir_instruction *ir)
343b8e80941Smrg   {
344b8e80941Smrg      if (get_jump_strength(ir) != strength_return) {
345b8e80941Smrg         return;
346b8e80941Smrg      }
347b8e80941Smrg      insert_lowered_return((ir_return*)ir);
348b8e80941Smrg      ir->replace_with(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
349b8e80941Smrg   }
350b8e80941Smrg
351b8e80941Smrg   /**
352b8e80941Smrg    * Create the necessary instruction to replace a break instruction.
353b8e80941Smrg    */
354b8e80941Smrg   ir_instruction *create_lowered_break()
355b8e80941Smrg   {
356b8e80941Smrg      void *ctx = this->function.signature;
357b8e80941Smrg      return new(ctx) ir_assignment(
358b8e80941Smrg          new(ctx) ir_dereference_variable(this->loop.get_break_flag()),
359b8e80941Smrg          new(ctx) ir_constant(true));
360b8e80941Smrg   }
361b8e80941Smrg
362b8e80941Smrg   /**
363b8e80941Smrg    * If the given instruction is a break, lower it to an instruction
364b8e80941Smrg    * that sets the break flag, without consulting
365b8e80941Smrg    * should_lower_jump().
366b8e80941Smrg    *
367b8e80941Smrg    * It is safe to pass NULL to this function.
368b8e80941Smrg    */
369b8e80941Smrg   void lower_break_unconditionally(ir_instruction *ir)
370b8e80941Smrg   {
371b8e80941Smrg      if (get_jump_strength(ir) != strength_break) {
372b8e80941Smrg         return;
373b8e80941Smrg      }
374b8e80941Smrg      ir->replace_with(create_lowered_break());
375b8e80941Smrg   }
376b8e80941Smrg
377b8e80941Smrg   /**
378b8e80941Smrg    * If the block ends in a conditional or unconditional break, lower
379b8e80941Smrg    * it, even though should_lower_jump() says it needn't be lowered.
380b8e80941Smrg    */
381b8e80941Smrg   void lower_final_breaks(exec_list *block)
382b8e80941Smrg   {
383b8e80941Smrg      ir_instruction *ir = (ir_instruction *) block->get_tail();
384b8e80941Smrg      lower_break_unconditionally(ir);
385b8e80941Smrg      ir_if *ir_if = ir->as_if();
386b8e80941Smrg      if (ir_if) {
387b8e80941Smrg          lower_break_unconditionally(
388b8e80941Smrg              (ir_instruction *) ir_if->then_instructions.get_tail());
389b8e80941Smrg          lower_break_unconditionally(
390b8e80941Smrg              (ir_instruction *) ir_if->else_instructions.get_tail());
391b8e80941Smrg      }
392b8e80941Smrg   }
393b8e80941Smrg
394b8e80941Smrg   virtual void visit(class ir_loop_jump * ir)
395b8e80941Smrg   {
396b8e80941Smrg      /* Eliminate all instructions after each one, since they are
397b8e80941Smrg       * unreachable.  This satisfies the DEAD_CODE_ELIMINATION
398b8e80941Smrg       * postcondition.
399b8e80941Smrg       */
400b8e80941Smrg      truncate_after_instruction(ir);
401b8e80941Smrg
402b8e80941Smrg      /* Set this->block.min_strength based on this instruction.  This
403b8e80941Smrg       * satisfies the ANALYSIS postcondition.  It is not necessary to
404b8e80941Smrg       * update this->block.may_clear_execute_flag or
405b8e80941Smrg       * this->loop.may_set_return_flag, because an unlowered jump
406b8e80941Smrg       * instruction can't change any flags.
407b8e80941Smrg       */
408b8e80941Smrg      this->block.min_strength = ir->is_break() ? strength_break : strength_continue;
409b8e80941Smrg
410b8e80941Smrg      /* The CONTAINED_JUMPS_LOWERED postcondition is already
411b8e80941Smrg       * satisfied, because jump statements can't contain other
412b8e80941Smrg       * statements.
413b8e80941Smrg       */
414b8e80941Smrg   }
415b8e80941Smrg
416b8e80941Smrg   virtual void visit(class ir_return * ir)
417b8e80941Smrg   {
418b8e80941Smrg      /* Eliminate all instructions after each one, since they are
419b8e80941Smrg       * unreachable.  This satisfies the DEAD_CODE_ELIMINATION
420b8e80941Smrg       * postcondition.
421b8e80941Smrg       */
422b8e80941Smrg      truncate_after_instruction(ir);
423b8e80941Smrg
424b8e80941Smrg      /* Set this->block.min_strength based on this instruction.  This
425b8e80941Smrg       * satisfies the ANALYSIS postcondition.  It is not necessary to
426b8e80941Smrg       * update this->block.may_clear_execute_flag or
427b8e80941Smrg       * this->loop.may_set_return_flag, because an unlowered return
428b8e80941Smrg       * instruction can't change any flags.
429b8e80941Smrg       */
430b8e80941Smrg      this->block.min_strength = strength_return;
431b8e80941Smrg
432b8e80941Smrg      /* The CONTAINED_JUMPS_LOWERED postcondition is already
433b8e80941Smrg       * satisfied, because jump statements can't contain other
434b8e80941Smrg       * statements.
435b8e80941Smrg       */
436b8e80941Smrg   }
437b8e80941Smrg
   virtual void visit(class ir_discard * ir)
   {
      /* Nothing needs to be done.  The ANALYSIS and
       * DEAD_CODE_ELIMINATION postconditions are already satisfied,
       * because discard statements are deliberately ignored by this
       * pass (they are handled by lower_discard.cpp, per the file
       * comment).  The CONTAINED_JUMPS_LOWERED postcondition is also
       * satisfied, because discard statements can't contain other
       * statements.
       */
      (void) ir;
   }
449b8e80941Smrg
450b8e80941Smrg   enum jump_strength get_jump_strength(ir_instruction* ir)
451b8e80941Smrg   {
452b8e80941Smrg      if(!ir)
453b8e80941Smrg         return strength_none;
454b8e80941Smrg      else if(ir->ir_type == ir_type_loop_jump) {
455b8e80941Smrg         if(((ir_loop_jump*)ir)->is_break())
456b8e80941Smrg            return strength_break;
457b8e80941Smrg         else
458b8e80941Smrg            return strength_continue;
459b8e80941Smrg      } else if(ir->ir_type == ir_type_return)
460b8e80941Smrg         return strength_return;
461b8e80941Smrg      else
462b8e80941Smrg         return strength_none;
463b8e80941Smrg   }
464b8e80941Smrg
465b8e80941Smrg   bool should_lower_jump(ir_jump* ir)
466b8e80941Smrg   {
467b8e80941Smrg      unsigned strength = get_jump_strength(ir);
468b8e80941Smrg      bool lower;
469b8e80941Smrg      switch(strength)
470b8e80941Smrg      {
471b8e80941Smrg      case strength_none:
472b8e80941Smrg         lower = false; /* don't change this, code relies on it */
473b8e80941Smrg         break;
474b8e80941Smrg      case strength_continue:
475b8e80941Smrg         lower = lower_continue;
476b8e80941Smrg         break;
477b8e80941Smrg      case strength_break:
478b8e80941Smrg         assert(this->loop.loop);
479b8e80941Smrg         /* never lower "canonical break" */
480b8e80941Smrg         if(ir->get_next()->is_tail_sentinel() && (this->loop.nesting_depth == 0
481b8e80941Smrg               || (this->loop.nesting_depth == 1 && this->loop.in_if_at_the_end_of_the_loop)))
482b8e80941Smrg            lower = false;
483b8e80941Smrg         else
484b8e80941Smrg            lower = lower_break;
485b8e80941Smrg         break;
486b8e80941Smrg      case strength_return:
487b8e80941Smrg         /* never lower return at the end of a this->function */
488b8e80941Smrg         if(this->function.nesting_depth == 0 && ir->get_next()->is_tail_sentinel())
489b8e80941Smrg            lower = false;
490b8e80941Smrg         else
491b8e80941Smrg            lower = this->function.lower_return;
492b8e80941Smrg         break;
493b8e80941Smrg      }
494b8e80941Smrg      return lower;
495b8e80941Smrg   }
496b8e80941Smrg
497b8e80941Smrg   block_record visit_block(exec_list* list)
498b8e80941Smrg   {
499b8e80941Smrg      /* Note: since visiting a node may change that node's next
500b8e80941Smrg       * pointer, we can't use visit_exec_list(), because
501b8e80941Smrg       * visit_exec_list() caches the node's next pointer before
502b8e80941Smrg       * visiting it.  So we use foreach_in_list() instead.
503b8e80941Smrg       *
504b8e80941Smrg       * foreach_in_list() isn't safe if the node being visited gets
505b8e80941Smrg       * removed, but fortunately this visitor doesn't do that.
506b8e80941Smrg       */
507b8e80941Smrg
508b8e80941Smrg      block_record saved_block = this->block;
509b8e80941Smrg      this->block = block_record();
510b8e80941Smrg      foreach_in_list(ir_instruction, node, list) {
511b8e80941Smrg         node->accept(this);
512b8e80941Smrg      }
513b8e80941Smrg      block_record ret = this->block;
514b8e80941Smrg      this->block = saved_block;
515b8e80941Smrg      return ret;
516b8e80941Smrg   }
517b8e80941Smrg
518b8e80941Smrg   virtual void visit(ir_if *ir)
519b8e80941Smrg   {
520b8e80941Smrg      if(this->loop.nesting_depth == 0 && ir->get_next()->is_tail_sentinel())
521b8e80941Smrg         this->loop.in_if_at_the_end_of_the_loop = true;
522b8e80941Smrg
523b8e80941Smrg      ++this->function.nesting_depth;
524b8e80941Smrg      ++this->loop.nesting_depth;
525b8e80941Smrg
526b8e80941Smrg      block_record block_records[2];
527b8e80941Smrg      ir_jump* jumps[2];
528b8e80941Smrg
529b8e80941Smrg      /* Recursively lower nested jumps.  This satisfies the
530b8e80941Smrg       * CONTAINED_JUMPS_LOWERED postcondition, except in the case of
531b8e80941Smrg       * unconditional jumps at the end of ir->then_instructions and
532b8e80941Smrg       * ir->else_instructions, which are handled below.
533b8e80941Smrg       */
534b8e80941Smrg      block_records[0] = visit_block(&ir->then_instructions);
535b8e80941Smrg      block_records[1] = visit_block(&ir->else_instructions);
536b8e80941Smrg
537b8e80941Smrgretry: /* we get here if we put code after the if inside a branch */
538b8e80941Smrg
539b8e80941Smrg      /* Determine which of ir->then_instructions and
540b8e80941Smrg       * ir->else_instructions end with an unconditional jump.
541b8e80941Smrg       */
542b8e80941Smrg      for(unsigned i = 0; i < 2; ++i) {
543b8e80941Smrg         exec_list& list = i ? ir->else_instructions : ir->then_instructions;
544b8e80941Smrg         jumps[i] = 0;
545b8e80941Smrg         if(!list.is_empty() && get_jump_strength((ir_instruction*)list.get_tail()))
546b8e80941Smrg            jumps[i] = (ir_jump*)list.get_tail();
547b8e80941Smrg      }
548b8e80941Smrg
549b8e80941Smrg      /* Loop until we have satisfied the CONTAINED_JUMPS_LOWERED
550b8e80941Smrg       * postcondition by lowering jumps in both then_instructions and
551b8e80941Smrg       * else_instructions.
552b8e80941Smrg       */
553b8e80941Smrg      for(;;) {
554b8e80941Smrg         /* Determine the types of the jumps that terminate
555b8e80941Smrg          * ir->then_instructions and ir->else_instructions.
556b8e80941Smrg          */
557b8e80941Smrg         jump_strength jump_strengths[2];
558b8e80941Smrg
559b8e80941Smrg         for(unsigned i = 0; i < 2; ++i) {
560b8e80941Smrg            if(jumps[i]) {
561b8e80941Smrg               jump_strengths[i] = block_records[i].min_strength;
562b8e80941Smrg               assert(jump_strengths[i] == get_jump_strength(jumps[i]));
563b8e80941Smrg            } else
564b8e80941Smrg               jump_strengths[i] = strength_none;
565b8e80941Smrg         }
566b8e80941Smrg
567b8e80941Smrg         /* If both code paths end in a jump, and the jumps are the
568b8e80941Smrg          * same, and we are pulling out jumps, replace them with a
569b8e80941Smrg          * single jump that comes after the if instruction.  The new
570b8e80941Smrg          * jump will be visited next, and it will be lowered if
571b8e80941Smrg          * necessary by the loop or conditional that encloses it.
572b8e80941Smrg          */
573b8e80941Smrg         if(pull_out_jumps && jump_strengths[0] == jump_strengths[1]) {
574b8e80941Smrg            bool unify = true;
575b8e80941Smrg            if(jump_strengths[0] == strength_continue)
576b8e80941Smrg               ir->insert_after(new(ir) ir_loop_jump(ir_loop_jump::jump_continue));
577b8e80941Smrg            else if(jump_strengths[0] == strength_break)
578b8e80941Smrg               ir->insert_after(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
579b8e80941Smrg            /* FINISHME: unify returns with identical expressions */
580b8e80941Smrg            else if(jump_strengths[0] == strength_return && this->function.signature->return_type->is_void())
581b8e80941Smrg               ir->insert_after(new(ir) ir_return(NULL));
582b8e80941Smrg	    else
583b8e80941Smrg	       unify = false;
584b8e80941Smrg
585b8e80941Smrg            if(unify) {
586b8e80941Smrg               jumps[0]->remove();
587b8e80941Smrg               jumps[1]->remove();
588b8e80941Smrg               this->progress = true;
589b8e80941Smrg
590b8e80941Smrg               /* Update jumps[] to reflect the fact that the jumps
591b8e80941Smrg                * are gone, and update block_records[] to reflect the
592b8e80941Smrg                * fact that control can now flow to the next
593b8e80941Smrg                * instruction.
594b8e80941Smrg                */
595b8e80941Smrg               jumps[0] = 0;
596b8e80941Smrg               jumps[1] = 0;
597b8e80941Smrg               block_records[0].min_strength = strength_none;
598b8e80941Smrg               block_records[1].min_strength = strength_none;
599b8e80941Smrg
600b8e80941Smrg               /* The CONTAINED_JUMPS_LOWERED postcondition is now
601b8e80941Smrg                * satisfied, so we can break out of the loop.
602b8e80941Smrg                */
603b8e80941Smrg               break;
604b8e80941Smrg            }
605b8e80941Smrg         }
606b8e80941Smrg
607b8e80941Smrg         /* lower a jump: if both need to lowered, start with the strongest one, so that
608b8e80941Smrg          * we might later unify the lowered version with the other one
609b8e80941Smrg          */
610b8e80941Smrg         bool should_lower[2];
611b8e80941Smrg         for(unsigned i = 0; i < 2; ++i)
612b8e80941Smrg            should_lower[i] = should_lower_jump(jumps[i]);
613b8e80941Smrg
614b8e80941Smrg         int lower;
615b8e80941Smrg         if(should_lower[1] && should_lower[0])
616b8e80941Smrg            lower = jump_strengths[1] > jump_strengths[0];
617b8e80941Smrg         else if(should_lower[0])
618b8e80941Smrg            lower = 0;
619b8e80941Smrg         else if(should_lower[1])
620b8e80941Smrg            lower = 1;
621b8e80941Smrg         else
622b8e80941Smrg            /* Neither code path ends in a jump that needs to be
623b8e80941Smrg             * lowered, so the CONTAINED_JUMPS_LOWERED postcondition
624b8e80941Smrg             * is satisfied and we can break out of the loop.
625b8e80941Smrg             */
626b8e80941Smrg            break;
627b8e80941Smrg
628b8e80941Smrg         if(jump_strengths[lower] == strength_return) {
629b8e80941Smrg            /* To lower a return, we create a return flag (if the
630b8e80941Smrg             * function doesn't have one already) and add instructions
631b8e80941Smrg             * that: 1. store the return value (if this function has a
632b8e80941Smrg             * non-void return) and 2. set the return flag
633b8e80941Smrg             */
634b8e80941Smrg            insert_lowered_return((ir_return*)jumps[lower]);
635b8e80941Smrg            if(this->loop.loop) {
636b8e80941Smrg               /* If we are in a loop, replace the return instruction
637b8e80941Smrg                * with a break instruction, and then loop so that the
638b8e80941Smrg                * break instruction can be lowered if necessary.
639b8e80941Smrg                */
640b8e80941Smrg               ir_loop_jump* lowered = 0;
641b8e80941Smrg               lowered = new(ir) ir_loop_jump(ir_loop_jump::jump_break);
642b8e80941Smrg               /* Note: we must update block_records and jumps to
643b8e80941Smrg                * reflect the fact that the control path has been
644b8e80941Smrg                * altered from a return to a break.
645b8e80941Smrg                */
646b8e80941Smrg               block_records[lower].min_strength = strength_break;
647b8e80941Smrg               jumps[lower]->replace_with(lowered);
648b8e80941Smrg               jumps[lower] = lowered;
649b8e80941Smrg            } else {
650b8e80941Smrg               /* If we are not in a loop, we then proceed as we would
651b8e80941Smrg                * for a continue statement (set the execute flag to
652b8e80941Smrg                * false to prevent the rest of the function from
653b8e80941Smrg                * executing).
654b8e80941Smrg                */
655b8e80941Smrg               goto lower_continue;
656b8e80941Smrg            }
657b8e80941Smrg            this->progress = true;
658b8e80941Smrg         } else if(jump_strengths[lower] == strength_break) {
659b8e80941Smrg            /* To lower a break, we create a break flag (if the loop
660b8e80941Smrg             * doesn't have one already) and add an instruction that
661b8e80941Smrg             * sets it.
662b8e80941Smrg             *
663b8e80941Smrg             * Then we proceed as we would for a continue statement
664b8e80941Smrg             * (set the execute flag to false to prevent the rest of
665b8e80941Smrg             * the loop body from executing).
666b8e80941Smrg             *
667b8e80941Smrg             * The visit() function for the loop will ensure that the
668b8e80941Smrg             * break flag is checked after executing the loop body.
669b8e80941Smrg             */
670b8e80941Smrg            jumps[lower]->insert_before(create_lowered_break());
671b8e80941Smrg            goto lower_continue;
672b8e80941Smrg         } else if(jump_strengths[lower] == strength_continue) {
673b8e80941Smrglower_continue:
674b8e80941Smrg            /* To lower a continue, we create an execute flag (if the
675b8e80941Smrg             * loop doesn't have one already) and replace the continue
676b8e80941Smrg             * with an instruction that clears it.
677b8e80941Smrg             *
678b8e80941Smrg             * Note that this code path gets exercised when lowering
679b8e80941Smrg             * return statements that are not inside a loop, so
680b8e80941Smrg             * this->loop must be initialized even outside of loops.
681b8e80941Smrg             */
682b8e80941Smrg            ir_variable* execute_flag = this->loop.get_execute_flag();
683b8e80941Smrg            jumps[lower]->replace_with(new(ir) ir_assignment(new (ir) ir_dereference_variable(execute_flag), new (ir) ir_constant(false)));
684b8e80941Smrg            /* Note: we must update block_records and jumps to reflect
685b8e80941Smrg             * the fact that the control path has been altered to an
686b8e80941Smrg             * instruction that clears the execute flag.
687b8e80941Smrg             */
688b8e80941Smrg            jumps[lower] = 0;
689b8e80941Smrg            block_records[lower].min_strength = strength_always_clears_execute_flag;
690b8e80941Smrg            block_records[lower].may_clear_execute_flag = true;
691b8e80941Smrg            this->progress = true;
692b8e80941Smrg
693b8e80941Smrg            /* Let the loop run again, in case the other branch of the
694b8e80941Smrg             * if needs to be lowered too.
695b8e80941Smrg             */
696b8e80941Smrg         }
697b8e80941Smrg      }
698b8e80941Smrg
699b8e80941Smrg      /* move out a jump out if possible */
700b8e80941Smrg      if(pull_out_jumps) {
701b8e80941Smrg         /* If one of the branches ends in a jump, and control cannot
702b8e80941Smrg          * fall out the bottom of the other branch, then we can move
703b8e80941Smrg          * the jump after the if.
704b8e80941Smrg          *
705b8e80941Smrg          * Set move_out to the branch we are moving a jump out of.
706b8e80941Smrg          */
707b8e80941Smrg         int move_out = -1;
708b8e80941Smrg         if(jumps[0] && block_records[1].min_strength >= strength_continue)
709b8e80941Smrg            move_out = 0;
710b8e80941Smrg         else if(jumps[1] && block_records[0].min_strength >= strength_continue)
711b8e80941Smrg            move_out = 1;
712b8e80941Smrg
713b8e80941Smrg         if(move_out >= 0)
714b8e80941Smrg         {
715b8e80941Smrg            jumps[move_out]->remove();
716b8e80941Smrg            ir->insert_after(jumps[move_out]);
717b8e80941Smrg            /* Note: we must update block_records and jumps to reflect
718b8e80941Smrg             * the fact that the jump has been moved out of the if.
719b8e80941Smrg             */
720b8e80941Smrg            jumps[move_out] = 0;
721b8e80941Smrg            block_records[move_out].min_strength = strength_none;
722b8e80941Smrg            this->progress = true;
723b8e80941Smrg         }
724b8e80941Smrg      }
725b8e80941Smrg
726b8e80941Smrg      /* Now satisfy the ANALYSIS postcondition by setting
727b8e80941Smrg       * this->block.min_strength and
728b8e80941Smrg       * this->block.may_clear_execute_flag based on the
729b8e80941Smrg       * characteristics of the two branches.
730b8e80941Smrg       */
731b8e80941Smrg      if(block_records[0].min_strength < block_records[1].min_strength)
732b8e80941Smrg         this->block.min_strength = block_records[0].min_strength;
733b8e80941Smrg      else
734b8e80941Smrg         this->block.min_strength = block_records[1].min_strength;
735b8e80941Smrg      this->block.may_clear_execute_flag = this->block.may_clear_execute_flag || block_records[0].may_clear_execute_flag || block_records[1].may_clear_execute_flag;
736b8e80941Smrg
737b8e80941Smrg      /* Now we need to clean up the instructions that follow the
738b8e80941Smrg       * if.
739b8e80941Smrg       *
740b8e80941Smrg       * If those instructions are unreachable, then satisfy the
741b8e80941Smrg       * DEAD_CODE_ELIMINATION postcondition by eliminating them.
742b8e80941Smrg       * Otherwise that postcondition is already satisfied.
743b8e80941Smrg       */
744b8e80941Smrg      if(this->block.min_strength)
745b8e80941Smrg         truncate_after_instruction(ir);
746b8e80941Smrg      else if(this->block.may_clear_execute_flag)
747b8e80941Smrg      {
748b8e80941Smrg         /* If the "if" instruction might clear the execute flag, then
749b8e80941Smrg          * we need to guard any instructions that follow so that they
750b8e80941Smrg          * are only executed if the execute flag is set.
751b8e80941Smrg          *
752b8e80941Smrg          * If one of the branches of the "if" always clears the
753b8e80941Smrg          * execute flag, and the other branch never clears it, then
754b8e80941Smrg          * this is easy: just move all the instructions following the
755b8e80941Smrg          * "if" into the branch that never clears it.
756b8e80941Smrg          */
757b8e80941Smrg         int move_into = -1;
758b8e80941Smrg         if(block_records[0].min_strength && !block_records[1].may_clear_execute_flag)
759b8e80941Smrg            move_into = 1;
760b8e80941Smrg         else if(block_records[1].min_strength && !block_records[0].may_clear_execute_flag)
761b8e80941Smrg            move_into = 0;
762b8e80941Smrg
763b8e80941Smrg         if(move_into >= 0) {
764b8e80941Smrg            assert(!block_records[move_into].min_strength && !block_records[move_into].may_clear_execute_flag); /* otherwise, we just truncated */
765b8e80941Smrg
766b8e80941Smrg            exec_list* list = move_into ? &ir->else_instructions : &ir->then_instructions;
767b8e80941Smrg            exec_node* next = ir->get_next();
768b8e80941Smrg            if(!next->is_tail_sentinel()) {
769b8e80941Smrg               move_outer_block_inside(ir, list);
770b8e80941Smrg
771b8e80941Smrg               /* If any instructions moved, then we need to visit
772b8e80941Smrg                * them (since they are now inside the "if").  Since
773b8e80941Smrg                * block_records[move_into] is in its default state
774b8e80941Smrg                * (see assertion above), we can safely replace
775b8e80941Smrg                * block_records[move_into] with the result of this
776b8e80941Smrg                * analysis.
777b8e80941Smrg                */
778b8e80941Smrg               exec_list list;
779b8e80941Smrg               list.head_sentinel.next = next;
780b8e80941Smrg               block_records[move_into] = visit_block(&list);
781b8e80941Smrg
782b8e80941Smrg               /*
783b8e80941Smrg                * Then we need to re-start our jump lowering, since one
784b8e80941Smrg                * of the instructions we moved might be a jump that
785b8e80941Smrg                * needs to be lowered.
786b8e80941Smrg                */
787b8e80941Smrg               this->progress = true;
788b8e80941Smrg               goto retry;
789b8e80941Smrg            }
790b8e80941Smrg         } else {
791b8e80941Smrg            /* If we get here, then the simple case didn't apply; we
792b8e80941Smrg             * need to actually guard the instructions that follow.
793b8e80941Smrg             *
794b8e80941Smrg             * To avoid creating unnecessarily-deep nesting, first
795b8e80941Smrg             * look through the instructions that follow and unwrap
796b8e80941Smrg             * any instructions that that are already wrapped in the
797b8e80941Smrg             * appropriate guard.
798b8e80941Smrg             */
799b8e80941Smrg            ir_instruction* ir_after;
800b8e80941Smrg            for(ir_after = (ir_instruction*)ir->get_next(); !ir_after->is_tail_sentinel();)
801b8e80941Smrg            {
802b8e80941Smrg               ir_if* ir_if = ir_after->as_if();
803b8e80941Smrg               if(ir_if && ir_if->else_instructions.is_empty()) {
804b8e80941Smrg                  ir_dereference_variable* ir_if_cond_deref = ir_if->condition->as_dereference_variable();
805b8e80941Smrg                  if(ir_if_cond_deref && ir_if_cond_deref->var == this->loop.execute_flag) {
806b8e80941Smrg                     ir_instruction* ir_next = (ir_instruction*)ir_after->get_next();
807b8e80941Smrg                     ir_after->insert_before(&ir_if->then_instructions);
808b8e80941Smrg                     ir_after->remove();
809b8e80941Smrg                     ir_after = ir_next;
810b8e80941Smrg                     continue;
811b8e80941Smrg                  }
812b8e80941Smrg               }
813b8e80941Smrg               ir_after = (ir_instruction*)ir_after->get_next();
814b8e80941Smrg
815b8e80941Smrg               /* only set this if we find any unprotected instruction */
816b8e80941Smrg               this->progress = true;
817b8e80941Smrg            }
818b8e80941Smrg
819b8e80941Smrg            /* Then, wrap all the instructions that follow in a single
820b8e80941Smrg             * guard.
821b8e80941Smrg             */
822b8e80941Smrg            if(!ir->get_next()->is_tail_sentinel()) {
823b8e80941Smrg               assert(this->loop.execute_flag);
824b8e80941Smrg               ir_if* if_execute = new(ir) ir_if(new(ir) ir_dereference_variable(this->loop.execute_flag));
825b8e80941Smrg               move_outer_block_inside(ir, &if_execute->then_instructions);
826b8e80941Smrg               ir->insert_after(if_execute);
827b8e80941Smrg            }
828b8e80941Smrg         }
829b8e80941Smrg      }
830b8e80941Smrg      --this->loop.nesting_depth;
831b8e80941Smrg      --this->function.nesting_depth;
832b8e80941Smrg   }
833b8e80941Smrg
834b8e80941Smrg   virtual void visit(ir_loop *ir)
835b8e80941Smrg   {
836b8e80941Smrg      /* Visit the body of the loop, with a fresh data structure in
837b8e80941Smrg       * this->loop so that the analysis we do here won't bleed into
838b8e80941Smrg       * enclosing loops.
839b8e80941Smrg       *
840b8e80941Smrg       * We assume that all code after a loop is reachable from the
841b8e80941Smrg       * loop (see comments on enum jump_strength), so the
842b8e80941Smrg       * DEAD_CODE_ELIMINATION postcondition is automatically
843b8e80941Smrg       * satisfied, as is the block.min_strength portion of the
844b8e80941Smrg       * ANALYSIS postcondition.
845b8e80941Smrg       *
846b8e80941Smrg       * The block.may_clear_execute_flag portion of the ANALYSIS
847b8e80941Smrg       * postcondition is automatically satisfied because execute
848b8e80941Smrg       * flags do not propagate outside of loops.
849b8e80941Smrg       *
850b8e80941Smrg       * The loop.may_set_return_flag portion of the ANALYSIS
851b8e80941Smrg       * postcondition is handled below.
852b8e80941Smrg       */
853b8e80941Smrg      ++this->function.nesting_depth;
854b8e80941Smrg      loop_record saved_loop = this->loop;
855b8e80941Smrg      this->loop = loop_record(this->function.signature, ir);
856b8e80941Smrg
857b8e80941Smrg      /* Recursively lower nested jumps.  This satisfies the
858b8e80941Smrg       * CONTAINED_JUMPS_LOWERED postcondition, except in the case of
859b8e80941Smrg       * an unconditional continue or return at the bottom of the
860b8e80941Smrg       * loop, which are handled below.
861b8e80941Smrg       */
862b8e80941Smrg      block_record body = visit_block(&ir->body_instructions);
863b8e80941Smrg
864b8e80941Smrg      /* If the loop ends in an unconditional continue, eliminate it
865b8e80941Smrg       * because it is redundant.
866b8e80941Smrg       */
867b8e80941Smrg      ir_instruction *ir_last
868b8e80941Smrg         = (ir_instruction *) ir->body_instructions.get_tail();
869b8e80941Smrg      if (get_jump_strength(ir_last) == strength_continue) {
870b8e80941Smrg         ir_last->remove();
871b8e80941Smrg      }
872b8e80941Smrg
873b8e80941Smrg      /* If the loop ends in an unconditional return, and we are
874b8e80941Smrg       * lowering returns, lower it.
875b8e80941Smrg       */
876b8e80941Smrg      if (this->function.lower_return)
877b8e80941Smrg         lower_return_unconditionally(ir_last);
878b8e80941Smrg
879b8e80941Smrg      if(body.min_strength >= strength_break) {
880b8e80941Smrg         /* FINISHME: If the min_strength of the loop body is
881b8e80941Smrg          * strength_break or strength_return, that means that it
882b8e80941Smrg          * isn't a loop at all, since control flow always leaves the
883b8e80941Smrg          * body of the loop via break or return.  In principle the
884b8e80941Smrg          * loop could be eliminated in this case.  This optimization
885b8e80941Smrg          * is not implemented yet.
886b8e80941Smrg          */
887b8e80941Smrg      }
888b8e80941Smrg
889b8e80941Smrg      if(this->loop.break_flag) {
890b8e80941Smrg         /* We only get here if we are lowering breaks */
891b8e80941Smrg         assert (lower_break);
892b8e80941Smrg
893b8e80941Smrg         /* If a break flag was generated while visiting the body of
894b8e80941Smrg          * the loop, then at least one break was lowered, so we need
895b8e80941Smrg          * to generate an if statement at the end of the loop that
896b8e80941Smrg          * does a "break" if the break flag is set.  The break we
897b8e80941Smrg          * generate won't violate the CONTAINED_JUMPS_LOWERED
898b8e80941Smrg          * postcondition, because should_lower_jump() always returns
899b8e80941Smrg          * false for a break that happens at the end of a loop.
900b8e80941Smrg          *
901b8e80941Smrg          * However, if the loop already ends in a conditional or
902b8e80941Smrg          * unconditional break, then we need to lower that break,
903b8e80941Smrg          * because it won't be at the end of the loop anymore.
904b8e80941Smrg          */
905b8e80941Smrg         lower_final_breaks(&ir->body_instructions);
906b8e80941Smrg
907b8e80941Smrg         ir_if* break_if = new(ir) ir_if(new(ir) ir_dereference_variable(this->loop.break_flag));
908b8e80941Smrg         break_if->then_instructions.push_tail(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
909b8e80941Smrg         ir->body_instructions.push_tail(break_if);
910b8e80941Smrg      }
911b8e80941Smrg
912b8e80941Smrg      /* If the body of the loop may set the return flag, then at
913b8e80941Smrg       * least one return was lowered to a break, so we need to ensure
914b8e80941Smrg       * that the return flag is checked after the body of the loop is
915b8e80941Smrg       * executed.
916b8e80941Smrg       */
917b8e80941Smrg      if(this->loop.may_set_return_flag) {
918b8e80941Smrg         assert(this->function.return_flag);
919b8e80941Smrg         /* Generate the if statement to check the return flag */
920b8e80941Smrg         ir_if* return_if = new(ir) ir_if(new(ir) ir_dereference_variable(this->function.return_flag));
921b8e80941Smrg         /* Note: we also need to propagate the knowledge that the
922b8e80941Smrg          * return flag may get set to the outer context.  This
923b8e80941Smrg          * satisfies the loop.may_set_return_flag part of the
924b8e80941Smrg          * ANALYSIS postcondition.
925b8e80941Smrg          */
926b8e80941Smrg         saved_loop.may_set_return_flag = true;
927b8e80941Smrg         if(saved_loop.loop)
928b8e80941Smrg            /* If this loop is nested inside another one, then the if
929b8e80941Smrg             * statement that we generated should break out of that
930b8e80941Smrg             * loop if the return flag is set.  Caller will lower that
931b8e80941Smrg             * break statement if necessary.
932b8e80941Smrg             */
933b8e80941Smrg            return_if->then_instructions.push_tail(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
934b8e80941Smrg         else {
935b8e80941Smrg            /* Otherwise, ensure that the instructions that follow are only
936b8e80941Smrg             * executed if the return flag is clear.  We can do that by moving
937b8e80941Smrg             * those instructions into the else clause of the generated if
938b8e80941Smrg             * statement.
939b8e80941Smrg             */
940b8e80941Smrg            move_outer_block_inside(ir, &return_if->else_instructions);
941b8e80941Smrg
942b8e80941Smrg            /* In case the loop is embedded inside an if add a new return to
943b8e80941Smrg             * the return flag then branch and let a future pass tidy it up.
944b8e80941Smrg             */
945b8e80941Smrg            if (this->function.signature->return_type->is_void())
946b8e80941Smrg               return_if->then_instructions.push_tail(new(ir) ir_return(NULL));
947b8e80941Smrg            else {
948b8e80941Smrg               assert(this->function.return_value);
949b8e80941Smrg               ir_variable* return_value = this->function.return_value;
950b8e80941Smrg               return_if->then_instructions.push_tail(
951b8e80941Smrg                  new(ir) ir_return(new(ir) ir_dereference_variable(return_value)));
952b8e80941Smrg            }
953b8e80941Smrg         }
954b8e80941Smrg
955b8e80941Smrg         ir->insert_after(return_if);
956b8e80941Smrg      }
957b8e80941Smrg
958b8e80941Smrg      this->loop = saved_loop;
959b8e80941Smrg      --this->function.nesting_depth;
960b8e80941Smrg   }
961b8e80941Smrg
962b8e80941Smrg   virtual void visit(ir_function_signature *ir)
963b8e80941Smrg   {
964b8e80941Smrg      /* these are not strictly necessary */
965b8e80941Smrg      assert(!this->function.signature);
966b8e80941Smrg      assert(!this->loop.loop);
967b8e80941Smrg
968b8e80941Smrg      bool lower_return;
969b8e80941Smrg      if (strcmp(ir->function_name(), "main") == 0)
970b8e80941Smrg         lower_return = lower_main_return;
971b8e80941Smrg      else
972b8e80941Smrg         lower_return = lower_sub_return;
973b8e80941Smrg
974b8e80941Smrg      function_record saved_function = this->function;
975b8e80941Smrg      loop_record saved_loop = this->loop;
976b8e80941Smrg      this->function = function_record(ir, lower_return);
977b8e80941Smrg      this->loop = loop_record(ir);
978b8e80941Smrg
979b8e80941Smrg      assert(!this->loop.loop);
980b8e80941Smrg
981b8e80941Smrg      /* Visit the body of the function to lower any jumps that occur
982b8e80941Smrg       * in it, except possibly an unconditional return statement at
983b8e80941Smrg       * the end of it.
984b8e80941Smrg       */
985b8e80941Smrg      visit_block(&ir->body);
986b8e80941Smrg
987b8e80941Smrg      /* If the body ended in an unconditional return of non-void,
988b8e80941Smrg       * then we don't need to lower it because it's the one canonical
989b8e80941Smrg       * return.
990b8e80941Smrg       *
991b8e80941Smrg       * If the body ended in a return of void, eliminate it because
992b8e80941Smrg       * it is redundant.
993b8e80941Smrg       */
994b8e80941Smrg      if (ir->return_type->is_void() &&
995b8e80941Smrg          get_jump_strength((ir_instruction *) ir->body.get_tail())) {
996b8e80941Smrg         ir_jump *jump = (ir_jump *) ir->body.get_tail();
997b8e80941Smrg         assert (jump->ir_type == ir_type_return);
998b8e80941Smrg         jump->remove();
999b8e80941Smrg      }
1000b8e80941Smrg
1001b8e80941Smrg      if(this->function.return_value)
1002b8e80941Smrg         ir->body.push_tail(new(ir) ir_return(new (ir) ir_dereference_variable(this->function.return_value)));
1003b8e80941Smrg
1004b8e80941Smrg      this->loop = saved_loop;
1005b8e80941Smrg      this->function = saved_function;
1006b8e80941Smrg   }
1007b8e80941Smrg
1008b8e80941Smrg   virtual void visit(class ir_function * ir)
1009b8e80941Smrg   {
1010b8e80941Smrg      visit_block(&ir->signatures);
1011b8e80941Smrg   }
1012b8e80941Smrg};
1013b8e80941Smrg
1014b8e80941Smrg} /* anonymous namespace */
1015b8e80941Smrg
1016b8e80941Smrgbool
1017b8e80941Smrgdo_lower_jumps(exec_list *instructions, bool pull_out_jumps, bool lower_sub_return, bool lower_main_return, bool lower_continue, bool lower_break)
1018b8e80941Smrg{
1019b8e80941Smrg   ir_lower_jumps_visitor v;
1020b8e80941Smrg   v.pull_out_jumps = pull_out_jumps;
1021b8e80941Smrg   v.lower_continue = lower_continue;
1022b8e80941Smrg   v.lower_break = lower_break;
1023b8e80941Smrg   v.lower_sub_return = lower_sub_return;
1024b8e80941Smrg   v.lower_main_return = lower_main_return;
1025b8e80941Smrg
1026b8e80941Smrg   bool progress_ever = false;
1027b8e80941Smrg   do {
1028b8e80941Smrg      v.progress = false;
1029b8e80941Smrg      visit_exec_list(instructions, &v);
1030b8e80941Smrg      progress_ever = v.progress || progress_ever;
1031b8e80941Smrg   } while (v.progress);
1032b8e80941Smrg
1033b8e80941Smrg   return progress_ever;
1034b8e80941Smrg}
1035