/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple-expr.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "opts.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "rtl-error.h"
#include "hard-reg-set.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "optabs-tree.h"
#include "output.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "gimplify.h"
#include "tree-pass.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "cfgexpand.h"
#include "shrink-wrap.h"
#include "toplev.h"
#include "rtl-iter.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "attribs.h"
#include "gimple.h"
#include "options.h"
#include "function-abi.h"
#include "value-range.h"
#include "gimple-range.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
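
/* Worked example (illustrative): with ALIGN == 8 (a power of two),
   FLOOR_ROUND (13, 8) is 13 & ~7 == 8, and CEIL_ROUND (13, 8) is
   (13 + 7) & ~7 == 16.  The bit-masking form also floors negative
   values correctly, e.g. FLOOR_ROUND (-13, 8) == -16, whereas a
   division-based rounding would round toward zero under C's
   truncating division.  */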

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.cc:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

static GTY((cache))
  hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache))
  hash_table<insn_cache_hasher> *epilogue_insn_hash;


hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;

/* Forward declarations.  */

static class temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);


/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

static vec<function *> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
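
/* Usage sketch (illustrative, not from this file): a front end that
   compiles a nested function brackets the inner compilation like so:

     push_function_context ();
     ... set up cfun and expand the nested function ...
     pop_function_context ();

   which restores the enclosing function's cfun and
   current_function_decl on the way out.  */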

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

poly_int64
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (poly_int64 offset, tree func)
{
  poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
  unsigned HOST_WIDE_INT limit
    = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
       /* Leave room for the fixed part of the frame.  */
       - 64 * UNITS_PER_WORD);

  if (!coeffs_in_range_p (size, 0U, limit))
    {
      unsigned HOST_WIDE_INT hwisize;
      if (size.is_constant (&hwisize))
	error_at (DECL_SOURCE_LOCATION (func),
		  "total size of local objects %wu exceeds maximum %wu",
		  hwisize, limit);
      else
	error_at (DECL_SOURCE_LOCATION (func),
		  "total size of local objects exceeds maximum %wu",
		  limit);
      return true;
    }

  return false;
}
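
/* Worked example (illustrative numbers): on a 32-bit target with
   UNITS_PER_WORD == 4, the limit above is 2^31 - 64*4 == 2147483392
   bytes, so a function whose locals total more than roughly 2 GB is
   rejected with the error above rather than silently wrapping the
   frame offset.  */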

/* Return the minimum spill slot alignment for a register of mode MODE.  */

unsigned int
spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
{
  return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
}

/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (poly_int64 start, poly_int64 length,
		     poly_int64 size, unsigned int alignment,
		     poly_int64_pod *poffset)
{
  poly_int64 this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = targetm.starting_frame_offset () % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (aligned_lower_bound (start + length - size - frame_phase, alignment)
	 + frame_phase);
  else
    this_frame_offset
      = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (maybe_lt (this_frame_offset, start))
    {
      if (known_eq (frame_offset, start))
	frame_offset = this_frame_offset;
      else
	return false;
    }
  else if (maybe_gt (this_frame_offset + size, start + length))
    {
      if (known_eq (frame_offset, start + length))
	frame_offset = this_frame_offset + size;
      else
	return false;
    }

  *poffset = this_frame_offset;
  return true;
}
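
/* Illustrative example of the phase handling (hypothetical numbers):
   with PREFERRED_STACK_BOUNDARY of 128 bits, frame_alignment is 16.
   If targetm.starting_frame_offset () were 8, then frame_off == 8 and
   frame_phase == 8, so a slot requesting 16-byte alignment is placed
   at offsets of the form 16*k + 8; adding the starting frame offset
   back in makes the final address a multiple of 16, as required.  */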

/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (poly_int64 start, poly_int64 end)
{
  class frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (machine_mode mode, poly_int64 size,
		      int align, int kind)
{
  rtx x, addr;
  poly_int64 bigend_correction = 0;
  poly_int64 slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = aligned_upper_bound (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || known_eq (size, 0)
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || maybe_ne (size, 0))
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  class frame_space **psp;

	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      class frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      *psp = space->next;
	      if (known_gt (slot_offset, space->start))
		add_frame_space (space->start, slot_offset);
	      if (known_lt (slot_offset + size, space->start + space->length))
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (known_gt (slot_offset, frame_offset))
	    add_frame_space (frame_offset, slot_offset);
	  if (known_lt (slot_offset + size, old_frame_offset))
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (known_gt (slot_offset, old_frame_offset))
	    add_frame_space (old_frame_offset, slot_offset);
	  if (known_lt (slot_offset + size, frame_offset))
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (mode != BLKmode)
    {
      /* The slot size can sometimes be smaller than the mode size;
	 e.g. the rs6000 port allocates slots with a vector mode
	 that have the size of only one element.  However, the slot
	 size must always be ordered with respect to the mode size, in
	 the same way as for a subreg.  */
      gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
      if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
	bigend_correction = size - GET_MODE_SIZE (mode);
    }

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + targetm.starting_frame_offset (), Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  vec_safe_push (stack_slot_list, x);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with KIND set to ASLK_RECORD_PAD.  */

rtx
assign_stack_local (machine_mode mode, poly_int64 size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
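
/* Usage sketch (illustrative): an expander needing a scratch slot with
   mode-default alignment (ALIGN == 0) would call

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   and get back a MEM rtx addressed off the (still virtual) frame
   pointer, to be rewritten once virtual registers are instantiated.  */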

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

class GTY(()) temp_slot {
public:
  /* Points to next temporary slot.  */
  class temp_slot *next;
  /* Points to previous temporary slot.  */
  class temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  poly_int64 size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  poly_int64 base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  poly_int64 full_size;
};

/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  class temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP into LIST.  */

static void
insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static class temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1, true);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (class temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (class temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
			    temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = copy_rtx (address);
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}

/* Find the temp slot corresponding to the object at address X.  */

static class temp_slot *
find_temp_slot_from_address (rtx x)
{
  class temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  poly_int64 offset;
  if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  if (known_in_range_p (offset, p->base_offset, p->full_size))
	    return p;
    }

  return NULL;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
{
  unsigned int align;
  class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  gcc_assert (known_size_p (size));

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align
	      && known_ge (p->size, size)
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0
		  || (known_eq (best_p->size, p->size)
		      ? best_p->align > p->align
		      : known_ge (best_p->size, p->size))))
	    {
	      if (p->align == align && known_eq (p->size, size))
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  poly_int64 rounded_size = aligned_upper_bound (size, alignment);

	  if (known_ge (best_p->size - rounded_size, alignment))
	    {
	      p = ggc_alloc<temp_slot> ();
	      p->in_use = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      vec_safe_push (stack_slot_list, p->slot);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      poly_int64 frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? aligned_upper_bound (size,
							      (int) align
							      / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  vec_safe_push (stack_slot_list, slot);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, poly_int64 size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}

/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and its name should be used in error messages.  In either case, we
   allocate space of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     end.  See also create_tmp_var for the gimplification-time check.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  if (mode == BLKmode || memory_required)
    {
      poly_int64 size;
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
	size = max_int_size_in_bytes (type);

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (known_eq (size, 0))
	size = 1;

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl
	  && !known_size_p (size)
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  class temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (known_eq (p->base_offset + p->full_size, q->base_offset))
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (known_eq (q->base_offset + q->full_size, p->base_offset))
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
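
/* Illustrative example: two freed BLKmode slots whose
   (base_offset, full_size) pairs are (0, 16) and (16, 32) are
   adjacent, so the first absorbs the second, leaving a single
   available slot with base_offset 0 and full_size 48 that a later,
   larger temporary can reuse.  */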

/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  class temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
     in common between them, try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  class temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
	move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  class temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}
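
/* Usage sketch (illustrative): expansion code brackets a statement
   with a temporary nesting level like so:

     push_temp_slots ();
     rtx tmp = assign_stack_temp (SImode, 4);
     ... emit code that uses tmp ...
     pop_temp_slots ();   // frees tmp's slot for reuse

   and calls preserve_temp_slots on a result that must outlive the
   statement, which moves its slot to the enclosing level instead of
   freeing it.  */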

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}

/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
};
/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
};

/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
				    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}

/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
	  && REGNO (ivs->entries[i].hard_reg) == regno)
	return ivs->entries[i].pseudo;

  return NULL_RTX;
}
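
/* Usage sketch (illustrative; the register number is hypothetical): a
   back end that needs the value a hard register had on entry, e.g. an
   incoming return-address register, can ask

     rtx pseudo = get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);

   and later uses of that pseudo see the entry-time value;
   emit_initial_value_sets below emits the hard-reg-to-pseudo copies
   at the function's entry point.  */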

unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}

/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}

/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static poly_int64 in_arg_offset;
static poly_int64 var_offset;
static poly_int64 dynamic_offset;
static poly_int64 out_arg_offset;
static poly_int64 cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET	0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather is part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (crtl->outgoing_args_size				      \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
					       : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
  ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
 + (STACK_POINTER_OFFSET))
#endif
#endif


/* Given a piece of RTX and a pointer to a poly_int64, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, poly_int64_pod *poffset)
{
  rtx new_rtx;
  poly_int64 offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
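
/* Illustrative example: once var_offset is known, a reference such as
   (plus (reg virtual-stack-vars) (const_int 8)) is rewritten by the
   routines below into (plus (reg frame-pointer) (const_int 8 +
   var_offset)), with the two constants folded into one.  */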
   1489 
   1490 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   1491    registers present inside of *LOC.  The expression is simplified,
   1492    as much as possible, but is not to be considered "valid" in any sense
   1493    implied by the target.  Return true if any change is made.  */
   1494 
   1495 static bool
   1496 instantiate_virtual_regs_in_rtx (rtx *loc)
   1497 {
   1498   if (!*loc)
   1499     return false;
   1500   bool changed = false;
   1501   subrtx_ptr_iterator::array_type array;
   1502   FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
   1503     {
   1504       rtx *loc = *iter;
   1505       if (rtx x = *loc)
   1506 	{
   1507 	  rtx new_rtx;
   1508 	  poly_int64 offset;
   1509 	  switch (GET_CODE (x))
   1510 	    {
   1511 	    case REG:
   1512 	      new_rtx = instantiate_new_reg (x, &offset);
   1513 	      if (new_rtx)
   1514 		{
   1515 		  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
   1516 		  changed = true;
   1517 		}
   1518 	      iter.skip_subrtxes ();
   1519 	      break;
   1520 
   1521 	    case PLUS:
   1522 	      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
   1523 	      if (new_rtx)
   1524 		{
   1525 		  XEXP (x, 0) = new_rtx;
   1526 		  *loc = plus_constant (GET_MODE (x), x, offset, true);
   1527 		  changed = true;
   1528 		  iter.skip_subrtxes ();
   1529 		  break;
   1530 		}
   1531 
   1532 	      /* FIXME -- from old code */
   1533 	      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
   1534 		 we can commute the PLUS and SUBREG because pointers into the
   1535 		 frame are well-behaved.  */
   1536 	      break;
   1537 
   1538 	    default:
   1539 	      break;
   1540 	    }
   1541 	}
   1542     }
   1543   return changed;
   1544 }
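
        /* E.g. (mem (plus (reg virtual-stack-vars) (const_int 8))) becomes
           (mem (plus (reg frame-pointer) (const_int var_offset + 8))).  As
           the comment above says, the result need not be valid for the
           target; callers such as instantiate_virtual_regs_in_insn
           revalidate it where needed.  */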
   1545 
   1546 /* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   1547    matches the predicate for insn CODE operand OPERAND.  */
   1548 
   1549 static int
   1550 safe_insn_predicate (int code, int operand, rtx x)
   1551 {
   1552   return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
   1553 }
   1554 
   1555 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   1556    registers present inside of INSN.  The result will be a valid insn.  */
   1557 
   1558 static void
   1559 instantiate_virtual_regs_in_insn (rtx_insn *insn)
   1560 {
   1561   poly_int64 offset;
   1562   int insn_code, i;
   1563   bool any_change = false;
   1564   rtx set, new_rtx, x;
   1565   rtx_insn *seq;
   1566 
   1567   /* There are some special cases to be handled first.  */
   1568   set = single_set (insn);
   1569   if (set)
   1570     {
   1571       /* We're allowed to assign to a virtual register.  This is interpreted
   1572 	 to mean that the underlying register gets assigned the inverse
   1573 	 transformation.  This is used, for example, in the handling of
   1574 	 non-local gotos.  */
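      /* As a concrete sketch: for
	 (set (reg virtual-stack-dynamic) (reg X)) we emit
	 sp = X - dynamic_offset below, so that afterwards
	 virtual-stack-dynamic (== sp + dynamic_offset) equals X,
	 which is exactly the inverse transformation.  */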
   1575       new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
   1576       if (new_rtx)
   1577 	{
   1578 	  start_sequence ();
   1579 
   1580 	  instantiate_virtual_regs_in_rtx (&SET_SRC (set));
   1581 	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
   1582 				   gen_int_mode (-offset, GET_MODE (new_rtx)));
   1583 	  x = force_operand (x, new_rtx);
   1584 	  if (x != new_rtx)
   1585 	    emit_move_insn (new_rtx, x);
   1586 
   1587 	  seq = get_insns ();
   1588 	  end_sequence ();
   1589 
   1590 	  emit_insn_before (seq, insn);
   1591 	  delete_insn (insn);
   1592 	  return;
   1593 	}
   1594 
   1595       /* Handle a straight copy from a virtual register by generating a
   1596 	 new add insn.  The difference between this and falling through
   1597 	 to the generic case is avoiding a new pseudo and eliminating a
   1598 	 move insn in the initial rtl stream.  */
   1599       new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
   1600       if (new_rtx
   1601 	  && maybe_ne (offset, 0)
   1602 	  && REG_P (SET_DEST (set))
   1603 	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
   1604 	{
   1605 	  start_sequence ();
   1606 
   1607 	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
   1608 				   gen_int_mode (offset,
   1609 						 GET_MODE (SET_DEST (set))),
   1610 				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
   1611 	  if (x != SET_DEST (set))
   1612 	    emit_move_insn (SET_DEST (set), x);
   1613 
   1614 	  seq = get_insns ();
   1615 	  end_sequence ();
   1616 
   1617 	  emit_insn_before (seq, insn);
   1618 	  delete_insn (insn);
   1619 	  return;
   1620 	}
   1621 
   1622       extract_insn (insn);
   1623       insn_code = INSN_CODE (insn);
   1624 
   1625       /* Handle a plus involving a virtual register by determining if the
   1626 	 operands remain valid if they're modified in place.  */
   1627       poly_int64 delta;
   1628       if (GET_CODE (SET_SRC (set)) == PLUS
   1629 	  && recog_data.n_operands >= 3
   1630 	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
   1631 	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
   1632 	  && poly_int_rtx_p (recog_data.operand[2], &delta)
   1633 	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
   1634 	{
   1635 	  offset += delta;
   1636 
   1637 	  /* If the sum is zero, then replace with a plain move.  */
   1638 	  if (known_eq (offset, 0)
   1639 	      && REG_P (SET_DEST (set))
   1640 	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
   1641 	    {
   1642 	      start_sequence ();
   1643 	      emit_move_insn (SET_DEST (set), new_rtx);
   1644 	      seq = get_insns ();
   1645 	      end_sequence ();
   1646 
   1647 	      emit_insn_before (seq, insn);
   1648 	      delete_insn (insn);
   1649 	      return;
   1650 	    }
   1651 
   1652 	  x = gen_int_mode (offset, recog_data.operand_mode[2]);
   1653 
   1654 	  /* Using validate_change and apply_change_group here leaves
   1655 	     recog_data in an invalid state.  Since we know exactly what
   1656 	     we want to check, do those two by hand.  */
   1657 	  if (safe_insn_predicate (insn_code, 1, new_rtx)
   1658 	      && safe_insn_predicate (insn_code, 2, x))
   1659 	    {
   1660 	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
   1661 	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
   1662 	      any_change = true;
   1663 
   1664 	      /* Fall through into the regular operand fixup loop in
   1665 		 order to take care of operands other than 1 and 2.  */
   1666 	    }
   1667 	}
   1668     }
   1669   else
   1670     {
   1671       extract_insn (insn);
   1672       insn_code = INSN_CODE (insn);
   1673     }
   1674 
   1675   /* In the general case, we expect virtual registers to appear only in
   1676      operands, and then only as either bare registers or inside memories.  */
   1677   for (i = 0; i < recog_data.n_operands; ++i)
   1678     {
   1679       x = recog_data.operand[i];
   1680       switch (GET_CODE (x))
   1681 	{
   1682 	case MEM:
   1683 	  {
   1684 	    rtx addr = XEXP (x, 0);
   1685 
   1686 	    if (!instantiate_virtual_regs_in_rtx (&addr))
   1687 	      continue;
   1688 
   1689 	    start_sequence ();
   1690 	    x = replace_equiv_address (x, addr, true);
   1691 	    /* It may happen that the address with the virtual reg
   1692 	       was valid (e.g. based on the virtual stack reg, which might
   1693 	       be acceptable to the predicates with all offsets), whereas
   1694 	       the address now isn't anymore, for instance when the address
   1695 	       is still offsetted, but the base reg isn't virtual-stack-reg
   1696 	       anymore.  Below we would do a force_reg on the whole operand,
   1697 	       but this insn might actually only accept memory.  Hence,
   1698 	       before doing that last resort, try to reload the address into
   1699 	       a register, so this operand stays a MEM.  */
   1700 	    if (!safe_insn_predicate (insn_code, i, x))
   1701 	      {
   1702 		addr = force_reg (GET_MODE (addr), addr);
   1703 		x = replace_equiv_address (x, addr, true);
   1704 	      }
   1705 	    seq = get_insns ();
   1706 	    end_sequence ();
   1707 	    if (seq)
   1708 	      emit_insn_before (seq, insn);
   1709 	  }
   1710 	  break;
   1711 
   1712 	case REG:
   1713 	  new_rtx = instantiate_new_reg (x, &offset);
   1714 	  if (new_rtx == NULL)
   1715 	    continue;
   1716 	  if (known_eq (offset, 0))
   1717 	    x = new_rtx;
   1718 	  else
   1719 	    {
   1720 	      start_sequence ();
   1721 
   1722 	      /* Careful, special mode predicates may have stuff in
   1723 		 insn_data[insn_code].operand[i].mode that isn't useful
   1724 		 to us for computing a new value.  */
   1725 	      /* ??? Recognize address_operand and/or "p" constraints
   1726 		 to see if (plus new offset) is valid before we put
   1727 		 this through expand_simple_binop.  */
   1728 	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
   1729 				       gen_int_mode (offset, GET_MODE (x)),
   1730 				       NULL_RTX, 1, OPTAB_LIB_WIDEN);
   1731 	      seq = get_insns ();
   1732 	      end_sequence ();
   1733 	      emit_insn_before (seq, insn);
   1734 	    }
   1735 	  break;
   1736 
   1737 	case SUBREG:
   1738 #ifdef NB_FIX_VAX_BACKEND
   1739 	  if (MEM_P (XEXP (x, 0)))
   1740 	    {
   1741 	      /* Rewrite a SUBREG of a MEM operand so that the
   1742 		 MEM's address is a plain register.  */
   1743 	      rtx mx = XEXP (x, 0); /* memory operand */
   1744 	      rtx addr = XEXP (mx, 0);
   1745 	      instantiate_virtual_regs_in_rtx (&addr);
   1746 	      start_sequence ();
   1747 	      mx = replace_equiv_address (mx, addr, true);
   1748 	      addr = force_reg (GET_MODE (addr), addr);
   1749 	      mx = replace_equiv_address (mx, addr, true);
   1750 	      seq = get_insns ();
   1751 	      end_sequence ();
   1752 	      if (seq)
   1753 		emit_insn_before (seq, insn);
   1754               /* Generate a new SUBREG expression.  */
   1755 	      x = gen_rtx_SUBREG (GET_MODE (x), mx, SUBREG_BYTE (x));
   1756 	    }
   1757 #endif
   1758 	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
   1759 	  if (new_rtx == NULL)
   1760 	    continue;
   1761 	  if (maybe_ne (offset, 0))
   1762 	    {
   1763 	      start_sequence ();
   1764 	      new_rtx = expand_simple_binop
   1765 		(GET_MODE (new_rtx), PLUS, new_rtx,
   1766 		 gen_int_mode (offset, GET_MODE (new_rtx)),
   1767 		 NULL_RTX, 1, OPTAB_LIB_WIDEN);
   1768 	      seq = get_insns ();
   1769 	      end_sequence ();
   1770 	      emit_insn_before (seq, insn);
   1771 	    }
   1772 	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
   1773 				   GET_MODE (new_rtx), SUBREG_BYTE (x));
   1774 	  gcc_assert (x);
   1775 	  break;
   1776 
   1777 	default:
   1778 	  continue;
   1779 	}
   1780 
   1781       /* At this point, X contains the new value for the operand.
   1782 	 Validate the new value vs the insn predicate.  Note that
   1783 	 asm insns will have insn_code -1 here.  */
   1784       if (!safe_insn_predicate (insn_code, i, x))
   1785 	{
   1786 	  start_sequence ();
   1787 	  if (REG_P (x))
   1788 	    {
   1789 	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
   1790 	      x = copy_to_reg (x);
   1791 	    }
   1792 	  else
   1793 	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
   1794 	  seq = get_insns ();
   1795 	  end_sequence ();
   1796 	  if (seq)
   1797 	    emit_insn_before (seq, insn);
   1798 	}
   1799 
   1800       *recog_data.operand_loc[i] = recog_data.operand[i] = x;
   1801       any_change = true;
   1802     }
   1803 
   1804   if (any_change)
   1805     {
   1806       /* Propagate operand changes into the duplicates.  */
   1807       for (i = 0; i < recog_data.n_dups; ++i)
   1808 	*recog_data.dup_loc[i]
   1809 	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
   1810 
   1811       /* Force re-recognition of the instruction for validation.  */
   1812       INSN_CODE (insn) = -1;
   1813     }
   1814 
   1815   if (asm_noperands (PATTERN (insn)) >= 0)
   1816     {
   1817       if (!check_asm_operands (PATTERN (insn)))
   1818 	{
   1819 	  error_for_asm (insn, "impossible constraint in %<asm%>");
   1820 	  /* For asm goto, instead of fixing up all the edges
   1821 	     just clear the template and clear input and output operands
   1822 	     and strip away clobbers.  */
   1823 	  if (JUMP_P (insn))
   1824 	    {
   1825 	      rtx asm_op = extract_asm_operands (PATTERN (insn));
   1826 	      PATTERN (insn) = asm_op;
   1827 	      PUT_MODE (asm_op, VOIDmode);
   1828 	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
   1829 	      ASM_OPERANDS_OUTPUT_CONSTRAINT (asm_op) = "";
   1830 	      ASM_OPERANDS_OUTPUT_IDX (asm_op) = 0;
   1831 	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
   1832 	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
   1833 	    }
   1834 	  else
   1835 	    delete_insn (insn);
   1836 	}
   1837     }
   1838   else
   1839     {
   1840       if (recog_memoized (insn) < 0)
   1841 	fatal_insn_not_found (insn);
   1842     }
   1843 }
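
        /* A sketch of the PLUS special case above, with a made-up offset:

               (set (reg 100) (plus (reg virtual-stack-vars) (const_int 16)))

           has operand 1 replaced by the frame pointer and operand 2 by
           (const_int var_offset + 16); if either replacement fails the
           insn's predicates, the general operand loop instead rebuilds the
           sum into a fresh pseudo via expand_simple_binop.  */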
   1844 
   1845 /* Subroutine of instantiate_decls.  Given RTL representing a decl,
   1846    do any instantiation required.  */
   1847 
   1848 void
   1849 instantiate_decl_rtl (rtx x)
   1850 {
   1851   rtx addr;
   1852 
   1853   if (x == 0)
   1854     return;
   1855 
   1856   /* If this is a CONCAT, recurse for the pieces.  */
   1857   if (GET_CODE (x) == CONCAT)
   1858     {
   1859       instantiate_decl_rtl (XEXP (x, 0));
   1860       instantiate_decl_rtl (XEXP (x, 1));
   1861       return;
   1862     }
   1863 
   1864 #ifdef NB_FIX_VAX_BACKEND
   1865   /* If this is a SUBREG, recurse on the inner expression.  */
   1866   if (GET_CODE (x) == SUBREG)
   1867     {
   1868       instantiate_decl_rtl (XEXP (x, 0));
   1869       return;
   1870     }
   1871 #endif
   1872 
   1873   /* If this is not a MEM, no need to do anything.  Similarly if the
   1874      address is a constant or a register that is not a virtual register.  */
   1875   if (!MEM_P (x))
   1876     return;
   1877 
   1878   addr = XEXP (x, 0);
   1879   if (CONSTANT_P (addr)
   1880       || (REG_P (addr)
   1881 	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
   1882 	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
   1883     return;
   1884 
   1885   instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
   1886 }
   1887 
   1888 /* Helper for instantiate_decls called via walk_tree: Process all decls
   1889    in the given DECL_VALUE_EXPR.  */
   1890 
   1891 static tree
   1892 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
   1893 {
   1894   tree t = *tp;
   1895   if (! EXPR_P (t))
   1896     {
   1897       *walk_subtrees = 0;
   1898       if (DECL_P (t))
   1899 	{
   1900 	  if (DECL_RTL_SET_P (t))
   1901 	    instantiate_decl_rtl (DECL_RTL (t));
   1902 	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
   1903 	      && DECL_INCOMING_RTL (t))
   1904 	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
   1905 	  if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
   1906 	      && DECL_HAS_VALUE_EXPR_P (t))
   1907 	    {
   1908 	      tree v = DECL_VALUE_EXPR (t);
   1909 	      walk_tree (&v, instantiate_expr, NULL, NULL);
   1910 	    }
   1911 	}
   1912     }
   1913   return NULL;
   1914 }
   1915 
   1916 /* Subroutine of instantiate_decls: Process all decls in the given
   1917    BLOCK node and all its subblocks.  */
   1918 
   1919 static void
   1920 instantiate_decls_1 (tree let)
   1921 {
   1922   tree t;
   1923 
   1924   for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
   1925     {
   1926       if (DECL_RTL_SET_P (t))
   1927 	instantiate_decl_rtl (DECL_RTL (t));
   1928       if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
   1929 	{
   1930 	  tree v = DECL_VALUE_EXPR (t);
   1931 	  walk_tree (&v, instantiate_expr, NULL, NULL);
   1932 	}
   1933     }
   1934 
   1935   /* Process all subblocks.  */
   1936   for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
   1937     instantiate_decls_1 (t);
   1938 }
   1939 
   1940 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
   1941    all virtual registers in their DECL_RTL's.  */
   1942 
   1943 static void
   1944 instantiate_decls (tree fndecl)
   1945 {
   1946   tree decl;
   1947   unsigned ix;
   1948 
   1949   /* Process all parameters of the function.  */
   1950   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
   1951     {
   1952       instantiate_decl_rtl (DECL_RTL (decl));
   1953       instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
   1954       if (DECL_HAS_VALUE_EXPR_P (decl))
   1955 	{
   1956 	  tree v = DECL_VALUE_EXPR (decl);
   1957 	  walk_tree (&v, instantiate_expr, NULL, NULL);
   1958 	}
   1959     }
   1960 
   1961   if ((decl = DECL_RESULT (fndecl))
   1962       && TREE_CODE (decl) == RESULT_DECL)
   1963     {
   1964       if (DECL_RTL_SET_P (decl))
   1965 	instantiate_decl_rtl (DECL_RTL (decl));
   1966       if (DECL_HAS_VALUE_EXPR_P (decl))
   1967 	{
   1968 	  tree v = DECL_VALUE_EXPR (decl);
   1969 	  walk_tree (&v, instantiate_expr, NULL, NULL);
   1970 	}
   1971     }
   1972 
   1973   /* Process the saved static chain if it exists.  */
   1974   decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
   1975   if (decl && DECL_HAS_VALUE_EXPR_P (decl))
   1976     instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
   1977 
   1978   /* Now process all variables defined in the function or its subblocks.  */
   1979   if (DECL_INITIAL (fndecl))
   1980     instantiate_decls_1 (DECL_INITIAL (fndecl));
   1981 
   1982   FOR_EACH_LOCAL_DECL (cfun, ix, decl)
   1983     if (DECL_RTL_SET_P (decl))
   1984       instantiate_decl_rtl (DECL_RTL (decl));
   1985   vec_free (cfun->local_decls);
   1986 }
   1987 
   1988 /* Pass through the INSNS of function FNDECL and convert virtual register
   1989    references to hard register references.  */
   1990 
   1991 static unsigned int
   1992 instantiate_virtual_regs (void)
   1993 {
   1994   rtx_insn *insn;
   1995 
   1996   /* Compute the offsets to use for this function.  */
   1997   in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
   1998   var_offset = targetm.starting_frame_offset ();
   1999   dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
   2000   out_arg_offset = STACK_POINTER_OFFSET;
   2001 #ifdef FRAME_POINTER_CFA_OFFSET
   2002   cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
   2003 #else
   2004   cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
   2005 #endif
   2006 
   2007   /* Initialize recognition, indicating that volatile is OK.  */
   2008   init_recog ();
   2009 
   2010   /* Scan through all the insns, instantiating every virtual register still
   2011      present.  */
   2012   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
   2013     if (INSN_P (insn))
   2014       {
   2015 	/* These patterns in the instruction stream can never be recognized.
   2016 	   Fortunately, they shouldn't contain virtual registers either.  */
   2017         if (GET_CODE (PATTERN (insn)) == USE
   2018 	    || GET_CODE (PATTERN (insn)) == CLOBBER
   2019 	    || GET_CODE (PATTERN (insn)) == ASM_INPUT
   2020 	    || DEBUG_MARKER_INSN_P (insn))
   2021 	  continue;
   2022 	else if (DEBUG_BIND_INSN_P (insn))
   2023 	  instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
   2024 	else
   2025 	  instantiate_virtual_regs_in_insn (insn);
   2026 
   2027 	if (insn->deleted ())
   2028 	  continue;
   2029 
   2030 	instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
   2031 
   2032 	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
   2033 	if (CALL_P (insn))
   2034 	  instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
   2035       }
   2036 
   2037   /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
   2038   instantiate_decls (current_function_decl);
   2039 
   2040   targetm.instantiate_decls ();
   2041 
   2042   /* Indicate that, from now on, assign_stack_local should use
   2043      frame_pointer_rtx.  */
   2044   virtuals_instantiated = 1;
   2045 
   2046   return 0;
   2047 }
   2048 
   2049 namespace {
   2050 
   2051 const pass_data pass_data_instantiate_virtual_regs =
   2052 {
   2053   RTL_PASS, /* type */
   2054   "vregs", /* name */
   2055   OPTGROUP_NONE, /* optinfo_flags */
   2056   TV_NONE, /* tv_id */
   2057   0, /* properties_required */
   2058   0, /* properties_provided */
   2059   0, /* properties_destroyed */
   2060   0, /* todo_flags_start */
   2061   0, /* todo_flags_finish */
   2062 };
   2063 
   2064 class pass_instantiate_virtual_regs : public rtl_opt_pass
   2065 {
   2066 public:
   2067   pass_instantiate_virtual_regs (gcc::context *ctxt)
   2068     : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
   2069   {}
   2070 
   2071   /* opt_pass methods: */
   2072   virtual unsigned int execute (function *)
   2073     {
   2074       return instantiate_virtual_regs ();
   2075     }
   2076 
   2077 }; // class pass_instantiate_virtual_regs
   2078 
   2079 } // anon namespace
   2080 
   2081 rtl_opt_pass *
   2082 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
   2083 {
   2084   return new pass_instantiate_virtual_regs (ctxt);
   2085 }
   2086 
   2087 
   2088 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   2090    This means a type for which function calls must pass an address to the
   2091    function or get an address back from the function.
   2092    EXP may be a type node or an expression (whose type is tested).  */
   2093 
   2094 int
   2095 aggregate_value_p (const_tree exp, const_tree fntype)
   2096 {
   2097   const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
   2098   int i, regno, nregs;
   2099   rtx reg;
   2100 
   2101   if (fntype)
   2102     switch (TREE_CODE (fntype))
   2103       {
   2104       case CALL_EXPR:
   2105 	{
   2106 	  tree fndecl = get_callee_fndecl (fntype);
   2107 	  if (fndecl)
   2108 	    fntype = TREE_TYPE (fndecl);
   2109 	  else if (CALL_EXPR_FN (fntype))
   2110 	    fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
   2111 	  else
   2112 	    /* For internal functions, assume nothing needs to be
   2113 	       returned in memory.  */
   2114 	    return 0;
   2115 	}
   2116 	break;
   2117       case FUNCTION_DECL:
   2118 	fntype = TREE_TYPE (fntype);
   2119 	break;
   2120       case FUNCTION_TYPE:
   2121       case METHOD_TYPE:
   2122         break;
   2123       case IDENTIFIER_NODE:
   2124 	fntype = NULL_TREE;
   2125 	break;
   2126       default:
   2127 	/* We don't expect other tree types here.  */
   2128 	gcc_unreachable ();
   2129       }
   2130 
   2131   if (VOID_TYPE_P (type))
   2132     return 0;
   2133 
   2134   /* If a record should be passed the same as its first (and only) member,
   2135      don't pass it as an aggregate.  */
   2136   if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
   2137     return aggregate_value_p (first_field (type), fntype);
   2138 
   2139   /* If the front end has decided that this needs to be passed by
   2140      reference, do so.  */
   2141   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
   2142       && DECL_BY_REFERENCE (exp))
   2143     return 1;
   2144 
   2145   /* Function types that are TREE_ADDRESSABLE force return in memory.  */
   2146   if (fntype && TREE_ADDRESSABLE (fntype))
   2147     return 1;
   2148 
   2149   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
   2150      and thus can't be returned in registers.  */
   2151   if (TREE_ADDRESSABLE (type))
   2152     return 1;
   2153 
   2154   if (TYPE_EMPTY_P (type))
   2155     return 0;
   2156 
   2157   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
   2158     return 1;
   2159 
   2160   if (targetm.calls.return_in_memory (type, fntype))
   2161     return 1;
   2162 
   2163   /* Make sure we have suitable call-clobbered regs to return
   2164      the value in; if not, we must return it in memory.  */
   2165   reg = hard_function_value (type, 0, fntype, 0);
   2166 
   2167   /* If we have something other than a REG (e.g. a PARALLEL), then assume
   2168      it is OK.  */
   2169   if (!REG_P (reg))
   2170     return 0;
   2171 
   2172   /* Use the default ABI if the type of the function isn't known.
   2173      The scheme for handling interoperability between different ABIs
   2174      requires us to be able to tell when we're calling a function with
   2175      a nondefault ABI.  */
   2176   const predefined_function_abi &abi = (fntype
   2177 					? fntype_abi (fntype)
   2178 					: default_function_abi);
   2179   regno = REGNO (reg);
   2180   nregs = hard_regno_nregs (regno, TYPE_MODE (type));
   2181   for (i = 0; i < nregs; i++)
   2182     if (!fixed_regs[regno + i] && !abi.clobbers_full_reg_p (regno + i))
   2183       return 1;
   2184 
   2185   return 0;
   2186 }
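
        /* Two illustrative cases (the exact answers are target-defined):
           for a plain 'int', the value fits in a call-clobbered return
           register, so this returns 0; for a 64-byte struct on a typical
           64-bit ABI, targetm.calls.return_in_memory returns true and this
           returns 1, forcing the caller to pass a return-slot address.  */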
   2187 
   2188 /* Return true if we should assign DECL a pseudo register; false if it
   2190    should live on the local stack.  */
   2191 
   2192 bool
   2193 use_register_for_decl (const_tree decl)
   2194 {
   2195   if (TREE_CODE (decl) == SSA_NAME)
   2196     {
   2197       /* We often try to use the SSA_NAME, instead of its underlying
   2198 	 decl, to get type information and guide decisions, to avoid
   2199 	 differences of behavior between anonymous and named
   2200 	 variables, but in this one case we have to go for the actual
   2201 	 variable if there is one.  The main reason is that, at least
   2202 	 at -O0, we want to place user variables on the stack, but we
   2203 	 don't mind using pseudos for anonymous or ignored temps.
   2204 	 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
   2205 	 should go in pseudos, whereas their corresponding variables
   2206 	 might have to go on the stack.  So, disregarding the decl
   2207 	 here would negatively impact debug info at -O0, enable
   2208 	 coalescing between SSA_NAMEs that ought to get different
   2209 	 stack/pseudo assignments, and get the incoming argument
   2210 	 processing thoroughly confused by PARM_DECLs expected to live
   2211 	 in stack slots but assigned to pseudos.  */
   2212       if (!SSA_NAME_VAR (decl))
   2213 	return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
   2214 	  && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
   2215 
   2216       decl = SSA_NAME_VAR (decl);
   2217     }
   2218 
   2219   /* Honor volatile.  */
   2220   if (TREE_SIDE_EFFECTS (decl))
   2221     return false;
   2222 
   2223   /* Honor addressability.  */
   2224   if (TREE_ADDRESSABLE (decl))
   2225     return false;
   2226 
   2227   /* RESULT_DECLs are a bit special in that they're assigned without
   2228      regard to use_register_for_decl, but we generally only store in
   2229      them.  If we coalesce their SSA NAMEs, we'd better return a
   2230      result that matches the assignment in expand_function_start.  */
   2231   if (TREE_CODE (decl) == RESULT_DECL)
   2232     {
   2233       /* If it's not an aggregate, we're going to use a REG or a
   2234 	 PARALLEL containing a REG.  */
   2235       if (!aggregate_value_p (decl, current_function_decl))
   2236 	return true;
   2237 
   2238       /* If expand_function_start determines the return value, we'll
   2239 	 use MEM if it's not by reference.  */
   2240       if (cfun->returns_pcc_struct
   2241 	  || (targetm.calls.struct_value_rtx
   2242 	      (TREE_TYPE (current_function_decl), 1)))
   2243 	return DECL_BY_REFERENCE (decl);
   2244 
   2245       /* Otherwise, we're taking an extra all.function_result_decl
   2246 	 argument.  It's set up in assign_parms_augmented_arg_list,
   2247 	 under the (negated) conditions above, and then it's used to
   2248 	 set up the RESULT_DECL rtl in assign_parms, after looping
   2249 	 over all parameters.  Now, if the RESULT_DECL is not by
   2250 	 reference, we'll use a MEM either way.  */
   2251       if (!DECL_BY_REFERENCE (decl))
   2252 	return false;
   2253 
   2254       /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
   2255 	 the function_result_decl's assignment.  Since it's a pointer,
   2256 	 we can short-circuit a number of the tests below, and we must
   2257 	 duplicate them because we don't have the function_result_decl
   2258 	 to test.  */
   2259       if (!targetm.calls.allocate_stack_slots_for_args ())
   2260 	return true;
   2261       /* We don't set DECL_IGNORED_P for the function_result_decl.  */
   2262       if (optimize)
   2263 	return true;
   2264       if (cfun->tail_call_marked)
   2265 	return true;
   2266       /* We don't set DECL_REGISTER for the function_result_decl.  */
   2267       return false;
   2268     }
   2269 
   2270   /* Only register-like things go in registers.  */
   2271   if (DECL_MODE (decl) == BLKmode)
   2272     return false;
   2273 
   2274   /* If -ffloat-store specified, don't put explicit float variables
   2275      into registers.  */
   2276   /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
   2277      propagates values across these stores, and it probably shouldn't.  */
   2278   if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
   2279     return false;
   2280 
   2281   if (!targetm.calls.allocate_stack_slots_for_args ())
   2282     return true;
   2283 
   2284   /* If we're not interested in tracking debugging information for
   2285      this decl, then we can certainly put it in a register.  */
   2286   if (DECL_IGNORED_P (decl))
   2287     return true;
   2288 
   2289   if (optimize)
   2290     return true;
   2291 
   2292   /* Thunks force a tail call even at -O0 so we need to avoid creating a
   2293      dangling reference in case the parameter is passed by reference.  */
   2294   if (TREE_CODE (decl) == PARM_DECL && cfun->tail_call_marked)
   2295     return true;
   2296 
   2297   if (!DECL_REGISTER (decl))
   2298     return false;
   2299 
   2300   /* When not optimizing, disregard register keyword for types that
   2301      could have methods, otherwise the methods won't be callable from
   2302      the debugger.  */
   2303   if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
   2304     return false;
   2305 
   2306   return true;
   2307 }
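
        /* For example, at -O0 a named user variable normally reaches the
           DECL_IGNORED_P test and returns false, so it gets a stack slot
           the debugger can find, while an ignored compiler temporary
           returns true and is given a pseudo; with optimization enabled,
           most decls that make it past the RESULT_DECL handling return
           true.  */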
   2308 
   2309 /* Structures to communicate between the subroutines of assign_parms.
   2310    The first holds data persistent across all parameters, the second
   2311    is cleared out for each parameter.  */
   2312 
   2313 struct assign_parm_data_all
   2314 {
   2315   /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
   2316      should become a job of the target or otherwise encapsulated.  */
   2317   CUMULATIVE_ARGS args_so_far_v;
   2318   cumulative_args_t args_so_far;
   2319   struct args_size stack_args_size;
   2320   tree function_result_decl;
   2321   tree orig_fnargs;
   2322   rtx_insn *first_conversion_insn;
   2323   rtx_insn *last_conversion_insn;
   2324   HOST_WIDE_INT pretend_args_size;
   2325   HOST_WIDE_INT extra_pretend_bytes;
   2326   int reg_parm_stack_space;
   2327 };
   2328 
   2329 struct assign_parm_data_one
   2330 {
   2331   tree nominal_type;
   2332   function_arg_info arg;
   2333   rtx entry_parm;
   2334   rtx stack_parm;
   2335   machine_mode nominal_mode;
   2336   machine_mode passed_mode;
   2337   struct locate_and_pad_arg_data locate;
   2338   int partial;
   2339 };
   2340 
   2341 /* A subroutine of assign_parms.  Initialize ALL.  */
   2342 
   2343 static void
   2344 assign_parms_initialize_all (struct assign_parm_data_all *all)
   2345 {
   2346   tree fntype ATTRIBUTE_UNUSED;
   2347 
   2348   memset (all, 0, sizeof (*all));
   2349 
   2350   fntype = TREE_TYPE (current_function_decl);
   2351 
   2352 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
   2353   INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
   2354 #else
   2355   INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
   2356 			current_function_decl, -1);
   2357 #endif
   2358   all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
   2359 
   2360 #ifdef INCOMING_REG_PARM_STACK_SPACE
   2361   all->reg_parm_stack_space
   2362     = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
   2363 #endif
   2364 }
   2365 
   2366 /* If ARGS contains entries with complex types, split each such entry into
   2367    two entries of the component type.  ARGS is modified in place; no new
   2368    vector is returned.  */
   2369 
   2370 static void
   2371 split_complex_args (vec<tree> *args)
   2372 {
   2373   unsigned i;
   2374   tree p;
   2375 
   2376   FOR_EACH_VEC_ELT (*args, i, p)
   2377     {
   2378       tree type = TREE_TYPE (p);
   2379       if (TREE_CODE (type) == COMPLEX_TYPE
   2380 	  && targetm.calls.split_complex_arg (type))
   2381 	{
   2382 	  tree decl;
   2383 	  tree subtype = TREE_TYPE (type);
   2384 	  bool addressable = TREE_ADDRESSABLE (p);
   2385 
   2386 	  /* Rewrite the PARM_DECL's type with its component.  */
   2387 	  p = copy_node (p);
   2388 	  TREE_TYPE (p) = subtype;
   2389 	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
   2390 	  SET_DECL_MODE (p, VOIDmode);
   2391 	  DECL_SIZE (p) = NULL;
   2392 	  DECL_SIZE_UNIT (p) = NULL;
   2393 	  /* If this arg must go in memory, put it in a pseudo here.
   2394 	     We can't allow it to go in memory as per normal parms,
   2395 	     because the usual place might not have the imag part
   2396 	     adjacent to the real part.  */
   2397 	  DECL_ARTIFICIAL (p) = addressable;
   2398 	  DECL_IGNORED_P (p) = addressable;
   2399 	  TREE_ADDRESSABLE (p) = 0;
   2400 	  layout_decl (p, 0);
   2401 	  (*args)[i] = p;
   2402 
   2403 	  /* Build a second synthetic decl.  */
   2404 	  decl = build_decl (EXPR_LOCATION (p),
   2405 			     PARM_DECL, NULL_TREE, subtype);
   2406 	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
   2407 	  DECL_ARTIFICIAL (decl) = addressable;
   2408 	  DECL_IGNORED_P (decl) = addressable;
   2409 	  layout_decl (decl, 0);
   2410 	  args->safe_insert (++i, decl);
   2411 	}
   2412     }
   2413 }
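
        /* For instance, when the target splits complex arguments, a
           parameter declared '_Complex double x' is rewritten as two
           'double' PARM_DECLs: the real part reuses x's slot in the
           vector, and a synthetic decl for the imaginary part is inserted
           immediately after it.  */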
   2414 
   2415 /* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   2416    the hidden struct return argument, and (abi willing) complex args.
   2417    Return the new parameter list.  */
   2418 
   2419 static vec<tree>
   2420 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
   2421 {
   2422   tree fndecl = current_function_decl;
   2423   tree fntype = TREE_TYPE (fndecl);
   2424   vec<tree> fnargs = vNULL;
   2425   tree arg;
   2426 
   2427   for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
   2428     fnargs.safe_push (arg);
   2429 
   2430   all->orig_fnargs = DECL_ARGUMENTS (fndecl);
   2431 
   2432   /* If struct value address is treated as the first argument, make it so.  */
   2433   if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
   2434       && ! cfun->returns_pcc_struct
   2435       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
   2436     {
   2437       tree type = build_pointer_type (TREE_TYPE (fntype));
   2438       tree decl;
   2439 
   2440       decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
   2441 			 PARM_DECL, get_identifier (".result_ptr"), type);
   2442       DECL_ARG_TYPE (decl) = type;
   2443       DECL_ARTIFICIAL (decl) = 1;
   2444       DECL_NAMELESS (decl) = 1;
   2445       TREE_CONSTANT (decl) = 1;
   2446       /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
   2447 	 changes, the end of the RESULT_DECL handling block in
   2448 	 use_register_for_decl must be adjusted to match.  */
   2449 
   2450       DECL_CHAIN (decl) = all->orig_fnargs;
   2451       all->orig_fnargs = decl;
   2452       fnargs.safe_insert (0, decl);
   2453 
   2454       all->function_result_decl = decl;
   2455     }
   2456 
   2457   /* If the target wants to split complex arguments into scalars, do so.  */
   2458   if (targetm.calls.split_complex_arg)
   2459     split_complex_args (&fnargs);
   2460 
   2461   return fnargs;
   2462 }
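
        /* A sketch of the effect: for

               struct big f (int x);

           on a target with no struct-value register, the parameter list
           (x) becomes (.result_ptr, x), where .result_ptr is the
           synthetic 'struct big *' PARM_DECL built above.  */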
   2463 
   2464 /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   2465    data for the parameter.  Incorporate ABI specifics such as pass-by-
   2466    reference and type promotion.  */
   2467 
   2468 static void
   2469 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
   2470 			     struct assign_parm_data_one *data)
   2471 {
   2472   int unsignedp;
   2473 
   2474 #ifndef BROKEN_VALUE_INITIALIZATION
   2475   *data = assign_parm_data_one ();
   2476 #else
   2477   /* Old versions of GCC used to miscompile the above by only initializing
   2478      the members with explicit constructors and copying garbage
   2479      to the other members.  */
   2480   assign_parm_data_one zero_data = {};
   2481   *data = zero_data;
   2482 #endif
   2483 
   2484   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
   2485   if (!cfun->stdarg)
   2486     data->arg.named = 1;  /* No variadic parms.  */
   2487   else if (DECL_CHAIN (parm))
   2488     data->arg.named = 1;  /* Not the last non-variadic parm. */
   2489   else if (targetm.calls.strict_argument_naming (all->args_so_far))
   2490     data->arg.named = 1;  /* Only variadic ones are unnamed.  */
   2491   else
   2492     data->arg.named = 0;  /* Treat as variadic.  */
   2493 
   2494   data->nominal_type = TREE_TYPE (parm);
   2495   data->arg.type = DECL_ARG_TYPE (parm);
   2496 
   2497   /* Look out for errors propagating this far.  Also, if the parameter's
   2498      type is void then its value doesn't matter.  */
   2499   if (TREE_TYPE (parm) == error_mark_node
   2500       /* This can happen after weird syntax errors
   2501 	 or if an enum type is defined among the parms.  */
   2502       || TREE_CODE (parm) != PARM_DECL
   2503       || data->arg.type == NULL
   2504       || VOID_TYPE_P (data->nominal_type))
   2505     {
   2506       data->nominal_type = data->arg.type = void_type_node;
   2507       data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
   2508       return;
   2509     }
   2510 
   2511   /* Find mode of arg as it is passed, and mode of arg as it should be
   2512      during execution of this function.  */
   2513   data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
   2514   data->nominal_mode = TYPE_MODE (data->nominal_type);
   2515 
   2516   /* If the parm is to be passed as a transparent union or record, use the
   2517      type of the first field for the tests below.  We have already verified
   2518      that the modes are the same.  */
   2519   if (RECORD_OR_UNION_TYPE_P (data->arg.type)
   2520       && TYPE_TRANSPARENT_AGGR (data->arg.type))
   2521     data->arg.type = TREE_TYPE (first_field (data->arg.type));
   2522 
   2523   /* See if this arg was passed by invisible reference.  */
   2524   if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
   2525     {
   2526       data->nominal_type = data->arg.type;
   2527       data->passed_mode = data->nominal_mode = data->arg.mode;
   2528     }
   2529 
   2530   /* Find mode as it is passed by the ABI.  */
   2531   unsignedp = TYPE_UNSIGNED (data->arg.type);
   2532   data->arg.mode
   2533     = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
   2534 			     TREE_TYPE (current_function_decl), 0);
   2535 }
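
        /* As an example of the final promotion step: on a target whose
           promote_function_mode widens subword integers, a 'short'
           parameter keeps nominal_mode == HImode (its mode within the
           function body) but gets arg.mode == SImode, the mode in which
           the ABI actually passes it.  */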
   2536 
   2537 /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
   2538 
   2539 static void
   2540 assign_parms_setup_varargs (struct assign_parm_data_all *all,
   2541 			    struct assign_parm_data_one *data, bool no_rtl)
   2542 {
   2543   int varargs_pretend_bytes = 0;
   2544 
   2545   function_arg_info last_named_arg = data->arg;
   2546   last_named_arg.named = true;
   2547   targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
   2548 					&varargs_pretend_bytes, no_rtl);
   2549 
   2550   /* If the back-end has requested extra stack space, record how much is
   2551      needed.  Do not change pretend_args_size otherwise since it may be
   2552      nonzero from an earlier partial argument.  */
   2553   if (varargs_pretend_bytes > 0)
   2554     all->pretend_args_size = varargs_pretend_bytes;
   2555 }
   2556 
   2557 /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   2558    the incoming location of the current parameter.  */
   2559 
   2560 static void
   2561 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
   2562 			    struct assign_parm_data_one *data)
   2563 {
   2564   HOST_WIDE_INT pretend_bytes = 0;
   2565   rtx entry_parm;
   2566   bool in_regs;
   2567 
   2568   if (data->arg.mode == VOIDmode)
   2569     {
   2570       data->entry_parm = data->stack_parm = const0_rtx;
   2571       return;
   2572     }
   2573 
   2574   targetm.calls.warn_parameter_passing_abi (all->args_so_far,
   2575 					    data->arg.type);
   2576 
   2577   entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
   2578 						    data->arg);
   2579   if (entry_parm == 0)
   2580     data->arg.mode = data->passed_mode;
   2581 
   2582   /* Determine parm's home in the stack, in case it arrives in the stack
   2583      or we should pretend it did.  Compute the stack position and rtx where
   2584      the argument arrives and its size.
   2585 
   2586      There is one complexity here:  If this was a parameter that would
   2587      have been passed in registers, but wasn't only because it is
   2588      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
   2589      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
   2590      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
   2591      as it was the previous time.  */
   2592   in_regs = (entry_parm != 0);
   2593 #ifdef STACK_PARMS_IN_REG_PARM_AREA
   2594   in_regs = true;
   2595 #endif
   2596   if (!in_regs && !data->arg.named)
   2597     {
   2598       if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
   2599 	{
   2600 	  rtx tem;
   2601 	  function_arg_info named_arg = data->arg;
   2602 	  named_arg.named = true;
   2603 	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
   2604 						     named_arg);
   2605 	  in_regs = tem != NULL;
   2606 	}
   2607     }
   2608 
   2609   /* If this parameter was passed both in registers and in the stack, use
   2610      the copy on the stack.  */
   2611   if (targetm.calls.must_pass_in_stack (data->arg))
   2612     entry_parm = 0;
   2613 
   2614   if (entry_parm)
   2615     {
   2616       int partial;
   2617 
   2618       partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
   2619       data->partial = partial;
   2620 
   2621       /* The caller might already have allocated stack space for the
   2622 	 register parameters.  */
   2623       if (partial != 0 && all->reg_parm_stack_space == 0)
   2624 	{
   2625 	  /* Part of this argument is passed in registers and part
   2626 	     is passed on the stack.  Ask the prologue code to extend
   2627 	     the stack part so that we can recreate the full value.
   2628 
   2629 	     PRETEND_BYTES is the size of the registers we need to store.
   2630 	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
   2631 	     stack space that the prologue should allocate.
   2632 
   2633 	     Internally, gcc assumes that the argument pointer is aligned
   2634 	     to STACK_BOUNDARY bits.  This is used both for alignment
   2635 	     optimizations (see init_emit) and to locate arguments that are
   2636 	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
   2637 	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
   2638 	     a stack boundary.  */
   2639 
   2640 	  /* We assume at most one partial arg, and it must be the first
   2641 	     argument on the stack.  */
   2642 	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
   2643 
   2644 	  pretend_bytes = partial;
   2645 	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
   2646 
   2647 	  /* We want to align relative to the actual stack pointer, so
   2648 	     don't include this in the stack size until later.  */
   2649 	  all->extra_pretend_bytes = all->pretend_args_size;
   2650 	}
   2651     }
   2652 
   2653   locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
   2654 		       all->reg_parm_stack_space,
   2655 		       entry_parm ? data->partial : 0, current_function_decl,
   2656 		       &all->stack_args_size, &data->locate);
   2657 
   2658   /* Update parm_stack_boundary if this parameter is passed in the
   2659      stack.  */
   2660   if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
   2661     crtl->parm_stack_boundary = data->locate.boundary;
   2662 
   2663   /* Adjust offsets to include the pretend args.  */
   2664   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
   2665   data->locate.slot_offset.constant += pretend_bytes;
   2666   data->locate.offset.constant += pretend_bytes;
   2667 
   2668   data->entry_parm = entry_parm;
   2669 }
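
        /* A made-up partial-argument example: if the ABI passes the first
           4 bytes of an argument in a register and the remainder on the
           stack, with no reserved register-save area, then PARTIAL == 4,
           and with STACK_BYTES == 8 the prologue is asked for
           CEIL_ROUND (4, 8) == 8 bytes of pretend args so that the full
           value can be reassembled contiguously in memory.  */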
   2670 
   2671 /* A subroutine of assign_parms.  If there is actually space on the stack
   2672    for this parm, count it in stack_args_size and return true.  */
   2673 
   2674 static bool
   2675 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
   2676 			   struct assign_parm_data_one *data)
   2677 {
   2678   /* Trivially true if we've no incoming register.  */
   2679   if (data->entry_parm == NULL)
   2680     ;
   2681   /* Also true if we're partially in registers and partially not,
   2682      since we've arranged to drop the entire argument on the stack.  */
   2683   else if (data->partial != 0)
   2684     ;
   2685   /* Also true if the target says that it's passed in both registers
   2686      and on the stack.  */
   2687   else if (GET_CODE (data->entry_parm) == PARALLEL
   2688 	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
   2689     ;
   2690   /* Also true if the target says that there's stack allocated for
   2691      all register parameters.  */
   2692   else if (all->reg_parm_stack_space > 0)
   2693     ;
   2694   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
   2695   else
   2696     return false;
   2697 
   2698   all->stack_args_size.constant += data->locate.size.constant;
   2699   if (data->locate.size.var)
   2700     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
   2701 
   2702   return true;
   2703 }
   2704 
   2705 /* A subroutine of assign_parms.  Given that this parameter is allocated
   2706    stack space by the ABI, find it.  */
   2707 
   2708 static void
   2709 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
   2710 {
   2711   rtx offset_rtx, stack_parm;
   2712   unsigned int align, boundary;
   2713 
   2714   /* If we're passing this arg using a reg, make its stack home the
   2715      aligned stack slot.  */
   2716   if (data->entry_parm)
   2717     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
   2718   else
   2719     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
   2720 
   2721   stack_parm = crtl->args.internal_arg_pointer;
   2722   if (offset_rtx != const0_rtx)
   2723     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
   2724   stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);
   2725 
   2726   if (!data->arg.pass_by_reference)
   2727     {
   2728       set_mem_attributes (stack_parm, parm, 1);
   2729       /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
   2730 	 whereas the promoted mode's size is needed.  */
   2731       if (data->arg.mode != BLKmode
   2732 	  && data->arg.mode != DECL_MODE (parm))
   2733 	{
   2734 	  set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
   2735 	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
   2736 	    {
   2737 	      poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
   2738 							 data->arg.mode);
   2739 	      if (maybe_ne (offset, 0))
   2740 		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
   2741 	    }
   2742 	}
   2743     }
   2744 
   2745   boundary = data->locate.boundary;
   2746   align = BITS_PER_UNIT;
   2747 
   2748   /* If we're padding upward, we know that the alignment of the slot
   2749      is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
   2750      intentionally forcing upward padding.  Otherwise we have to come
   2751      up with a guess at the alignment based on OFFSET_RTX.  */
   2752   poly_int64 offset;
   2753   if (data->locate.where_pad == PAD_NONE || data->entry_parm)
   2754     align = boundary;
   2755   else if (data->locate.where_pad == PAD_UPWARD)
   2756     {
   2757       align = boundary;
   2758       /* If the argument offset is actually more aligned than the nominal
   2759 	 stack slot boundary, take advantage of that excess alignment.
   2760 	 Don't make any assumptions if STACK_POINTER_OFFSET is in use.  */
   2761       if (poly_int_rtx_p (offset_rtx, &offset)
   2762 	  && known_eq (STACK_POINTER_OFFSET, 0))
   2763 	{
   2764 	  unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
   2765 	  if (offset_align == 0 || offset_align > STACK_BOUNDARY)
   2766 	    offset_align = STACK_BOUNDARY;
   2767 	  align = MAX (align, offset_align);
   2768 	}
   2769     }
   2770   else if (poly_int_rtx_p (offset_rtx, &offset))
   2771     {
   2772       align = least_bit_hwi (boundary);
   2773       unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
   2774       if (offset_align != 0)
   2775 	align = MIN (align, offset_align);
   2776     }
   2777   set_mem_align (stack_parm, align);
   2778 
   2779   if (data->entry_parm)
   2780     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
   2781 
   2782   data->stack_parm = stack_parm;
   2783 }
   2784 
   2785 /* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   2786    always valid and contiguous.  */
   2787 
   2788 static void
   2789 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
   2790 {
   2791   rtx entry_parm = data->entry_parm;
   2792   rtx stack_parm = data->stack_parm;
   2793 
   2794   /* If this parm was passed part in regs and part in memory, pretend it
   2795      arrived entirely in memory by pushing the register-part onto the stack.
   2796      In the special case of a DImode or DFmode that is split, we could put
   2797      it together in a pseudoreg directly, but for now that's not worth
   2798      bothering with.  */
   2799   if (data->partial != 0)
   2800     {
   2801       /* Handle calls that pass values in multiple non-contiguous
   2802 	 locations.  The Irix 6 ABI has examples of this.  */
   2803       if (GET_CODE (entry_parm) == PARALLEL)
   2804 	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
   2805 			  data->arg.type, int_size_in_bytes (data->arg.type));
   2806       else
   2807 	{
   2808 	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
   2809 	  move_block_from_reg (REGNO (entry_parm),
   2810 			       validize_mem (copy_rtx (stack_parm)),
   2811 			       data->partial / UNITS_PER_WORD);
   2812 	}
   2813 
   2814       entry_parm = stack_parm;
   2815     }
   2816 
   2817   /* If we didn't decide this parm came in a register, by default it came
   2818      on the stack.  */
   2819   else if (entry_parm == NULL)
   2820     entry_parm = stack_parm;
   2821 
   2822   /* When an argument is passed in multiple locations, we can't make use
   2823      of this information, but we can save some copying if the whole argument
   2824      is passed in a single register.  */
   2825   else if (GET_CODE (entry_parm) == PARALLEL
   2826 	   && data->nominal_mode != BLKmode
   2827 	   && data->passed_mode != BLKmode)
   2828     {
   2829       size_t i, len = XVECLEN (entry_parm, 0);
   2830 
   2831       for (i = 0; i < len; i++)
   2832 	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
   2833 	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
   2834 	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
   2835 		== data->passed_mode)
   2836 	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
   2837 	  {
   2838 	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
   2839 	    break;
   2840 	  }
   2841     }
   2842 
   2843   data->entry_parm = entry_parm;
   2844 }
   2845 
   2846 /* A subroutine of assign_parms.  Reconstitute any values which were
   2847    passed in multiple registers and would fit in a single register.  */
   2848 
   2849 static void
   2850 assign_parm_remove_parallels (struct assign_parm_data_one *data)
   2851 {
   2852   rtx entry_parm = data->entry_parm;
   2853 
   2854   /* Convert the PARALLEL to a REG of the same mode as the parallel.
   2855      This can be done with register operations rather than on the
   2856      stack, even if we will store the reconstituted parameter on the
   2857      stack later.  */
   2858   if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
   2859     {
   2860       rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
   2861       emit_group_store (parmreg, entry_parm, data->arg.type,
   2862 			GET_MODE_SIZE (GET_MODE (entry_parm)));
   2863       entry_parm = parmreg;
   2864     }
   2865 
   2866   data->entry_parm = entry_parm;
   2867 }
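
        /* Schematically, a DImode argument arriving as

               (parallel:DI [(expr_list (reg:SI 3) (const_int 0))
                             (expr_list (reg:SI 4) (const_int 4))])

           is group-stored into a fresh DImode pseudo here, so the rest of
           parameter setup sees a single register again.  */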
   2868 
   2869 /* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   2870    always valid and properly aligned.  */
   2871 
   2872 static void
   2873 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
   2874 {
   2875   rtx stack_parm = data->stack_parm;
   2876 
   2877   /* If we can't trust the parm stack slot to be aligned enough for its
   2878      ultimate type, don't use that slot after entry.  We'll make another
   2879      stack slot, if we need one.  */
   2880   if (stack_parm
   2881       && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
   2882 	   && ((optab_handler (movmisalign_optab, data->nominal_mode)
   2883 		!= CODE_FOR_nothing)
   2884 	       || targetm.slow_unaligned_access (data->nominal_mode,
   2885 						 MEM_ALIGN (stack_parm))))
   2886 	  || (data->nominal_type
   2887 	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
   2888 	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
   2889     stack_parm = NULL;
   2890 
   2891   /* If parm was passed in memory, and we need to convert it on entry,
   2892      don't store it back in that same slot.  */
   2893   else if (data->entry_parm == stack_parm
   2894 	   && data->nominal_mode != BLKmode
   2895 	   && data->nominal_mode != data->passed_mode)
   2896     stack_parm = NULL;
   2897 
   2898   /* If stack protection is in effect for this function, don't leave any
   2899      pointers in their passed stack slots.  */
   2900   else if (crtl->stack_protect_guard
   2901 	   && (flag_stack_protect == SPCT_FLAG_ALL
   2902 	       || data->arg.pass_by_reference
   2903 	       || POINTER_TYPE_P (data->nominal_type)))
   2904     stack_parm = NULL;
   2905 
   2906   data->stack_parm = stack_parm;
   2907 }
   2908 
   2909 /* A subroutine of assign_parms.  Return true if the current parameter
   2910    should be stored as a BLKmode in the current frame.  */
   2911 
   2912 static bool
   2913 assign_parm_setup_block_p (struct assign_parm_data_one *data)
   2914 {
   2915   if (data->nominal_mode == BLKmode)
   2916     return true;
   2917   if (GET_MODE (data->entry_parm) == BLKmode)
   2918     return true;
   2919 
   2920 #ifdef BLOCK_REG_PADDING
   2921   /* Only assign_parm_setup_block knows how to deal with register arguments
   2922      that are padded at the least significant end.  */
   2923   if (REG_P (data->entry_parm)
   2924       && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
   2925       && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
   2926 	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
   2927     return true;
   2928 #endif
   2929 
   2930   return false;
   2931 }
   2932 
   2933 /* A subroutine of assign_parms.  Arrange for the parameter to be
   2934    present and valid in DATA->STACK_RTL.  */
   2935 
   2936 static void
   2937 assign_parm_setup_block (struct assign_parm_data_all *all,
   2938 			 tree parm, struct assign_parm_data_one *data)
   2939 {
   2940   rtx entry_parm = data->entry_parm;
   2941   rtx stack_parm = data->stack_parm;
   2942   rtx target_reg = NULL_RTX;
   2943   bool in_conversion_seq = false;
   2944   HOST_WIDE_INT size;
   2945   HOST_WIDE_INT size_stored;
   2946 
   2947   if (GET_CODE (entry_parm) == PARALLEL)
   2948     entry_parm = emit_group_move_into_temps (entry_parm);
   2949 
   2950   /* If we want the parameter in a pseudo, don't use a stack slot.  */
   2951   if (is_gimple_reg (parm) && use_register_for_decl (parm))
   2952     {
   2953       tree def = ssa_default_def (cfun, parm);
   2954       gcc_assert (def);
   2955       machine_mode mode = promote_ssa_mode (def, NULL);
   2956       rtx reg = gen_reg_rtx (mode);
   2957       if (GET_CODE (reg) != CONCAT)
   2958 	stack_parm = reg;
   2959       else
   2960 	{
   2961 	  target_reg = reg;
   2962 	  /* Avoid allocating a stack slot, if there isn't one
   2963 	     preallocated by the ABI.  It might seem like we should
   2964 	     always prefer a pseudo, but converting between
   2965 	     floating-point and integer modes goes through the stack
   2966 	     on various machines, so it's better to use the reserved
   2967 	     stack slot than to risk wasting it and allocating more
   2968 	     for the conversion.  */
   2969 	  if (stack_parm == NULL_RTX)
   2970 	    {
   2971 	      int save = generating_concat_p;
   2972 	      generating_concat_p = 0;
   2973 	      stack_parm = gen_reg_rtx (mode);
   2974 	      generating_concat_p = save;
   2975 	    }
   2976 	}
   2977       data->stack_parm = NULL;
   2978     }
   2979 
   2980   size = int_size_in_bytes (data->arg.type);
   2981   size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
   2982   if (stack_parm == 0)
   2983     {
   2984       HOST_WIDE_INT parm_align
   2985 	= (STRICT_ALIGNMENT
   2986 	   ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));
   2987 
   2988       SET_DECL_ALIGN (parm, parm_align);
   2989       if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
   2990 	{
   2991 	  rtx allocsize = gen_int_mode (size_stored, Pmode);
   2992 	  get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
   2993 	  stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
   2994 					   MAX_SUPPORTED_STACK_ALIGNMENT);
   2995 	  rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
   2996 					    DECL_ALIGN (parm));
   2997 	  mark_reg_pointer (addr, DECL_ALIGN (parm));
   2998 	  stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
   2999 	  MEM_NOTRAP_P (stack_parm) = 1;
   3000 	}
   3001       else
   3002 	stack_parm = assign_stack_local (BLKmode, size_stored,
   3003 					 DECL_ALIGN (parm));
   3004       if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
   3005 	PUT_MODE (stack_parm, GET_MODE (entry_parm));
   3006       set_mem_attributes (stack_parm, parm, 1);
   3007     }
   3008 
   3009   /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
   3010      calls that pass values in multiple non-contiguous locations.  */
   3011   if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
   3012     {
   3013       rtx mem;
   3014 
   3015       /* Note that we will be storing an integral number of words.
   3016 	 So we have to be careful to ensure that we allocate an
   3017 	 integral number of words.  We do this above when we call
   3018 	 assign_stack_local if space was not allocated in the argument
   3019 	 list.  If it was, this will not work if PARM_BOUNDARY is not
   3020 	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
    3021 	 if it becomes a problem.  The exception is when a BLKmode
    3022 	 argument arrives in registers not conforming to word_mode.  */
   3023 
   3024       if (data->stack_parm == 0)
   3025 	;
   3026       else if (GET_CODE (entry_parm) == PARALLEL)
   3027 	;
   3028       else
   3029 	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
   3030 
   3031       mem = validize_mem (copy_rtx (stack_parm));
   3032 
   3033       /* Handle values in multiple non-contiguous locations.  */
   3034       if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
   3035 	emit_group_store (mem, entry_parm, data->arg.type, size);
   3036       else if (GET_CODE (entry_parm) == PARALLEL)
   3037 	{
   3038 	  push_to_sequence2 (all->first_conversion_insn,
   3039 			     all->last_conversion_insn);
   3040 	  emit_group_store (mem, entry_parm, data->arg.type, size);
   3041 	  all->first_conversion_insn = get_insns ();
   3042 	  all->last_conversion_insn = get_last_insn ();
   3043 	  end_sequence ();
   3044 	  in_conversion_seq = true;
   3045 	}
   3046 
   3047       else if (size == 0)
   3048 	;
   3049 
   3050       /* If SIZE is that of a mode no bigger than a word, just use
   3051 	 that mode's store operation.  */
   3052       else if (size <= UNITS_PER_WORD)
   3053 	{
   3054 	  unsigned int bits = size * BITS_PER_UNIT;
   3055 	  machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
   3056 
   3057 	  if (mode != BLKmode
   3058 #ifdef BLOCK_REG_PADDING
   3059 	      && (size == UNITS_PER_WORD
   3060 		  || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
   3061 		      != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
   3062 #endif
   3063 	      )
   3064 	    {
   3065 	      rtx reg;
   3066 
   3067 	      /* We are really truncating a word_mode value containing
   3068 		 SIZE bytes into a value of mode MODE.  If such an
   3069 		 operation requires no actual instructions, we can refer
   3070 		 to the value directly in mode MODE, otherwise we must
   3071 		 start with the register in word_mode and explicitly
   3072 		 convert it.  */
   3073 	      if (mode == word_mode
   3074 		  || TRULY_NOOP_TRUNCATION_MODES_P (mode, word_mode))
   3075 		reg = gen_rtx_REG (mode, REGNO (entry_parm));
   3076 	      else
   3077 		{
   3078 		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
   3079 		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
   3080 		}
   3081 
   3082 	      /* We use adjust_address to get a new MEM with the mode
   3083 		 changed.  adjust_address is better than change_address
   3084 		 for this purpose because adjust_address does not lose
   3085 		 the MEM_EXPR associated with the MEM.
   3086 
    3087 		 If the MEM_EXPR is lost, then DSE must assume the MEM
    3088 		 escapes, and so cannot eliminate stores to it.  */
   3089 	      emit_move_insn (adjust_address (mem, mode, 0), reg);
   3090 	    }
   3091 
   3092 #ifdef BLOCK_REG_PADDING
   3093 	  /* Storing the register in memory as a full word, as
   3094 	     move_block_from_reg below would do, and then using the
   3095 	     MEM in a smaller mode, has the effect of shifting right
   3096 	     if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
   3097 	     shifting must be explicit.  */
   3098 	  else if (!MEM_P (mem))
   3099 	    {
   3100 	      rtx x;
   3101 
   3102 	      /* If the assert below fails, we should have taken the
   3103 		 mode != BLKmode path above, unless we have downward
   3104 		 padding of smaller-than-word arguments on a machine
   3105 		 with little-endian bytes, which would likely require
   3106 		 additional changes to work correctly.  */
   3107 	      gcc_checking_assert (BYTES_BIG_ENDIAN
   3108 				   && (BLOCK_REG_PADDING (mode,
   3109 							  data->arg.type, 1)
   3110 				       == PAD_UPWARD));
   3111 
   3112 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
   3113 
   3114 	      x = gen_rtx_REG (word_mode, REGNO (entry_parm));
   3115 	      x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
   3116 				NULL_RTX, 1);
   3117 	      x = force_reg (word_mode, x);
   3118 	      x = gen_lowpart_SUBREG (GET_MODE (mem), x);
   3119 
   3120 	      emit_move_insn (mem, x);
   3121 	    }
   3122 #endif
   3123 
   3124 	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
   3125 	     machine must be aligned to the left before storing
   3126 	     to memory.  Note that the previous test doesn't
   3127 	     handle all cases (e.g. SIZE == 3).  */
   3128 	  else if (size != UNITS_PER_WORD
   3129 #ifdef BLOCK_REG_PADDING
   3130 		   && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
   3131 		       == PAD_DOWNWARD)
   3132 #else
   3133 		   && BYTES_BIG_ENDIAN
   3134 #endif
   3135 		   )
   3136 	    {
   3137 	      rtx tem, x;
   3138 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
   3139 	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
   3140 
   3141 	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
   3142 	      tem = change_address (mem, word_mode, 0);
   3143 	      emit_move_insn (tem, x);
   3144 	    }
   3145 	  else
   3146 	    move_block_from_reg (REGNO (entry_parm), mem,
   3147 				 size_stored / UNITS_PER_WORD);
   3148 	}
   3149       else if (!MEM_P (mem))
   3150 	{
   3151 	  gcc_checking_assert (size > UNITS_PER_WORD);
   3152 #ifdef BLOCK_REG_PADDING
   3153 	  gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
   3154 						  data->arg.type, 0)
   3155 			       == PAD_UPWARD);
   3156 #endif
   3157 	  emit_move_insn (mem, entry_parm);
   3158 	}
   3159       else
   3160 	move_block_from_reg (REGNO (entry_parm), mem,
   3161 			     size_stored / UNITS_PER_WORD);
   3162     }
   3163   else if (data->stack_parm == 0 && !TYPE_EMPTY_P (data->arg.type))
   3164     {
   3165       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
   3166       emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
   3167 		       BLOCK_OP_NORMAL);
   3168       all->first_conversion_insn = get_insns ();
   3169       all->last_conversion_insn = get_last_insn ();
   3170       end_sequence ();
   3171       in_conversion_seq = true;
   3172     }
   3173 
   3174   if (target_reg)
   3175     {
   3176       if (!in_conversion_seq)
   3177 	emit_move_insn (target_reg, stack_parm);
   3178       else
   3179 	{
   3180 	  push_to_sequence2 (all->first_conversion_insn,
   3181 			     all->last_conversion_insn);
   3182 	  emit_move_insn (target_reg, stack_parm);
   3183 	  all->first_conversion_insn = get_insns ();
   3184 	  all->last_conversion_insn = get_last_insn ();
   3185 	  end_sequence ();
   3186 	}
   3187       stack_parm = target_reg;
   3188     }
   3189 
   3190   data->stack_parm = stack_parm;
   3191   set_parm_rtl (parm, stack_parm);
   3192 }
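
/* A worked instance of the BLOCK_REG_PADDING handling above, assuming
   a big-endian 64-bit target (UNITS_PER_WORD == 8) that pads small
   aggregates downward: a 3-byte parameter arriving in a register sits
   in the least significant bytes, so before being stored through a MEM
   in word_mode the value is shifted left by
   (UNITS_PER_WORD - 3) * BITS_PER_UNIT == 40 bits to left-align it.  */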
   3193 
   3194 /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   3195    parameter.  Get it there.  Perform all ABI specified conversions.  */
   3196 
   3197 static void
   3198 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
   3199 		       struct assign_parm_data_one *data)
   3200 {
   3201   rtx parmreg, validated_mem;
   3202   rtx equiv_stack_parm;
   3203   machine_mode promoted_nominal_mode;
   3204   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
   3205   bool did_conversion = false;
   3206   bool need_conversion, moved;
   3207   enum insn_code icode;
   3208   rtx rtl;
   3209 
   3210   /* Store the parm in a pseudoregister during the function, but we may
   3211      need to do it in a wider mode.  Using 2 here makes the result
   3212      consistent with promote_decl_mode and thus expand_expr_real_1.  */
   3213   promoted_nominal_mode
   3214     = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
   3215 			     TREE_TYPE (current_function_decl), 2);
   3216 
   3217   parmreg = gen_reg_rtx (promoted_nominal_mode);
   3218   if (!DECL_ARTIFICIAL (parm))
   3219     mark_user_reg (parmreg);
   3220 
   3221   /* If this was an item that we received a pointer to,
   3222      set rtl appropriately.  */
   3223   if (data->arg.pass_by_reference)
   3224     {
   3225       rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
   3226       set_mem_attributes (rtl, parm, 1);
   3227     }
   3228   else
   3229     rtl = parmreg;
   3230 
   3231   assign_parm_remove_parallels (data);
   3232 
   3233   /* Copy the value into the register, thus bridging between
   3234      assign_parm_find_data_types and expand_expr_real_1.  */
   3235 
   3236   equiv_stack_parm = data->stack_parm;
   3237   validated_mem = validize_mem (copy_rtx (data->entry_parm));
   3238 
   3239   need_conversion = (data->nominal_mode != data->passed_mode
   3240 		     || promoted_nominal_mode != data->arg.mode);
   3241   moved = false;
   3242 
   3243   if (need_conversion
   3244       && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
   3245       && data->nominal_mode == data->passed_mode
   3246       && data->nominal_mode == GET_MODE (data->entry_parm))
   3247     {
   3248       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
   3249 	 mode, by the caller.  We now have to convert it to
   3250 	 NOMINAL_MODE, if different.  However, PARMREG may be in
   3251 	 a different mode than NOMINAL_MODE if it is being stored
   3252 	 promoted.
   3253 
   3254 	 If ENTRY_PARM is a hard register, it might be in a register
   3255 	 not valid for operating in its mode (e.g., an odd-numbered
   3256 	 register for a DFmode).  In that case, moves are the only
    3257 	 thing valid, so we can't convert directly from there.  This
    3258 	 occurs when the calling sequence allows such misaligned
    3259 	 usage.
   3260 
   3261 	 In addition, the conversion may involve a call, which could
   3262 	 clobber parameters which haven't been copied to pseudo
   3263 	 registers yet.
   3264 
   3265 	 First, we try to emit an insn which performs the necessary
   3266 	 conversion.  We verify that this insn does not clobber any
   3267 	 hard registers.  */
   3268 
   3269       rtx op0, op1;
   3270 
   3271       icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
   3272 			    unsignedp);
   3273 
   3274       op0 = parmreg;
   3275       op1 = validated_mem;
   3276       if (icode != CODE_FOR_nothing
   3277 	  && insn_operand_matches (icode, 0, op0)
   3278 	  && insn_operand_matches (icode, 1, op1))
   3279 	{
   3280 	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
   3281 	  rtx_insn *insn, *insns;
   3282 	  rtx t = op1;
   3283 	  HARD_REG_SET hardregs;
   3284 
   3285 	  start_sequence ();
   3286 	  /* If op1 is a hard register that is likely spilled, first
    3287 	     force it into a pseudo; otherwise the combiner might extend
   3288 	     its lifetime too much.  */
   3289 	  if (GET_CODE (t) == SUBREG)
   3290 	    t = SUBREG_REG (t);
   3291 	  if (REG_P (t)
   3292 	      && HARD_REGISTER_P (t)
   3293 	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
   3294 	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
   3295 	    {
   3296 	      t = gen_reg_rtx (GET_MODE (op1));
   3297 	      emit_move_insn (t, op1);
   3298 	    }
   3299 	  else
   3300 	    t = op1;
   3301 	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
   3302 					   data->passed_mode, unsignedp);
   3303 	  emit_insn (pat);
   3304 	  insns = get_insns ();
   3305 
   3306 	  moved = true;
   3307 	  CLEAR_HARD_REG_SET (hardregs);
   3308 	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
   3309 	    {
   3310 	      if (INSN_P (insn))
   3311 		note_stores (insn, record_hard_reg_sets, &hardregs);
   3312 	      if (!hard_reg_set_empty_p (hardregs))
   3313 		moved = false;
   3314 	    }
   3315 
   3316 	  end_sequence ();
   3317 
   3318 	  if (moved)
   3319 	    {
   3320 	      emit_insn (insns);
   3321 	      if (equiv_stack_parm != NULL_RTX)
   3322 		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
   3323 						  equiv_stack_parm);
   3324 	    }
   3325 	}
   3326     }
   3327 
   3328   if (moved)
   3329     /* Nothing to do.  */
   3330     ;
   3331   else if (need_conversion)
   3332     {
   3333       /* We did not have an insn to convert directly, or the sequence
   3334 	 generated appeared unsafe.  We must first copy the parm to a
   3335 	 pseudo reg, and save the conversion until after all
   3336 	 parameters have been moved.  */
   3337 
   3338       int save_tree_used;
   3339       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
   3340 
   3341       emit_move_insn (tempreg, validated_mem);
   3342 
   3343       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
   3344       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
   3345 
   3346       if (partial_subreg_p (tempreg)
   3347 	  && GET_MODE (tempreg) == data->nominal_mode
   3348 	  && REG_P (SUBREG_REG (tempreg))
   3349 	  && data->nominal_mode == data->passed_mode
   3350 	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
   3351 	{
   3352 	  /* The argument is already sign/zero extended, so note it
   3353 	     into the subreg.  */
   3354 	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
   3355 	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
   3356 	}
   3357 
   3358       /* TREE_USED gets set erroneously during expand_assignment.  */
   3359       save_tree_used = TREE_USED (parm);
   3360       SET_DECL_RTL (parm, rtl);
   3361       expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
   3362       SET_DECL_RTL (parm, NULL_RTX);
   3363       TREE_USED (parm) = save_tree_used;
   3364       all->first_conversion_insn = get_insns ();
   3365       all->last_conversion_insn = get_last_insn ();
   3366       end_sequence ();
   3367 
   3368       did_conversion = true;
   3369     }
   3370   else if (MEM_P (data->entry_parm)
   3371 	   && GET_MODE_ALIGNMENT (promoted_nominal_mode)
   3372 	      > MEM_ALIGN (data->entry_parm)
   3373 	   && (((icode = optab_handler (movmisalign_optab,
   3374 					promoted_nominal_mode))
   3375 		!= CODE_FOR_nothing)
   3376 	       || targetm.slow_unaligned_access (promoted_nominal_mode,
   3377 						 MEM_ALIGN (data->entry_parm))))
   3378     {
   3379       if (icode != CODE_FOR_nothing)
   3380 	emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
   3381       else
   3382 	rtl = parmreg = extract_bit_field (validated_mem,
   3383 			GET_MODE_BITSIZE (promoted_nominal_mode), 0,
   3384 			unsignedp, parmreg,
   3385 			promoted_nominal_mode, VOIDmode, false, NULL);
   3386     }
   3387   else
   3388     emit_move_insn (parmreg, validated_mem);
   3389 
   3390   /* If we were passed a pointer but the actual value can live in a register,
   3391      retrieve it and use it directly.  Note that we cannot use nominal_mode,
    3392      because it will have been set to Pmode above; we must use the actual mode
   3393      of the parameter instead.  */
   3394   if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
   3395     {
   3396       /* Use a stack slot for debugging purposes if possible.  */
   3397       if (use_register_for_decl (parm))
   3398 	{
   3399 	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
   3400 	  mark_user_reg (parmreg);
   3401 	}
   3402       else
   3403 	{
   3404 	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
   3405 					    TYPE_MODE (TREE_TYPE (parm)),
   3406 					    TYPE_ALIGN (TREE_TYPE (parm)));
   3407 	  parmreg
   3408 	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
   3409 				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
   3410 				  align);
   3411 	  set_mem_attributes (parmreg, parm, 1);
   3412 	}
   3413 
   3414       /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
   3415 	 the debug info in case it is not legitimate.  */
   3416       if (GET_MODE (parmreg) != GET_MODE (rtl))
   3417 	{
   3418 	  rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
   3419 	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
   3420 
   3421 	  push_to_sequence2 (all->first_conversion_insn,
   3422 			     all->last_conversion_insn);
   3423 	  emit_move_insn (tempreg, rtl);
   3424 	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
   3425 	  emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
   3426 			  tempreg);
   3427 	  all->first_conversion_insn = get_insns ();
   3428 	  all->last_conversion_insn = get_last_insn ();
   3429 	  end_sequence ();
   3430 
   3431 	  did_conversion = true;
   3432 	}
   3433       else
   3434 	emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
   3435 
   3436       rtl = parmreg;
   3437 
   3438       /* STACK_PARM is the pointer, not the parm, and PARMREG is
   3439 	 now the parm.  */
   3440       data->stack_parm = NULL;
   3441     }
   3442 
   3443   set_parm_rtl (parm, rtl);
   3444 
   3445   /* Mark the register as eliminable if we did no conversion and it was
   3446      copied from memory at a fixed offset, and the arg pointer was not
   3447      copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
   3448      offset formed an invalid address, such memory-equivalences as we
   3449      make here would screw up life analysis for it.  */
   3450   if (data->nominal_mode == data->passed_mode
   3451       && !did_conversion
   3452       && data->stack_parm != 0
   3453       && MEM_P (data->stack_parm)
   3454       && data->locate.offset.var == 0
   3455       && reg_mentioned_p (virtual_incoming_args_rtx,
   3456 			  XEXP (data->stack_parm, 0)))
   3457     {
   3458       rtx_insn *linsn = get_last_insn ();
   3459       rtx_insn *sinsn;
   3460       rtx set;
   3461 
   3462       /* Mark complex types separately.  */
   3463       if (GET_CODE (parmreg) == CONCAT)
   3464 	{
   3465 	  scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
   3466 	  int regnor = REGNO (XEXP (parmreg, 0));
   3467 	  int regnoi = REGNO (XEXP (parmreg, 1));
   3468 	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
   3469 	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
   3470 					  GET_MODE_SIZE (submode));
   3471 
   3472 	  /* Scan backwards for the set of the real and
   3473 	     imaginary parts.  */
   3474 	  for (sinsn = linsn; sinsn != 0;
   3475 	       sinsn = prev_nonnote_insn (sinsn))
   3476 	    {
   3477 	      set = single_set (sinsn);
   3478 	      if (set == 0)
   3479 		continue;
   3480 
   3481 	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
   3482 		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
   3483 	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
   3484 		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
   3485 	    }
   3486 	}
   3487       else
   3488 	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
   3489     }
   3490 
    3491   /* For a pointer data type, suggest a pointer register.  */
   3492   if (POINTER_TYPE_P (TREE_TYPE (parm)))
   3493     mark_reg_pointer (parmreg,
   3494 		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
   3495 }
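
/* Sketch of the promotion bridged above (the target behavior is
   assumed, not guaranteed): on a machine whose promote_function_mode
   widens sub-word integers, e.g.

       short g (short x) { return x + 1; }

   X arrives already extended to SImode while its nominal_mode is
   HImode; PARMREG is allocated in the promoted mode, and the
   extend/convert logic above reconciles the two views without
   clobbering other incoming hard registers.  */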
   3496 
   3497 /* A subroutine of assign_parms.  Allocate stack space to hold the current
   3498    parameter.  Get it there.  Perform all ABI specified conversions.  */
   3499 
   3500 static void
   3501 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
   3502 		         struct assign_parm_data_one *data)
   3503 {
   3504   /* Value must be stored in the stack slot STACK_PARM during function
   3505      execution.  */
   3506   bool to_conversion = false;
   3507 
   3508   assign_parm_remove_parallels (data);
   3509 
   3510   if (data->arg.mode != data->nominal_mode)
   3511     {
   3512       /* Conversion is required.  */
   3513       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
   3514 
   3515       emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
   3516 
   3517       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
   3518       to_conversion = true;
   3519 
   3520       data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
   3521 					  TYPE_UNSIGNED (TREE_TYPE (parm)));
   3522 
   3523       if (data->stack_parm)
   3524 	{
   3525 	  poly_int64 offset
   3526 	    = subreg_lowpart_offset (data->nominal_mode,
   3527 				     GET_MODE (data->stack_parm));
   3528 	  /* ??? This may need a big-endian conversion on sparc64.  */
   3529 	  data->stack_parm
   3530 	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
   3531 	  if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
   3532 	    set_mem_offset (data->stack_parm,
   3533 			    MEM_OFFSET (data->stack_parm) + offset);
   3534 	}
   3535     }
   3536 
   3537   if (data->entry_parm != data->stack_parm)
   3538     {
   3539       rtx src, dest;
   3540 
   3541       if (data->stack_parm == 0)
   3542 	{
   3543 	  int align = STACK_SLOT_ALIGNMENT (data->arg.type,
   3544 					    GET_MODE (data->entry_parm),
   3545 					    TYPE_ALIGN (data->arg.type));
   3546 	  if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
   3547 	      && ((optab_handler (movmisalign_optab,
   3548 				  GET_MODE (data->entry_parm))
   3549 		   != CODE_FOR_nothing)
   3550 		  || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
   3551 						    align)))
   3552 	    align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
   3553 	  data->stack_parm
   3554 	    = assign_stack_local (GET_MODE (data->entry_parm),
   3555 				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
   3556 				  align);
   3557 	  align = MEM_ALIGN (data->stack_parm);
   3558 	  set_mem_attributes (data->stack_parm, parm, 1);
   3559 	  set_mem_align (data->stack_parm, align);
   3560 	}
   3561 
   3562       dest = validize_mem (copy_rtx (data->stack_parm));
   3563       src = validize_mem (copy_rtx (data->entry_parm));
   3564 
   3565       if (TYPE_EMPTY_P (data->arg.type))
   3566 	/* Empty types don't really need to be copied.  */;
   3567       else if (MEM_P (src))
   3568 	{
   3569 	  /* Use a block move to handle potentially misaligned entry_parm.  */
   3570 	  if (!to_conversion)
   3571 	    push_to_sequence2 (all->first_conversion_insn,
   3572 			       all->last_conversion_insn);
   3573 	  to_conversion = true;
   3574 
   3575 	  emit_block_move (dest, src,
   3576 			   GEN_INT (int_size_in_bytes (data->arg.type)),
   3577 			   BLOCK_OP_NORMAL);
   3578 	}
   3579       else
   3580 	{
   3581 	  if (!REG_P (src))
   3582 	    src = force_reg (GET_MODE (src), src);
   3583 	  emit_move_insn (dest, src);
   3584 	}
   3585     }
   3586 
   3587   if (to_conversion)
   3588     {
   3589       all->first_conversion_insn = get_insns ();
   3590       all->last_conversion_insn = get_last_insn ();
   3591       end_sequence ();
   3592     }
   3593 
   3594   set_parm_rtl (parm, data->stack_parm);
   3595 }
   3596 
   3597 /* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   3598    undo the frobbing that we did in assign_parms_augmented_arg_list.  */
   3599 
   3600 static void
   3601 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
   3602 			      vec<tree> fnargs)
   3603 {
   3604   tree parm;
   3605   tree orig_fnargs = all->orig_fnargs;
   3606   unsigned i = 0;
   3607 
   3608   for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
   3609     {
   3610       if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
   3611 	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
   3612 	{
   3613 	  rtx tmp, real, imag;
   3614 	  scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
   3615 
   3616 	  real = DECL_RTL (fnargs[i]);
   3617 	  imag = DECL_RTL (fnargs[i + 1]);
   3618 	  if (inner != GET_MODE (real))
   3619 	    {
   3620 	      real = gen_lowpart_SUBREG (inner, real);
   3621 	      imag = gen_lowpart_SUBREG (inner, imag);
   3622 	    }
   3623 
   3624 	  if (TREE_ADDRESSABLE (parm))
   3625 	    {
   3626 	      rtx rmem, imem;
   3627 	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
   3628 	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
   3629 						DECL_MODE (parm),
   3630 						TYPE_ALIGN (TREE_TYPE (parm)));
   3631 
   3632 	      /* split_complex_arg put the real and imag parts in
   3633 		 pseudos.  Move them to memory.  */
   3634 	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
   3635 	      set_mem_attributes (tmp, parm, 1);
   3636 	      rmem = adjust_address_nv (tmp, inner, 0);
   3637 	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
   3638 	      push_to_sequence2 (all->first_conversion_insn,
   3639 				 all->last_conversion_insn);
   3640 	      emit_move_insn (rmem, real);
   3641 	      emit_move_insn (imem, imag);
   3642 	      all->first_conversion_insn = get_insns ();
   3643 	      all->last_conversion_insn = get_last_insn ();
   3644 	      end_sequence ();
   3645 	    }
   3646 	  else
   3647 	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
   3648 	  set_parm_rtl (parm, tmp);
   3649 
   3650 	  real = DECL_INCOMING_RTL (fnargs[i]);
   3651 	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
   3652 	  if (inner != GET_MODE (real))
   3653 	    {
   3654 	      real = gen_lowpart_SUBREG (inner, real);
   3655 	      imag = gen_lowpart_SUBREG (inner, imag);
   3656 	    }
   3657 	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
   3658 	  set_decl_incoming_rtl (parm, tmp, false);
   3659 	  i++;
   3660 	}
   3661     }
   3662 }
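
/* For illustration: when targetm.calls.split_complex_arg is set, a
   declaration such as

       double f (_Complex double z);

   was earlier split into two synthetic `double' parms in FNARGS; the
   loop above stitches Z back together, giving it a CONCAT of the real
   and imaginary halves, or a stack temporary when Z is addressable.  */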
   3663 
   3664 /* Assign RTL expressions to the function's parameters.  This may involve
   3665    copying them into registers and using those registers as the DECL_RTL.  */
   3666 
   3667 static void
   3668 assign_parms (tree fndecl)
   3669 {
   3670   struct assign_parm_data_all all;
   3671   tree parm;
   3672   vec<tree> fnargs;
   3673   unsigned i;
   3674 
   3675   crtl->args.internal_arg_pointer
   3676     = targetm.calls.internal_arg_pointer ();
   3677 
   3678   assign_parms_initialize_all (&all);
   3679   fnargs = assign_parms_augmented_arg_list (&all);
   3680 
   3681   FOR_EACH_VEC_ELT (fnargs, i, parm)
   3682     {
   3683       struct assign_parm_data_one data;
   3684 
   3685       /* Extract the type of PARM; adjust it according to ABI.  */
   3686       assign_parm_find_data_types (&all, parm, &data);
   3687 
   3688       /* Early out for errors and void parameters.  */
   3689       if (data.passed_mode == VOIDmode)
   3690 	{
   3691 	  SET_DECL_RTL (parm, const0_rtx);
   3692 	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
   3693 	  continue;
   3694 	}
   3695 
   3696       /* Estimate stack alignment from parameter alignment.  */
   3697       if (SUPPORTS_STACK_ALIGNMENT)
   3698         {
   3699           unsigned int align
   3700 	    = targetm.calls.function_arg_boundary (data.arg.mode,
   3701 						   data.arg.type);
   3702 	  align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
   3703 	  if (TYPE_ALIGN (data.nominal_type) > align)
   3704 	    align = MINIMUM_ALIGNMENT (data.nominal_type,
   3705 				       TYPE_MODE (data.nominal_type),
   3706 				       TYPE_ALIGN (data.nominal_type));
   3707 	  if (crtl->stack_alignment_estimated < align)
   3708 	    {
   3709 	      gcc_assert (!crtl->stack_realign_processed);
   3710 	      crtl->stack_alignment_estimated = align;
   3711 	    }
   3712 	}
   3713 
   3714       /* Find out where the parameter arrives in this function.  */
   3715       assign_parm_find_entry_rtl (&all, &data);
   3716 
   3717       /* Find out where stack space for this parameter might be.  */
   3718       if (assign_parm_is_stack_parm (&all, &data))
   3719 	{
   3720 	  assign_parm_find_stack_rtl (parm, &data);
   3721 	  assign_parm_adjust_entry_rtl (&data);
    3722 	  /* For arguments that occupy no space in the parameter
    3723 	     passing area, have non-zero size, and have their address
    3724 	     taken, force creation of a stack slot so that they have a
    3725 	     distinct address from other parameters.  */
   3726 	  if (TYPE_EMPTY_P (data.arg.type)
   3727 	      && TREE_ADDRESSABLE (parm)
   3728 	      && data.entry_parm == data.stack_parm
   3729 	      && MEM_P (data.entry_parm)
   3730 	      && int_size_in_bytes (data.arg.type))
   3731 	    data.stack_parm = NULL_RTX;
   3732 	}
   3733       /* Record permanently how this parm was passed.  */
   3734       if (data.arg.pass_by_reference)
   3735 	{
   3736 	  rtx incoming_rtl
   3737 	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
   3738 			   data.entry_parm);
   3739 	  set_decl_incoming_rtl (parm, incoming_rtl, true);
   3740 	}
   3741       else
   3742 	set_decl_incoming_rtl (parm, data.entry_parm, false);
   3743 
   3744       assign_parm_adjust_stack_rtl (&data);
   3745 
   3746       if (assign_parm_setup_block_p (&data))
   3747 	assign_parm_setup_block (&all, parm, &data);
   3748       else if (data.arg.pass_by_reference || use_register_for_decl (parm))
   3749 	assign_parm_setup_reg (&all, parm, &data);
   3750       else
   3751 	assign_parm_setup_stack (&all, parm, &data);
   3752 
   3753       if (cfun->stdarg && !DECL_CHAIN (parm))
   3754 	assign_parms_setup_varargs (&all, &data, false);
   3755 
   3756       /* Update info on where next arg arrives in registers.  */
   3757       targetm.calls.function_arg_advance (all.args_so_far, data.arg);
   3758     }
   3759 
   3760   if (targetm.calls.split_complex_arg)
   3761     assign_parms_unsplit_complex (&all, fnargs);
   3762 
   3763   fnargs.release ();
   3764 
   3765   /* Output all parameter conversion instructions (possibly including calls)
   3766      now that all parameters have been copied out of hard registers.  */
   3767   emit_insn (all.first_conversion_insn);
   3768 
   3769   do_pending_stack_adjust ();
   3770 
   3771   /* Estimate reload stack alignment from scalar return mode.  */
   3772   if (SUPPORTS_STACK_ALIGNMENT)
   3773     {
   3774       if (DECL_RESULT (fndecl))
   3775 	{
   3776 	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
   3777 	  machine_mode mode = TYPE_MODE (type);
   3778 
   3779 	  if (mode != BLKmode
   3780 	      && mode != VOIDmode
   3781 	      && !AGGREGATE_TYPE_P (type))
   3782 	    {
   3783 	      unsigned int align = GET_MODE_ALIGNMENT (mode);
   3784 	      if (crtl->stack_alignment_estimated < align)
   3785 		{
   3786 		  gcc_assert (!crtl->stack_realign_processed);
   3787 		  crtl->stack_alignment_estimated = align;
   3788 		}
   3789 	    }
   3790 	}
   3791     }
   3792 
   3793   /* If we are receiving a struct value address as the first argument, set up
   3794      the RTL for the function result. As this might require code to convert
   3795      the transmitted address to Pmode, we do this here to ensure that possible
   3796      preliminary conversions of the address have been emitted already.  */
   3797   if (all.function_result_decl)
   3798     {
   3799       tree result = DECL_RESULT (current_function_decl);
   3800       rtx addr = DECL_RTL (all.function_result_decl);
   3801       rtx x;
   3802 
   3803       if (DECL_BY_REFERENCE (result))
   3804 	{
   3805 	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
   3806 	  x = addr;
   3807 	}
   3808       else
   3809 	{
   3810 	  SET_DECL_VALUE_EXPR (result,
   3811 			       build1 (INDIRECT_REF, TREE_TYPE (result),
   3812 				       all.function_result_decl));
   3813 	  addr = convert_memory_address (Pmode, addr);
   3814 	  x = gen_rtx_MEM (DECL_MODE (result), addr);
   3815 	  set_mem_attributes (x, result, 1);
   3816 	}
   3817 
   3818       DECL_HAS_VALUE_EXPR_P (result) = 1;
   3819 
   3820       set_parm_rtl (result, x);
   3821     }
   3822 
   3823   /* We have aligned all the args, so add space for the pretend args.  */
   3824   crtl->args.pretend_args_size = all.pretend_args_size;
   3825   all.stack_args_size.constant += all.extra_pretend_bytes;
   3826   crtl->args.size = all.stack_args_size.constant;
   3827 
   3828   /* Adjust function incoming argument size for alignment and
   3829      minimum length.  */
   3830 
   3831   crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
   3832   crtl->args.size = aligned_upper_bound (crtl->args.size,
   3833 					 PARM_BOUNDARY / BITS_PER_UNIT);
   3834 
   3835   if (ARGS_GROW_DOWNWARD)
   3836     {
   3837       crtl->args.arg_offset_rtx
   3838 	= (all.stack_args_size.var == 0
   3839 	   ? gen_int_mode (-all.stack_args_size.constant, Pmode)
   3840 	   : expand_expr (size_diffop (all.stack_args_size.var,
   3841 				       size_int (-all.stack_args_size.constant)),
   3842 			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
   3843     }
   3844   else
   3845     crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
   3846 
   3847   /* See how many bytes, if any, of its args a function should try to pop
   3848      on return.  */
   3849 
   3850   crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
   3851 							 TREE_TYPE (fndecl),
   3852 							 crtl->args.size);
   3853 
    3854   /* For a stdarg.h function, save info about
   3855      regs and stack space used by the named args.  */
   3856 
   3857   crtl->args.info = all.args_so_far_v;
   3858 
   3859   /* Set the rtx used for the function return value.  Put this in its
   3860      own variable so any optimizers that need this information don't have
   3861      to include tree.h.  Do this here so it gets done when an inlined
   3862      function gets output.  */
   3863 
   3864   crtl->return_rtx
   3865     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
   3866        ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
   3867 
    3868   /* If the scalar return value was computed in a pseudo-reg, or was a named
   3869      return value that got dumped to the stack, copy that to the hard
   3870      return register.  */
   3871   if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
   3872     {
   3873       tree decl_result = DECL_RESULT (fndecl);
   3874       rtx decl_rtl = DECL_RTL (decl_result);
   3875 
   3876       if (REG_P (decl_rtl)
   3877 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
   3878 	  : DECL_REGISTER (decl_result))
   3879 	{
   3880 	  rtx real_decl_rtl;
   3881 
   3882 	  /* Unless the psABI says not to.  */
   3883 	  if (TYPE_EMPTY_P (TREE_TYPE (decl_result)))
   3884 	    real_decl_rtl = NULL_RTX;
   3885 	  else
   3886 	    {
   3887 	      real_decl_rtl
   3888 		= targetm.calls.function_value (TREE_TYPE (decl_result),
   3889 						fndecl, true);
   3890 	      REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
   3891 	    }
   3892 	  /* The delay slot scheduler assumes that crtl->return_rtx
   3893 	     holds the hard register containing the return value, not a
   3894 	     temporary pseudo.  */
   3895 	  crtl->return_rtx = real_decl_rtl;
   3896 	}
   3897     }
   3898 }
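
/* Sketch of the function_result_decl case handled near the end of
   assign_parms: on an ABI that returns large aggregates via a hidden
   pointer argument,

       struct big h (void);

   behaves as if declared `void h (struct big *<retval>)'; DECL_RESULT
   is given a DECL_VALUE_EXPR dereferencing that incoming pointer, so
   stores to the result go through the caller-provided buffer.  */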
   3899 
   3900 /* A subroutine of gimplify_parameters, invoked via walk_tree.
   3901    For all seen types, gimplify their sizes.  */
   3902 
   3903 static tree
   3904 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
   3905 {
   3906   tree t = *tp;
   3907 
   3908   *walk_subtrees = 0;
   3909   if (TYPE_P (t))
   3910     {
   3911       if (POINTER_TYPE_P (t))
   3912 	*walk_subtrees = 1;
   3913       else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
   3914 	       && !TYPE_SIZES_GIMPLIFIED (t))
   3915 	{
   3916 	  gimplify_type_sizes (t, (gimple_seq *) data);
   3917 	  *walk_subtrees = 1;
   3918 	}
   3919     }
   3920 
   3921   return NULL;
   3922 }
   3923 
   3924 /* Gimplify the parameter list for current_function_decl.  This involves
   3925    evaluating SAVE_EXPRs of variable sized parameters and generating code
   3926    to implement callee-copies reference parameters.  Returns a sequence of
   3927    statements to add to the beginning of the function.  */
   3928 
   3929 gimple_seq
   3930 gimplify_parameters (gimple_seq *cleanup)
   3931 {
   3932   struct assign_parm_data_all all;
   3933   tree parm;
   3934   gimple_seq stmts = NULL;
   3935   vec<tree> fnargs;
   3936   unsigned i;
   3937 
   3938   assign_parms_initialize_all (&all);
   3939   fnargs = assign_parms_augmented_arg_list (&all);
   3940 
   3941   FOR_EACH_VEC_ELT (fnargs, i, parm)
   3942     {
   3943       struct assign_parm_data_one data;
   3944 
   3945       /* Extract the type of PARM; adjust it according to ABI.  */
   3946       assign_parm_find_data_types (&all, parm, &data);
   3947 
   3948       /* Early out for errors and void parameters.  */
   3949       if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
   3950 	continue;
   3951 
   3952       /* Update info on where next arg arrives in registers.  */
   3953       targetm.calls.function_arg_advance (all.args_so_far, data.arg);
   3954 
   3955       /* ??? Once upon a time variable_size stuffed parameter list
   3956 	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
   3957 	 turned out to be less than manageable in the gimple world.
   3958 	 Now we have to hunt them down ourselves.  */
   3959       walk_tree_without_duplicates (&data.arg.type,
   3960 				    gimplify_parm_type, &stmts);
   3961 
   3962       if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
   3963 	{
   3964 	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
   3965 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
   3966 	}
   3967 
   3968       if (data.arg.pass_by_reference)
   3969 	{
   3970 	  tree type = TREE_TYPE (data.arg.type);
   3971 	  function_arg_info orig_arg (type, data.arg.named);
   3972 	  if (reference_callee_copied (&all.args_so_far_v, orig_arg))
   3973 	    {
   3974 	      tree local, t;
   3975 
   3976 	      /* For constant-sized objects, this is trivial; for
   3977 		 variable-sized objects, we have to play games.  */
   3978 	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
   3979 		  && !(flag_stack_check == GENERIC_STACK_CHECK
   3980 		       && compare_tree_int (DECL_SIZE_UNIT (parm),
   3981 					    STACK_CHECK_MAX_VAR_SIZE) > 0))
   3982 		{
   3983 		  local = create_tmp_var (type, get_name (parm));
   3984 		  DECL_IGNORED_P (local) = 0;
   3985 		  /* If PARM was addressable, move that flag over
   3986 		     to the local copy, as its address will be taken,
    3987 		     not the PARM's.  Keep the PARM's address-taken flag,
    3988 		     as we'll query it during gimplification.  */
   3989 		  if (TREE_ADDRESSABLE (parm))
   3990 		    TREE_ADDRESSABLE (local) = 1;
   3991 		  if (DECL_NOT_GIMPLE_REG_P (parm))
   3992 		    DECL_NOT_GIMPLE_REG_P (local) = 1;
   3993 
   3994 		  if (!is_gimple_reg (local)
   3995 		      && flag_stack_reuse != SR_NONE)
   3996 		    {
   3997 		      tree clobber = build_clobber (type);
   3998 		      gimple *clobber_stmt;
   3999 		      clobber_stmt = gimple_build_assign (local, clobber);
   4000 		      gimple_seq_add_stmt (cleanup, clobber_stmt);
   4001 		    }
   4002 		}
   4003 	      else
   4004 		{
   4005 		  tree ptr_type, addr;
   4006 
   4007 		  ptr_type = build_pointer_type (type);
   4008 		  addr = create_tmp_reg (ptr_type, get_name (parm));
   4009 		  DECL_IGNORED_P (addr) = 0;
   4010 		  local = build_fold_indirect_ref (addr);
   4011 
   4012 		  t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
   4013 					      DECL_ALIGN (parm),
   4014 					      max_int_size_in_bytes (type));
   4015 		  /* The call has been built for a variable-sized object.  */
   4016 		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
   4017 		  t = fold_convert (ptr_type, t);
   4018 		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
   4019 		  gimplify_and_add (t, &stmts);
   4020 		}
   4021 
   4022 	      gimplify_assign (local, parm, &stmts);
   4023 
   4024 	      SET_DECL_VALUE_EXPR (parm, local);
   4025 	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
   4026 	    }
   4027 	}
   4028     }
   4029 
   4030   fnargs.release ();
   4031 
   4032   return stmts;
   4033 }
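
/* An illustrative gimplification (ABI-dependent, names hypothetical):
   on a target where aggregates are passed by invisible reference and
   reference_callee_copied is true, a function

       void f (struct s x) { ... }

   receives a pointer, and the statements built above amount to

       x.local = *x.ptr;

   with X's DECL_VALUE_EXPR rewritten to the local copy; a
   variable-sized X instead gets its copy in space obtained from an
   alloca call built by build_alloca_call_expr.  */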
   4034 
   4035 /* Compute the size and offset from the start of the stacked arguments for a
   4037    parm passed in mode PASSED_MODE and with type TYPE.
   4038 
   4039    INITIAL_OFFSET_PTR points to the current offset into the stacked
   4040    arguments.
   4041 
   4042    The starting offset and size for this parm are returned in
   4043    LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
    4044    nonzero, the offset is that of the stack slot, which is returned in
   4045    LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   4046    padding required from the initial offset ptr to the stack slot.
   4047 
   4048    IN_REGS is nonzero if the argument will be passed in registers.  It will
   4049    never be set if REG_PARM_STACK_SPACE is not defined.
   4050 
   4051    REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   4052    for arguments which are passed in registers.
   4053 
   4054    FNDECL is the function in which the argument was defined.
   4055 
   4056    There are two types of rounding that are done.  The first, controlled by
   4057    TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
    4058    argument list to be aligned to a specific boundary (in bits).  This
   4059    rounding affects the initial and starting offsets, but not the argument
   4060    size.
   4061 
   4062    The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   4063    optionally rounds the size of the parm to PARM_BOUNDARY.  The
   4064    initial offset is not affected by this rounding, while the size always
   4065    is and the starting offset may be.  */
   4066 
    4067 /*  LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
   4068     INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
   4069     callers pass in the total size of args so far as
   4070     INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
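
/* A worked example of the two roundings, with hypothetical numbers:
   given PARM_BOUNDARY == 32, a function_arg_boundary of 64 bits and an
   incoming offset of 4 bytes, the first rounding aligns the slot
   offset up to 8.  Given a round_boundary of 64 bits, a 6-byte
   argument then has its size rounded up to 8 bytes, while the
   already-aligned starting offset is unchanged.  */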
   4071 
   4072 void
   4073 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
   4074 		     int reg_parm_stack_space, int partial,
   4075 		     tree fndecl ATTRIBUTE_UNUSED,
   4076 		     struct args_size *initial_offset_ptr,
   4077 		     struct locate_and_pad_arg_data *locate)
   4078 {
   4079   tree sizetree;
   4080   pad_direction where_pad;
   4081   unsigned int boundary, round_boundary;
   4082   int part_size_in_regs;
   4083 
   4084   /* If we have found a stack parm before we reach the end of the
   4085      area reserved for registers, skip that area.  */
   4086   if (! in_regs)
   4087     {
   4088       if (reg_parm_stack_space > 0)
   4089 	{
   4090 	  if (initial_offset_ptr->var
   4091 	      || !ordered_p (initial_offset_ptr->constant,
   4092 			     reg_parm_stack_space))
   4093 	    {
   4094 	      initial_offset_ptr->var
   4095 		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
   4096 			      ssize_int (reg_parm_stack_space));
   4097 	      initial_offset_ptr->constant = 0;
   4098 	    }
   4099 	  else
   4100 	    initial_offset_ptr->constant
   4101 	      = ordered_max (initial_offset_ptr->constant,
   4102 			     reg_parm_stack_space);
   4103 	}
   4104     }
   4105 
   4106   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
   4107 
   4108   sizetree = (type
   4109 	      ? arg_size_in_bytes (type)
   4110 	      : size_int (GET_MODE_SIZE (passed_mode)));
   4111   where_pad = targetm.calls.function_arg_padding (passed_mode, type);
   4112   boundary = targetm.calls.function_arg_boundary (passed_mode, type);
   4113   round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
   4114 							      type);
   4115   locate->where_pad = where_pad;
   4116 
   4117   /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
   4118   if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
   4119     boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
   4120 
   4121   locate->boundary = boundary;
   4122 
   4123   if (SUPPORTS_STACK_ALIGNMENT)
   4124     {
    4125       /* stack_alignment_estimated can't change after the stack has been
   4126 	 realigned.  */
   4127       if (crtl->stack_alignment_estimated < boundary)
   4128         {
   4129           if (!crtl->stack_realign_processed)
   4130 	    crtl->stack_alignment_estimated = boundary;
   4131 	  else
   4132 	    {
    4133 	      /* If the stack is realigned and the stack alignment value
   4134 		 hasn't been finalized, it is OK not to increase
   4135 		 stack_alignment_estimated.  The bigger alignment
   4136 		 requirement is recorded in stack_alignment_needed
   4137 		 below.  */
   4138 	      gcc_assert (!crtl->stack_realign_finalized
   4139 			  && crtl->stack_realign_needed);
   4140 	    }
   4141 	}
   4142     }
   4143 
   4144   if (ARGS_GROW_DOWNWARD)
   4145     {
   4146       locate->slot_offset.constant = -initial_offset_ptr->constant;
   4147       if (initial_offset_ptr->var)
   4148 	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
   4149 					      initial_offset_ptr->var);
   4150 
   4151       {
   4152 	tree s2 = sizetree;
   4153 	if (where_pad != PAD_NONE
   4154 	    && (!tree_fits_uhwi_p (sizetree)
   4155 		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
   4156 	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
   4157 	SUB_PARM_SIZE (locate->slot_offset, s2);
   4158       }
   4159 
   4160       locate->slot_offset.constant += part_size_in_regs;
   4161 
   4162       if (!in_regs || reg_parm_stack_space > 0)
   4163 	pad_to_arg_alignment (&locate->slot_offset, boundary,
   4164 			      &locate->alignment_pad);
   4165 
   4166       locate->size.constant = (-initial_offset_ptr->constant
   4167 			       - locate->slot_offset.constant);
   4168       if (initial_offset_ptr->var)
   4169 	locate->size.var = size_binop (MINUS_EXPR,
   4170 				       size_binop (MINUS_EXPR,
   4171 						   ssize_int (0),
   4172 						   initial_offset_ptr->var),
   4173 				       locate->slot_offset.var);
   4174 
   4175       /* Pad_below needs the pre-rounded size to know how much to pad
   4176 	 below.  */
   4177       locate->offset = locate->slot_offset;
   4178       if (where_pad == PAD_DOWNWARD)
   4179 	pad_below (&locate->offset, passed_mode, sizetree);
   4180 
   4181     }
   4182   else
   4183     {
   4184       if (!in_regs || reg_parm_stack_space > 0)
   4185 	pad_to_arg_alignment (initial_offset_ptr, boundary,
   4186 			      &locate->alignment_pad);
   4187       locate->slot_offset = *initial_offset_ptr;
   4188 
   4189 #ifdef PUSH_ROUNDING
   4190       if (passed_mode != BLKmode)
   4191 	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
   4192 #endif
   4193 
   4194       /* Pad_below needs the pre-rounded size to know how much to pad below
   4195 	 so this must be done before rounding up.  */
   4196       locate->offset = locate->slot_offset;
   4197       if (where_pad == PAD_DOWNWARD)
   4198 	pad_below (&locate->offset, passed_mode, sizetree);
   4199 
   4200       if (where_pad != PAD_NONE
   4201 	  && (!tree_fits_uhwi_p (sizetree)
   4202 	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
   4203 	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
   4204 
   4205       ADD_PARM_SIZE (locate->size, sizetree);
   4206 
   4207       locate->size.constant -= part_size_in_regs;
   4208     }
   4209 
   4210   locate->offset.constant
   4211     += targetm.calls.function_arg_offset (passed_mode, type);
   4212 }
   4213 
   4214 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   4215    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
   4216 
   4217 static void
   4218 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
   4219 		      struct args_size *alignment_pad)
   4220 {
   4221   tree save_var = NULL_TREE;
   4222   poly_int64 save_constant = 0;
   4223   int boundary_in_bytes = boundary / BITS_PER_UNIT;
   4224   poly_int64 sp_offset = STACK_POINTER_OFFSET;
   4225 
   4226 #ifdef SPARC_STACK_BOUNDARY_HACK
   4227   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
   4228      the real alignment of %sp.  However, when it does this, the
   4229      alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
   4230   if (SPARC_STACK_BOUNDARY_HACK)
   4231     sp_offset = 0;
   4232 #endif
   4233 
   4234   if (boundary > PARM_BOUNDARY)
   4235     {
   4236       save_var = offset_ptr->var;
   4237       save_constant = offset_ptr->constant;
   4238     }
   4239 
   4240   alignment_pad->var = NULL_TREE;
   4241   alignment_pad->constant = 0;
   4242 
   4243   if (boundary > BITS_PER_UNIT)
   4244     {
   4245       int misalign;
   4246       if (offset_ptr->var
   4247 	  || !known_misalignment (offset_ptr->constant + sp_offset,
   4248 				  boundary_in_bytes, &misalign))
   4249 	{
   4250 	  tree sp_offset_tree = ssize_int (sp_offset);
   4251 	  tree offset = size_binop (PLUS_EXPR,
   4252 				    ARGS_SIZE_TREE (*offset_ptr),
   4253 				    sp_offset_tree);
   4254 	  tree rounded;
   4255 	  if (ARGS_GROW_DOWNWARD)
   4256 	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
   4257 	  else
   4258 	    rounded = round_up   (offset, boundary / BITS_PER_UNIT);
   4259 
   4260 	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
    4261 	  /* ARGS_SIZE_TREE includes the constant term.  */
   4262 	  offset_ptr->constant = 0;
   4263 	  if (boundary > PARM_BOUNDARY)
   4264 	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
   4265 					     save_var);
   4266 	}
   4267       else
   4268 	{
   4269 	  if (ARGS_GROW_DOWNWARD)
   4270 	    offset_ptr->constant -= misalign;
   4271 	  else
   4272 	    offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
   4273 
   4274 	  if (boundary > PARM_BOUNDARY)
   4275 	    alignment_pad->constant = offset_ptr->constant - save_constant;
   4276 	}
   4277     }
   4278 }
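
/* Worked example for the upward-growing case, with hypothetical
   numbers: BOUNDARY == 128 bits (16 bytes), sp_offset == 0 and a
   constant offset of 20 give misalign == 4, so the offset becomes
   20 + (-4 & 15) == 32; since BOUNDARY > PARM_BOUNDARY, the 12 bytes
   of padding are also recorded in ALIGNMENT_PAD.  */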
   4279 
   4280 static void
   4281 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
   4282 {
   4283   unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
   4284   int misalign;
   4285   if (passed_mode != BLKmode
   4286       && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
   4287     offset_ptr->constant += -misalign & (align - 1);
   4288   else
   4289     {
   4290       if (TREE_CODE (sizetree) != INTEGER_CST
   4291 	  || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
   4292 	{
   4293 	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
   4294 	  tree s2 = round_up (sizetree, align);
   4295 	  /* Add it in.  */
   4296 	  ADD_PARM_SIZE (*offset_ptr, s2);
   4297 	  SUB_PARM_SIZE (*offset_ptr, sizetree);
   4298 	}
   4299     }
   4300 }
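
/* Worked example: with PARM_BOUNDARY == 32 (ALIGN == 4 bytes), a
   2-byte HImode parm padded downward advances the offset by
   -2 & 3 == 2 bytes of padding below the value, while a 4-byte SImode
   parm needs no adjustment.  */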
   4301 
   4302 
   4304 /* True if register REGNO was alive at a place where `setjmp' was
   4305    called and was set more than once or is an argument.  Such regs may
   4306    be clobbered by `longjmp'.  */
   4307 
   4308 static bool
   4309 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
   4310 {
   4311   /* There appear to be cases where some local vars never reach the
   4312      backend but have bogus regnos.  */
   4313   if (regno >= max_reg_num ())
   4314     return false;
   4315 
   4316   return ((REG_N_SETS (regno) > 1
   4317 	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
   4318 			       regno))
   4319 	  && REGNO_REG_SET_P (setjmp_crosses, regno));
   4320 }
   4321 
    4322 /* Walk the tree of blocks describing the binding levels within a
    4323    function and warn about variables that might be killed by setjmp or
    4324    vfork.  This is done after flow analysis and before register
    4325    allocation, since register allocation will map the pseudo-regs to
    4326    hard regs.  */
   4327 
   4328 static void
   4329 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
   4330 {
   4331   tree decl, sub;
   4332 
   4333   for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
   4334     {
   4335       if (VAR_P (decl)
   4336 	  && DECL_RTL_SET_P (decl)
   4337 	  && REG_P (DECL_RTL (decl))
   4338 	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
   4339 	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
   4340                  " %<longjmp%> or %<vfork%>", decl);
   4341     }
   4342 
   4343   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
   4344     setjmp_vars_warning (setjmp_crosses, sub);
   4345 }
   4346 
   4347 /* Do the appropriate part of setjmp_vars_warning
   4348    but for arguments instead of local variables.  */
   4349 
   4350 static void
   4351 setjmp_args_warning (bitmap setjmp_crosses)
   4352 {
   4353   tree decl;
   4354   for (decl = DECL_ARGUMENTS (current_function_decl);
   4355        decl; decl = DECL_CHAIN (decl))
   4356     if (DECL_RTL (decl) != 0
   4357 	&& REG_P (DECL_RTL (decl))
   4358 	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
   4359       warning (OPT_Wclobbered,
   4360                "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
   4361 	       decl);
   4362 }
   4363 
   4364 /* Generate warning messages for variables live across setjmp.  */
   4365 
   4366 void
   4367 generate_setjmp_warnings (void)
   4368 {
   4369   bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
   4370 
   4371   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
   4372       || bitmap_empty_p (setjmp_crosses))
   4373     return;
   4374 
   4375   setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
   4376   setjmp_args_warning (setjmp_crosses);
   4377 }
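
/* A minimal user-level sketch (illustrative only) of the situation the
   warnings above describe: a non-volatile local modified between setjmp
   and longjmp may live in a call-clobbered register, so its value is
   indeterminate after the longjmp:

     #include <setjmp.h>

     static jmp_buf env;

     int
     f (void)
     {
       int n = 0;
       if (setjmp (env) == 0)
	 {
	   n = 1;
	   longjmp (env, 1);
	 }
       return n;
     }

   After the longjmp, setjmp returns nonzero and F returns N, whose
   value is indeterminate because N may have been kept in a register
   clobbered by the longjmp.  Declaring N volatile forces it into
   memory and avoids the problem.  */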
   4378 
   4379 
   4380 /* Reverse the order of elements in the fragment chain T of blocks,
   4382    and return the new head of the chain (old last element).
    4383    In addition, clear BLOCK_SAME_RANGE flags when needed and adjust
    4384    BLOCK_SUPERCONTEXT from the super fragment to its super fragment
    4385    origin.  */
   4386 
   4387 static tree
   4388 block_fragments_nreverse (tree t)
   4389 {
   4390   tree prev = 0, block, next, prev_super = 0;
   4391   tree super = BLOCK_SUPERCONTEXT (t);
   4392   if (BLOCK_FRAGMENT_ORIGIN (super))
   4393     super = BLOCK_FRAGMENT_ORIGIN (super);
   4394   for (block = t; block; block = next)
   4395     {
   4396       next = BLOCK_FRAGMENT_CHAIN (block);
   4397       BLOCK_FRAGMENT_CHAIN (block) = prev;
   4398       if ((prev && !BLOCK_SAME_RANGE (prev))
   4399 	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
   4400 	      != prev_super))
   4401 	BLOCK_SAME_RANGE (block) = 0;
   4402       prev_super = BLOCK_SUPERCONTEXT (block);
   4403       BLOCK_SUPERCONTEXT (block) = super;
   4404       prev = block;
   4405     }
   4406   t = BLOCK_FRAGMENT_ORIGIN (t);
   4407   if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
   4408       != prev_super)
   4409     BLOCK_SAME_RANGE (t) = 0;
   4410   BLOCK_SUPERCONTEXT (t) = super;
   4411   return prev;
   4412 }
   4413 
   4414 /* Reverse the order of elements in the chain T of blocks,
   4415    and return the new head of the chain (old last element).
   4416    Also do the same on subblocks and reverse the order of elements
   4417    in BLOCK_FRAGMENT_CHAIN as well.  */
   4418 
   4419 static tree
   4420 blocks_nreverse_all (tree t)
   4421 {
   4422   tree prev = 0, block, next;
   4423   for (block = t; block; block = next)
   4424     {
   4425       next = BLOCK_CHAIN (block);
   4426       BLOCK_CHAIN (block) = prev;
   4427       if (BLOCK_FRAGMENT_CHAIN (block)
   4428 	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
   4429 	{
   4430 	  BLOCK_FRAGMENT_CHAIN (block)
   4431 	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
   4432 	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
   4433 	    BLOCK_SAME_RANGE (block) = 0;
   4434 	}
   4435       BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
   4436       prev = block;
   4437     }
   4438   return prev;
   4439 }
   4440 
   4441 
   4442 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   4443    and create duplicate blocks.  */
   4444 /* ??? Need an option to either create block fragments or to create
   4445    abstract origin duplicates of a source block.  It really depends
   4446    on what optimization has been performed.  */
   4447 
   4448 void
   4449 reorder_blocks (void)
   4450 {
   4451   tree block = DECL_INITIAL (current_function_decl);
   4452 
   4453   if (block == NULL_TREE)
   4454     return;
   4455 
   4456   auto_vec<tree, 10> block_stack;
   4457 
   4458   /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
   4459   clear_block_marks (block);
   4460 
   4461   /* Prune the old trees away, so that they don't get in the way.  */
   4462   BLOCK_SUBBLOCKS (block) = NULL_TREE;
   4463   BLOCK_CHAIN (block) = NULL_TREE;
   4464 
   4465   /* Recreate the block tree from the note nesting.  */
   4466   reorder_blocks_1 (get_insns (), block, &block_stack);
   4467   BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
   4468 }
   4469 
   4470 /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
   4471 
   4472 void
   4473 clear_block_marks (tree block)
   4474 {
   4475   while (block)
   4476     {
   4477       TREE_ASM_WRITTEN (block) = 0;
   4478       clear_block_marks (BLOCK_SUBBLOCKS (block));
   4479       block = BLOCK_CHAIN (block);
   4480     }
   4481 }
   4482 
   4483 static void
   4484 reorder_blocks_1 (rtx_insn *insns, tree current_block,
   4485 		  vec<tree> *p_block_stack)
   4486 {
   4487   rtx_insn *insn;
   4488   tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
   4489 
   4490   for (insn = insns; insn; insn = NEXT_INSN (insn))
   4491     {
   4492       if (NOTE_P (insn))
   4493 	{
   4494 	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
   4495 	    {
   4496 	      tree block = NOTE_BLOCK (insn);
   4497 	      tree origin;
   4498 
   4499 	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
   4500 	      origin = block;
   4501 
   4502 	      if (prev_end)
   4503 		BLOCK_SAME_RANGE (prev_end) = 0;
   4504 	      prev_end = NULL_TREE;
   4505 
   4506 	      /* If we have seen this block before, that means it now
   4507 		 spans multiple address regions.  Create a new fragment.  */
   4508 	      if (TREE_ASM_WRITTEN (block))
   4509 		{
   4510 		  tree new_block = copy_node (block);
   4511 
   4512 		  BLOCK_SAME_RANGE (new_block) = 0;
   4513 		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
   4514 		  BLOCK_FRAGMENT_CHAIN (new_block)
   4515 		    = BLOCK_FRAGMENT_CHAIN (origin);
   4516 		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
   4517 
   4518 		  NOTE_BLOCK (insn) = new_block;
   4519 		  block = new_block;
   4520 		}
   4521 
   4522 	      if (prev_beg == current_block && prev_beg)
   4523 		BLOCK_SAME_RANGE (block) = 1;
   4524 
   4525 	      prev_beg = origin;
   4526 
   4527 	      BLOCK_SUBBLOCKS (block) = 0;
   4528 	      TREE_ASM_WRITTEN (block) = 1;
    4529 	      /* When there's only one block for the entire function,
    4530 		 current_block == block and we mustn't do this, as it
    4531 		 would cause infinite recursion.  */
   4532 	      if (block != current_block)
   4533 		{
   4534 		  tree super;
   4535 		  if (block != origin)
   4536 		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
   4537 				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
   4538 								      (origin))
   4539 				   == current_block);
   4540 		  if (p_block_stack->is_empty ())
   4541 		    super = current_block;
   4542 		  else
   4543 		    {
   4544 		      super = p_block_stack->last ();
   4545 		      gcc_assert (super == current_block
   4546 				  || BLOCK_FRAGMENT_ORIGIN (super)
   4547 				     == current_block);
   4548 		    }
   4549 		  BLOCK_SUPERCONTEXT (block) = super;
   4550 		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
   4551 		  BLOCK_SUBBLOCKS (current_block) = block;
   4552 		  current_block = origin;
   4553 		}
   4554 	      p_block_stack->safe_push (block);
   4555 	    }
   4556 	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
   4557 	    {
   4558 	      NOTE_BLOCK (insn) = p_block_stack->pop ();
   4559 	      current_block = BLOCK_SUPERCONTEXT (current_block);
   4560 	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
   4561 		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
   4562 	      prev_beg = NULL_TREE;
   4563 	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
   4564 			 ? NOTE_BLOCK (insn) : NULL_TREE;
   4565 	    }
   4566 	}
   4567       else
   4568 	{
   4569 	  prev_beg = NULL_TREE;
   4570 	  if (prev_end)
   4571 	    BLOCK_SAME_RANGE (prev_end) = 0;
   4572 	  prev_end = NULL_TREE;
   4573 	}
   4574     }
   4575 }
   4576 
   4577 /* Reverse the order of elements in the chain T of blocks,
   4578    and return the new head of the chain (old last element).  */
   4579 
   4580 tree
   4581 blocks_nreverse (tree t)
   4582 {
   4583   tree prev = 0, block, next;
   4584   for (block = t; block; block = next)
   4585     {
   4586       next = BLOCK_CHAIN (block);
   4587       BLOCK_CHAIN (block) = prev;
   4588       prev = block;
   4589     }
   4590   return prev;
   4591 }
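
/* Usage sketch (hypothetical): the reversal above is destructive, so
   callers must continue with the returned head rather than the old one.
   Given a chain A -> B -> C in CHAIN:

     chain = blocks_nreverse (chain);

   CHAIN is now C -> B -> A; the old head A has become the tail, so a
   stale pointer to A would see a one-element chain.  */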
   4592 
   4593 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   4594    by modifying the last node in chain 1 to point to chain 2.  */
   4595 
   4596 tree
   4597 block_chainon (tree op1, tree op2)
   4598 {
   4599   tree t1;
   4600 
   4601   if (!op1)
   4602     return op2;
   4603   if (!op2)
   4604     return op1;
   4605 
   4606   for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
   4607     continue;
   4608   BLOCK_CHAIN (t1) = op2;
   4609 
   4610 #ifdef ENABLE_TREE_CHECKING
   4611   {
   4612     tree t2;
   4613     for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
   4614       gcc_assert (t2 != t1);
   4615   }
   4616 #endif
   4617 
   4618   return op1;
   4619 }
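
/* Usage sketch (hypothetical): block_chainon appends destructively and
   shares structure rather than copying.  Given a chain A1 -> A2 in A
   and a chain B1 in B:

     a = block_chainon (a, b);

   A is now A1 -> A2 -> B1, and B1 is still the same node that B points
   to.  Unlike the generic chainon for trees, this walks BLOCK_CHAIN
   rather than TREE_CHAIN.  */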
   4620 
   4621 /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   4622    non-NULL, list them all into VECTOR, in a depth-first preorder
   4623    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   4624    blocks.  */
   4625 
   4626 static int
   4627 all_blocks (tree block, tree *vector)
   4628 {
   4629   int n_blocks = 0;
   4630 
   4631   while (block)
   4632     {
   4633       TREE_ASM_WRITTEN (block) = 0;
   4634 
   4635       /* Record this block.  */
   4636       if (vector)
   4637 	vector[n_blocks] = block;
   4638 
   4639       ++n_blocks;
   4640 
   4641       /* Record the subblocks, and their subblocks...  */
   4642       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
   4643 			      vector ? vector + n_blocks : 0);
   4644       block = BLOCK_CHAIN (block);
   4645     }
   4646 
   4647   return n_blocks;
   4648 }
   4649 
   4650 /* Return a vector containing all the blocks rooted at BLOCK.  The
   4651    number of elements in the vector is stored in N_BLOCKS_P.  The
   4652    vector is dynamically allocated; it is the caller's responsibility
   4653    to call `free' on the pointer returned.  */
   4654 
   4655 static tree *
   4656 get_block_vector (tree block, int *n_blocks_p)
   4657 {
   4658   tree *block_vector;
   4659 
   4660   *n_blocks_p = all_blocks (block, NULL);
   4661   block_vector = XNEWVEC (tree, *n_blocks_p);
   4662   all_blocks (block, block_vector);
   4663 
   4664   return block_vector;
   4665 }
   4666 
   4667 static GTY(()) int next_block_index = 2;
   4668 
   4669 /* Set BLOCK_NUMBER for all the blocks in FN.  */
   4670 
   4671 void
   4672 number_blocks (tree fn)
   4673 {
   4674   int i;
   4675   int n_blocks;
   4676   tree *block_vector;
   4677 
   4678   /* For XCOFF debugging output, we start numbering the blocks
   4679      from 1 within each function, rather than keeping a running
   4680      count.  */
   4681 #if defined (XCOFF_DEBUGGING_INFO)
   4682   if (write_symbols == XCOFF_DEBUG)
   4683     next_block_index = 1;
   4684 #endif
   4685 
   4686   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
   4687 
   4688   /* The top-level BLOCK isn't numbered at all.  */
   4689   for (i = 1; i < n_blocks; ++i)
   4690     /* We number the blocks from two.  */
   4691     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
   4692 
   4693   free (block_vector);
   4694 
   4695   return;
   4696 }
   4697 
   4698 /* If VAR is present in a subblock of BLOCK, return the subblock.  */
   4699 
   4700 DEBUG_FUNCTION tree
   4701 debug_find_var_in_block_tree (tree var, tree block)
   4702 {
   4703   tree t;
   4704 
   4705   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
   4706     if (t == var)
   4707       return block;
   4708 
   4709   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
   4710     {
   4711       tree ret = debug_find_var_in_block_tree (var, t);
   4712       if (ret)
   4713 	return ret;
   4714     }
   4715 
   4716   return NULL_TREE;
   4717 }
   4718 
   4719 /* Keep track of whether we're in a dummy function context.  If we are,
   4721    we don't want to invoke the set_current_function hook, because we'll
    4722    get into trouble if the hook calls target_reinit () recursively or
    4723    is invoked before the initial initialization is complete.  */
   4724 
   4725 static bool in_dummy_function;
   4726 
   4727 /* Invoke the target hook when setting cfun.  Update the optimization options
   4728    if the function uses different options than the default.  */
   4729 
   4730 static void
   4731 invoke_set_current_function_hook (tree fndecl)
   4732 {
   4733   if (!in_dummy_function)
   4734     {
   4735       tree opts = ((fndecl)
   4736 		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
   4737 		   : optimization_default_node);
   4738 
   4739       if (!opts)
   4740 	opts = optimization_default_node;
   4741 
   4742       /* Change optimization options if needed.  */
   4743       if (optimization_current_node != opts)
   4744 	{
   4745 	  optimization_current_node = opts;
   4746 	  cl_optimization_restore (&global_options, &global_options_set,
   4747 				   TREE_OPTIMIZATION (opts));
   4748 	}
   4749 
   4750       targetm.set_current_function (fndecl);
   4751       this_fn_optabs = this_target_optabs;
   4752 
    4753       /* Initialize global alignment variables after options have been restored.  */
   4754       parse_alignment_opts ();
   4755 
   4756       if (opts != optimization_default_node)
   4757 	{
   4758 	  init_tree_optimization_optabs (opts);
   4759 	  if (TREE_OPTIMIZATION_OPTABS (opts))
   4760 	    this_fn_optabs = (struct target_optabs *)
   4761 	      TREE_OPTIMIZATION_OPTABS (opts);
   4762 	}
   4763     }
   4764 }
   4765 
   4766 /* cfun should never be set directly; use this function.  */
   4767 
   4768 void
   4769 set_cfun (struct function *new_cfun, bool force)
   4770 {
   4771   if (cfun != new_cfun || force)
   4772     {
   4773       cfun = new_cfun;
   4774       invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
   4775       redirect_edge_var_map_empty ();
   4776     }
   4777 }
   4778 
   4779 /* Initialized with NOGC, making this poisonous to the garbage collector.  */
   4780 
   4781 static vec<function *> cfun_stack;
   4782 
   4783 /* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
   4784    current_function_decl accordingly.  */
   4785 
   4786 void
   4787 push_cfun (struct function *new_cfun)
   4788 {
   4789   gcc_assert ((!cfun && !current_function_decl)
   4790 	      || (cfun && current_function_decl == cfun->decl));
   4791   cfun_stack.safe_push (cfun);
   4792   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
   4793   set_cfun (new_cfun);
   4794 }
   4795 
   4796 /* Pop cfun from the stack.  Also set current_function_decl accordingly.  */
   4797 
   4798 void
   4799 pop_cfun (void)
   4800 {
   4801   struct function *new_cfun = cfun_stack.pop ();
   4802   /* When in_dummy_function, we do have a cfun but current_function_decl is
   4803      NULL.  We also allow pushing NULL cfun and subsequently changing
   4804      current_function_decl to something else and have both restored by
   4805      pop_cfun.  */
   4806   gcc_checking_assert (in_dummy_function
   4807 		       || !cfun
   4808 		       || current_function_decl == cfun->decl);
   4809   set_cfun (new_cfun);
   4810   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
   4811 }
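
/* Usage sketch (hypothetical): the two functions above are intended to
   be used as a nesting pair, restoring both cfun and
   current_function_decl on the way out:

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... examine or emit IL in FNDECL's context ...
     pop_cfun ();

   Pushing NULL enters a "no current function" context, which pop_cfun
   unwinds in the same way.  */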
   4812 
    4813 /* Return the current value of funcdef_no and increment it.  */
   4814 int
   4815 get_next_funcdef_no (void)
   4816 {
   4817   return funcdef_no++;
   4818 }
   4819 
    4820 /* Return the current value of funcdef_no.  */
   4821 int
   4822 get_last_funcdef_no (void)
   4823 {
   4824   return funcdef_no;
   4825 }
   4826 
   4827 /* Allocate and initialize the stack usage info data structure for the
   4828    current function.  */
   4829 static void
   4830 allocate_stack_usage_info (void)
   4831 {
   4832   gcc_assert (!cfun->su);
   4833   cfun->su = ggc_cleared_alloc<stack_usage> ();
   4834   cfun->su->static_stack_size = -1;
   4835 }
   4836 
   4837 /* Allocate a function structure for FNDECL and set its contents
   4838    to the defaults.  Set cfun to the newly-allocated object.
   4839    Some of the helper functions invoked during initialization assume
   4840    that cfun has already been set.  Therefore, assign the new object
   4841    directly into cfun and invoke the back end hook explicitly at the
   4842    very end, rather than initializing a temporary and calling set_cfun
   4843    on it.
   4844 
   4845    ABSTRACT_P is true if this is a function that will never be seen by
   4846    the middle-end.  Such functions are front-end concepts (like C++
   4847    function templates) that do not correspond directly to functions
   4848    placed in object files.  */
   4849 
   4850 void
   4851 allocate_struct_function (tree fndecl, bool abstract_p)
   4852 {
   4853   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
   4854 
   4855   cfun = ggc_cleared_alloc<function> ();
   4856 
   4857   init_eh_for_function ();
   4858 
   4859   if (init_machine_status)
   4860     cfun->machine = (*init_machine_status) ();
   4861 
   4862 #ifdef OVERRIDE_ABI_FORMAT
   4863   OVERRIDE_ABI_FORMAT (fndecl);
   4864 #endif
   4865 
   4866   if (fndecl != NULL_TREE)
   4867     {
   4868       DECL_STRUCT_FUNCTION (fndecl) = cfun;
   4869       cfun->decl = fndecl;
   4870       current_function_funcdef_no = get_next_funcdef_no ();
   4871     }
   4872 
   4873   invoke_set_current_function_hook (fndecl);
   4874 
   4875   if (fndecl != NULL_TREE)
   4876     {
   4877       tree result = DECL_RESULT (fndecl);
   4878 
   4879       if (!abstract_p)
   4880 	{
   4881 	  /* Now that we have activated any function-specific attributes
   4882 	     that might affect layout, particularly vector modes, relayout
   4883 	     each of the parameters and the result.  */
   4884 	  relayout_decl (result);
   4885 	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
   4886 	       parm = DECL_CHAIN (parm))
   4887 	    relayout_decl (parm);
   4888 
   4889 	  /* Similarly relayout the function decl.  */
   4890 	  targetm.target_option.relayout_function (fndecl);
   4891 	}
   4892 
   4893       if (!abstract_p && aggregate_value_p (result, fndecl))
   4894 	{
   4895 #ifdef PCC_STATIC_STRUCT_RETURN
   4896 	  cfun->returns_pcc_struct = 1;
   4897 #endif
   4898 	  cfun->returns_struct = 1;
   4899 	}
   4900 
   4901       cfun->stdarg = stdarg_p (fntype);
   4902 
   4903       /* Assume all registers in stdarg functions need to be saved.  */
   4904       cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
   4905       cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
   4906 
   4907       /* ??? This could be set on a per-function basis by the front-end
   4908          but is this worth the hassle?  */
   4909       cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
   4910       cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
   4911 
   4912       if (!profile_flag && !flag_instrument_function_entry_exit)
   4913 	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
   4914 
   4915       if (flag_callgraph_info)
   4916 	allocate_stack_usage_info ();
   4917     }
   4918 
   4919   /* Don't enable begin stmt markers if var-tracking at assignments is
   4920      disabled.  The markers make little sense without the variable
   4921      binding annotations among them.  */
   4922   cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
   4923     && MAY_HAVE_DEBUG_MARKER_STMTS;
   4924 }
   4925 
   4926 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   4927    instead of just setting it.  */
   4928 
   4929 void
   4930 push_struct_function (tree fndecl, bool abstract_p)
   4931 {
   4932   /* When in_dummy_function we might be in the middle of a pop_cfun and
   4933      current_function_decl and cfun may not match.  */
   4934   gcc_assert (in_dummy_function
   4935 	      || (!cfun && !current_function_decl)
   4936 	      || (cfun && current_function_decl == cfun->decl));
   4937   cfun_stack.safe_push (cfun);
   4938   current_function_decl = fndecl;
   4939   allocate_struct_function (fndecl, abstract_p);
   4940 }
   4941 
   4942 /* Reset crtl and other non-struct-function variables to defaults as
   4943    appropriate for emitting rtl at the start of a function.  */
   4944 
   4945 static void
   4946 prepare_function_start (void)
   4947 {
   4948   gcc_assert (!get_last_insn ());
   4949 
   4950   if (in_dummy_function)
   4951     crtl->abi = &default_function_abi;
   4952   else
   4953     crtl->abi = &fndecl_abi (cfun->decl).base_abi ();
   4954 
   4955   init_temp_slots ();
   4956   init_emit ();
   4957   init_varasm_status ();
   4958   init_expr ();
   4959   default_rtl_profile ();
   4960 
   4961   if (flag_stack_usage_info && !flag_callgraph_info)
   4962     allocate_stack_usage_info ();
   4963 
   4964   cse_not_expected = ! optimize;
   4965 
   4966   /* Caller save not needed yet.  */
   4967   caller_save_needed = 0;
   4968 
   4969   /* We haven't done register allocation yet.  */
   4970   reg_renumber = 0;
   4971 
   4972   /* Indicate that we have not instantiated virtual registers yet.  */
   4973   virtuals_instantiated = 0;
   4974 
   4975   /* Indicate that we want CONCATs now.  */
   4976   generating_concat_p = 1;
   4977 
   4978   /* Indicate we have no need of a frame pointer yet.  */
   4979   frame_pointer_needed = 0;
   4980 }
   4981 
   4982 void
   4983 push_dummy_function (bool with_decl)
   4984 {
   4985   tree fn_decl, fn_type, fn_result_decl;
   4986 
   4987   gcc_assert (!in_dummy_function);
   4988   in_dummy_function = true;
   4989 
   4990   if (with_decl)
   4991     {
   4992       fn_type = build_function_type_list (void_type_node, NULL_TREE);
   4993       fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
   4994 			    fn_type);
   4995       fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
   4996 					 NULL_TREE, void_type_node);
   4997       DECL_RESULT (fn_decl) = fn_result_decl;
   4998       DECL_ARTIFICIAL (fn_decl) = 1;
   4999       tree fn_name = get_identifier (" ");
   5000       SET_DECL_ASSEMBLER_NAME (fn_decl, fn_name);
   5001     }
   5002   else
   5003     fn_decl = NULL_TREE;
   5004 
   5005   push_struct_function (fn_decl);
   5006 }
   5007 
   5008 /* Initialize the rtl expansion mechanism so that we can do simple things
   5009    like generate sequences.  This is used to provide a context during global
   5010    initialization of some passes.  You must call expand_dummy_function_end
   5011    to exit this context.  */
   5012 
   5013 void
   5014 init_dummy_function_start (void)
   5015 {
   5016   push_dummy_function (false);
   5017   prepare_function_start ();
   5018 }
   5019 
   5020 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   5021    and initialize static variables for generating RTL for the statements
   5022    of the function.  */
   5023 
   5024 void
   5025 init_function_start (tree subr)
   5026 {
   5027   /* Initialize backend, if needed.  */
   5028   initialize_rtl ();
   5029 
   5030   prepare_function_start ();
   5031   decide_function_section (subr);
   5032 
    5033   /* Warn if the return value is an aggregate type,
    5034      regardless of which calling convention we are using for it.  */
   5035   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
   5036     warning (OPT_Waggregate_return, "function returns an aggregate");
   5037 }
   5038 
   5039 /* Expand code to verify the stack_protect_guard.  This is invoked at
   5040    the end of a function to be protected.  */
   5041 
   5042 void
   5043 stack_protect_epilogue (void)
   5044 {
   5045   tree guard_decl = crtl->stack_protect_guard_decl;
   5046   rtx_code_label *label = gen_label_rtx ();
   5047   rtx x, y;
   5048   rtx_insn *seq = NULL;
   5049 
   5050   x = expand_normal (crtl->stack_protect_guard);
   5051 
   5052   if (targetm.have_stack_protect_combined_test () && guard_decl)
   5053     {
   5054       gcc_assert (DECL_P (guard_decl));
   5055       y = DECL_RTL (guard_decl);
    5056       /* Allow the target to compute the address of Y and compare it with X
    5057 	 without leaking Y into a register.  This combined address + compare
    5058 	 pattern allows the target to prevent spilling of any intermediate
    5059 	 results by splitting it after the register allocator has run.  */
   5060       seq = targetm.gen_stack_protect_combined_test (x, y, label);
   5061     }
   5062   else
   5063     {
   5064       if (guard_decl)
   5065 	y = expand_normal (guard_decl);
   5066       else
   5067 	y = const0_rtx;
   5068 
   5069       /* Allow the target to compare Y with X without leaking either into
   5070 	 a register.  */
   5071       if (targetm.have_stack_protect_test ())
   5072 	seq = targetm.gen_stack_protect_test (x, y, label);
   5073     }
   5074 
   5075   if (seq)
   5076     emit_insn (seq);
   5077   else
   5078     emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
   5079 
   5080   /* The noreturn predictor has been moved to the tree level.  The rtl-level
    5081      predictors estimate this branch at about 20%, which isn't enough to get
    5082      things moved out of line.  Since this is the only extant case of adding
    5083      a noreturn function at the rtl level, it doesn't seem worth doing anything
    5084      except adding the prediction by hand.  */
   5085   rtx_insn *tmp = get_last_insn ();
   5086   if (JUMP_P (tmp))
   5087     predict_insn_def (tmp, PRED_NORETURN, TAKEN);
   5088 
   5089   expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
   5090   free_temp_slots ();
   5091   emit_label (label);
   5092 }
   5093 
   5094 /* Start the RTL for a new function, and set variables used for
   5096    emitting RTL.
    5097    SUBR is the FUNCTION_DECL node.  */
   5100 
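/* True while expand_function_start is expanding the current function.  */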
   5101 bool currently_expanding_function_start;
   5102 void
   5103 expand_function_start (tree subr)
   5104 {
   5105   currently_expanding_function_start = true;
   5106 
   5107   /* Make sure volatile mem refs aren't considered
   5108      valid operands of arithmetic insns.  */
   5109   init_recog_no_volatile ();
   5110 
   5111   crtl->profile
   5112     = (profile_flag
   5113        && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
   5114 
   5115   crtl->limit_stack
   5116     = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
   5117 
   5118   /* Make the label for return statements to jump to.  Do not special
   5119      case machines with special return instructions -- they will be
   5120      handled later during jump, ifcvt, or epilogue creation.  */
   5121   return_label = gen_label_rtx ();
   5122 
   5123   /* Initialize rtx used to return the value.  */
   5124   /* Do this before assign_parms so that we copy the struct value address
   5125      before any library calls that assign parms might generate.  */
   5126 
   5127   /* Decide whether to return the value in memory or in a register.  */
   5128   tree res = DECL_RESULT (subr);
   5129   if (aggregate_value_p (res, subr))
   5130     {
   5131       /* Returning something that won't go in a register.  */
   5132       rtx value_address = 0;
   5133 
   5134 #ifdef PCC_STATIC_STRUCT_RETURN
   5135       if (cfun->returns_pcc_struct)
   5136 	{
   5137 	  int size = int_size_in_bytes (TREE_TYPE (res));
   5138 	  value_address = assemble_static_space (size);
   5139 	}
   5140       else
   5141 #endif
   5142 	{
   5143 	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
   5144 	  /* Expect to be passed the address of a place to store the value.
   5145 	     If it is passed as an argument, assign_parms will take care of
   5146 	     it.  */
   5147 	  if (sv)
   5148 	    {
   5149 	      value_address = gen_reg_rtx (Pmode);
   5150 	      emit_move_insn (value_address, sv);
   5151 	    }
   5152 	}
   5153       if (value_address)
   5154 	{
   5155 	  rtx x = value_address;
   5156 	  if (!DECL_BY_REFERENCE (res))
   5157 	    {
   5158 	      x = gen_rtx_MEM (DECL_MODE (res), x);
   5159 	      set_mem_attributes (x, res, 1);
   5160 	    }
   5161 	  set_parm_rtl (res, x);
   5162 	}
   5163     }
   5164   else if (DECL_MODE (res) == VOIDmode)
   5165     /* If return mode is void, this decl rtl should not be used.  */
   5166     set_parm_rtl (res, NULL_RTX);
   5167   else
   5168     {
   5169       /* Compute the return values into a pseudo reg, which we will copy
   5170 	 into the true return register after the cleanups are done.  */
   5171       tree return_type = TREE_TYPE (res);
   5172 
   5173       /* If we may coalesce this result, make sure it has the expected mode
   5174 	 in case it was promoted.  But we need not bother about BLKmode.  */
   5175       machine_mode promoted_mode
   5176 	= flag_tree_coalesce_vars && is_gimple_reg (res)
   5177 	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
   5178 	  : BLKmode;
   5179 
   5180       if (promoted_mode != BLKmode)
   5181 	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
   5182       else if (TYPE_MODE (return_type) != BLKmode
   5183 	       && targetm.calls.return_in_msb (return_type))
   5184 	/* expand_function_end will insert the appropriate padding in
   5185 	   this case.  Use the return value's natural (unpadded) mode
   5186 	   within the function proper.  */
   5187 	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
   5188       else
   5189 	{
   5190 	  /* In order to figure out what mode to use for the pseudo, we
   5191 	     figure out what the mode of the eventual return register will
   5192 	     actually be, and use that.  */
   5193 	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
   5194 
   5195 	  /* Structures that are returned in registers are not
   5196 	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
   5197 	  if (REG_P (hard_reg))
   5198 	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
   5199 	  else
   5200 	    {
   5201 	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
   5202 	      set_parm_rtl (res, gen_group_rtx (hard_reg));
   5203 	    }
   5204 	}
   5205 
   5206       /* Set DECL_REGISTER flag so that expand_function_end will copy the
   5207 	 result to the real return register(s).  */
   5208       DECL_REGISTER (res) = 1;
   5209     }
   5210 
   5211   /* Initialize rtx for parameters and local variables.
   5212      In some cases this requires emitting insns.  */
   5213   assign_parms (subr);
   5214 
    5215   /* If the function gets a static chain arg, store it.  */
   5216   if (cfun->static_chain_decl)
   5217     {
   5218       tree parm = cfun->static_chain_decl;
   5219       rtx local, chain;
   5220       rtx_insn *insn;
   5221       int unsignedp;
   5222 
   5223       local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
   5224       chain = targetm.calls.static_chain (current_function_decl, true);
   5225 
   5226       set_decl_incoming_rtl (parm, chain, false);
   5227       set_parm_rtl (parm, local);
   5228       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
   5229 
   5230       if (GET_MODE (local) != GET_MODE (chain))
   5231 	{
   5232 	  convert_move (local, chain, unsignedp);
   5233 	  insn = get_last_insn ();
   5234 	}
   5235       else
   5236 	insn = emit_move_insn (local, chain);
   5237 
   5238       /* Mark the register as eliminable, similar to parameters.  */
   5239       if (MEM_P (chain)
   5240 	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
   5241 	set_dst_reg_note (insn, REG_EQUIV, chain, local);
   5242 
   5243       /* If we aren't optimizing, save the static chain onto the stack.  */
   5244       if (!optimize)
   5245 	{
   5246 	  tree saved_static_chain_decl
   5247 	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
   5248 			  DECL_NAME (parm), TREE_TYPE (parm));
   5249 	  rtx saved_static_chain_rtx
   5250 	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
   5251 	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
   5252 	  emit_move_insn (saved_static_chain_rtx, chain);
   5253 	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
   5254 	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
   5255 	}
   5256     }
   5257 
   5258   /* The following was moved from init_function_start.
   5259      The move was supposed to make sdb output more accurate.  */
   5260   /* Indicate the beginning of the function body,
   5261      as opposed to parm setup.  */
   5262   emit_note (NOTE_INSN_FUNCTION_BEG);
   5263 
   5264   gcc_assert (NOTE_P (get_last_insn ()));
   5265 
   5266   parm_birth_insn = get_last_insn ();
   5267 
   5268   /* If the function receives a non-local goto, then store the
   5269      bits we need to restore the frame pointer.  */
   5270   if (cfun->nonlocal_goto_save_area)
   5271     {
   5272       tree t_save;
   5273       rtx r_save;
   5274 
   5275       tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
   5276       gcc_assert (DECL_RTL_SET_P (var));
   5277 
   5278       t_save = build4 (ARRAY_REF,
   5279 		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
   5280 		       cfun->nonlocal_goto_save_area,
   5281 		       integer_zero_node, NULL_TREE, NULL_TREE);
   5282       r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
   5283       gcc_assert (GET_MODE (r_save) == Pmode);
   5284 
   5285       emit_move_insn (r_save, hard_frame_pointer_rtx);
   5286       update_nonlocal_goto_save_area ();
   5287     }
   5288 
   5289   if (crtl->profile)
   5290     {
   5291 #ifdef PROFILE_HOOK
   5292       PROFILE_HOOK (current_function_funcdef_no);
   5293 #endif
   5294     }
   5295 
   5296   /* If we are doing generic stack checking, the probe should go here.  */
   5297   if (flag_stack_check == GENERIC_STACK_CHECK)
   5298     stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
   5299 
   5300   currently_expanding_function_start = false;
   5301 }
   5302 
   5303 void
   5305 pop_dummy_function (void)
   5306 {
   5307   pop_cfun ();
   5308   in_dummy_function = false;
   5309 }
   5310 
   5311 /* Undo the effects of init_dummy_function_start.  */
   5312 void
   5313 expand_dummy_function_end (void)
   5314 {
   5315   gcc_assert (in_dummy_function);
   5316 
   5317   /* End any sequences that failed to be closed due to syntax errors.  */
   5318   while (in_sequence_p ())
   5319     end_sequence ();
   5320 
   5321   /* Outside function body, can't compute type's actual size
   5322      until next function's body starts.  */
   5323 
   5324   free_after_parsing (cfun);
   5325   free_after_compilation (cfun);
   5326   pop_dummy_function ();
   5327 }
   5328 
   5329 /* Helper for diddle_return_value.  */
   5330 
   5331 void
   5332 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
   5333 {
   5334   if (! outgoing)
   5335     return;
   5336 
   5337   if (REG_P (outgoing))
   5338     (*doit) (outgoing, arg);
   5339   else if (GET_CODE (outgoing) == PARALLEL)
   5340     {
   5341       int i;
   5342 
   5343       for (i = 0; i < XVECLEN (outgoing, 0); i++)
   5344 	{
   5345 	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
   5346 
   5347 	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
   5348 	    (*doit) (x, arg);
   5349 	}
   5350     }
   5351 }
   5352 
   5353 /* Call DOIT for each hard register used as a return value from
   5354    the current function.  */
   5355 
   5356 void
   5357 diddle_return_value (void (*doit) (rtx, void *), void *arg)
   5358 {
   5359   diddle_return_value_1 (doit, arg, crtl->return_rtx);
   5360 }
   5361 
   5362 static void
   5363 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
   5364 {
   5365   emit_clobber (reg);
   5366 }
   5367 
   5368 void
   5369 clobber_return_register (void)
   5370 {
   5371   diddle_return_value (do_clobber_return_reg, NULL);
   5372 
    5373   /* In case we do use a pseudo to return the value, clobber it too.  */
   5374   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
   5375     {
   5376       tree decl_result = DECL_RESULT (current_function_decl);
   5377       rtx decl_rtl = DECL_RTL (decl_result);
   5378       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
   5379 	{
   5380 	  do_clobber_return_reg (decl_rtl, NULL);
   5381 	}
   5382     }
   5383 }
   5384 
   5385 static void
   5386 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
   5387 {
   5388   emit_use (reg);
   5389 }
   5390 
   5391 static void
   5392 use_return_register (void)
   5393 {
   5394   diddle_return_value (do_use_return_reg, NULL);
   5395 }
   5396 
   5397 /* Generate RTL for the end of the current function.  */
   5398 
   5399 void
   5400 expand_function_end (void)
   5401 {
   5402   /* If arg_pointer_save_area was referenced only from a nested
   5403      function, we will not have initialized it yet.  Do that now.  */
   5404   if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
   5405     get_arg_pointer_save_area ();
   5406 
   5407   /* If we are doing generic stack checking and this function makes calls,
   5408      do a stack probe at the start of the function to ensure we have enough
   5409      space for another stack frame.  */
   5410   if (flag_stack_check == GENERIC_STACK_CHECK)
   5411     {
   5412       rtx_insn *insn, *seq;
   5413 
   5414       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
   5415 	if (CALL_P (insn))
   5416 	  {
   5417 	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
   5418 	    start_sequence ();
   5419 	    if (STACK_CHECK_MOVING_SP)
   5420 	      anti_adjust_stack_and_probe (max_frame_size, true);
   5421 	    else
   5422 	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
   5423 	    seq = get_insns ();
   5424 	    end_sequence ();
   5425 	    set_insn_locations (seq, prologue_location);
   5426 	    emit_insn_before (seq, stack_check_probe_note);
   5427 	    break;
   5428 	  }
   5429     }
   5430 
   5431   /* End any sequences that failed to be closed due to syntax errors.  */
   5432   while (in_sequence_p ())
   5433     end_sequence ();
   5434 
   5435   clear_pending_stack_adjust ();
   5436   do_pending_stack_adjust ();
   5437 
    5438   /* Output a line number for the end of the function.
   5439      SDB depended on this.  */
   5440   set_curr_insn_location (input_location);
   5441 
   5442   /* Before the return label (if any), clobber the return
   5443      registers so that they are not propagated live to the rest of
   5444      the function.  This can only happen with functions that drop
   5445      through; if there had been a return statement, there would
   5446      have either been a return rtx, or a jump to the return label.
   5447 
    5448      We delay actual code generation until after current_function_value_rtx
    5449      is computed.  */
   5450   rtx_insn *clobber_after = get_last_insn ();
   5451 
   5452   /* Output the label for the actual return from the function.  */
   5453   emit_label (return_label);
   5454 
   5455   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
   5456     {
   5457       /* Let except.cc know where it should emit the call to unregister
   5458 	 the function context for sjlj exceptions.  */
   5459       if (flag_exceptions)
   5460 	sjlj_emit_function_exit_after (get_last_insn ());
   5461     }
   5462 
   5463   /* If this is an implementation of throw, do what's necessary to
   5464      communicate between __builtin_eh_return and the epilogue.  */
   5465   expand_eh_return ();
   5466 
   5467   /* If stack protection is enabled for this function, check the guard.  */
   5468   if (crtl->stack_protect_guard
   5469       && targetm.stack_protect_runtime_enabled_p ()
   5470       && naked_return_label == NULL_RTX)
   5471     stack_protect_epilogue ();
   5472 
   5473   /* If scalar return value was computed in a pseudo-reg, or was a named
   5474      return value that got dumped to the stack, copy that to the hard
   5475      return register.  */
   5476   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
   5477     {
   5478       tree decl_result = DECL_RESULT (current_function_decl);
   5479       rtx decl_rtl = DECL_RTL (decl_result);
   5480 
   5481       if ((REG_P (decl_rtl)
   5482 	   ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
   5483 	   : DECL_REGISTER (decl_result))
   5484 	  /* Unless the psABI says not to.  */
   5485 	  && !TYPE_EMPTY_P (TREE_TYPE (decl_result)))
   5486 	{
   5487 	  rtx real_decl_rtl = crtl->return_rtx;
   5488 	  complex_mode cmode;
   5489 
   5490 	  /* This should be set in assign_parms.  */
   5491 	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
   5492 
   5493 	  /* If this is a BLKmode structure being returned in registers,
   5494 	     then use the mode computed in expand_return.  Note that if
   5495 	     decl_rtl is memory, then its mode may have been changed,
   5496 	     but that crtl->return_rtx has not.  */
   5497 	  if (GET_MODE (real_decl_rtl) == BLKmode)
   5498 	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
   5499 
   5500 	  /* If a non-BLKmode return value should be padded at the least
   5501 	     significant end of the register, shift it left by the appropriate
   5502 	     amount.  BLKmode results are handled using the group load/store
   5503 	     machinery.  */
   5504 	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
   5505 	      && REG_P (real_decl_rtl)
   5506 	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
   5507 	    {
   5508 	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
   5509 					   REGNO (real_decl_rtl)),
   5510 			      decl_rtl);
   5511 	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
   5512 	    }
   5513 	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
   5514 	    {
   5515 	      /* If expand_function_start has created a PARALLEL for decl_rtl,
   5516 		 move the result to the real return registers.  Otherwise, do
   5517 		 a group load from decl_rtl for a named return.  */
   5518 	      if (GET_CODE (decl_rtl) == PARALLEL)
   5519 		emit_group_move (real_decl_rtl, decl_rtl);
   5520 	      else
   5521 		emit_group_load (real_decl_rtl, decl_rtl,
   5522 				 TREE_TYPE (decl_result),
   5523 				 int_size_in_bytes (TREE_TYPE (decl_result)));
   5524 	    }
   5525 	  /* In the case of complex integer modes smaller than a word, we'll
   5526 	     need to generate some non-trivial bitfield insertions.  Do that
   5527 	     on a pseudo and not the hard register.  */
   5528 	  else if (GET_CODE (decl_rtl) == CONCAT
   5529 		   && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
   5530 		   && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
   5531 	    {
   5532 	      int old_generating_concat_p;
   5533 	      rtx tmp;
   5534 
   5535 	      old_generating_concat_p = generating_concat_p;
   5536 	      generating_concat_p = 0;
   5537 	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
   5538 	      generating_concat_p = old_generating_concat_p;
   5539 
   5540 	      emit_move_insn (tmp, decl_rtl);
   5541 	      emit_move_insn (real_decl_rtl, tmp);
   5542 	    }
    5543 	  /* If a named return value dumped the result to memory, then
   5544 	     we may need to re-do the PROMOTE_MODE signed/unsigned
   5545 	     extension.  */
   5546 	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
   5547 	    {
   5548 	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
   5549 	      promote_function_mode (TREE_TYPE (decl_result),
   5550 				     GET_MODE (decl_rtl), &unsignedp,
   5551 				     TREE_TYPE (current_function_decl), 1);
   5552 
   5553 	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
   5554 	    }
   5555 	  else
   5556 	    emit_move_insn (real_decl_rtl, decl_rtl);
   5557 	}
   5558     }
   5559 
   5560   /* If returning a structure, arrange to return the address of the value
   5561      in a place where debuggers expect to find it.
   5562 
   5563      If returning a structure PCC style,
   5564      the caller also depends on this value.
   5565      And cfun->returns_pcc_struct is not necessarily set.  */
   5566   if ((cfun->returns_struct || cfun->returns_pcc_struct)
   5567       && !targetm.calls.omit_struct_return_reg)
   5568     {
   5569       rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
   5570       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
   5571       rtx outgoing;
   5572 
   5573       if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
   5574 	type = TREE_TYPE (type);
   5575       else
   5576 	value_address = XEXP (value_address, 0);
   5577 
   5578       outgoing = targetm.calls.function_value (build_pointer_type (type),
   5579 					       current_function_decl, true);
   5580 
   5581       /* Mark this as a function return value so integrate will delete the
   5582 	 assignment and USE below when inlining this function.  */
   5583       REG_FUNCTION_VALUE_P (outgoing) = 1;
   5584 
   5585       /* The address may be ptr_mode and OUTGOING may be Pmode.  */
   5586       scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
   5587       value_address = convert_memory_address (mode, value_address);
   5588 
   5589       emit_move_insn (outgoing, value_address);
   5590 
   5591       /* Show return register used to hold result (in this case the address
    5592 	 of the result).  */
   5593       crtl->return_rtx = outgoing;
   5594     }
   5595 
    5596   /* Emit the actual code to clobber the return register.  Don't emit
    5597      it if clobber_after is a barrier; in that case the previous basic
    5598      block certainly doesn't fall thru into the exit block.  */
   5599   if (!BARRIER_P (clobber_after))
   5600     {
   5601       start_sequence ();
   5602       clobber_return_register ();
   5603       rtx_insn *seq = get_insns ();
   5604       end_sequence ();
   5605 
   5606       emit_insn_after (seq, clobber_after);
   5607     }
   5608 
   5609   /* Output the label for the naked return from the function.  */
   5610   if (naked_return_label)
   5611     emit_label (naked_return_label);
   5612 
   5613   /* @@@ This is a kludge.  We want to ensure that instructions that
   5614      may trap are not moved into the epilogue by scheduling, because
   5615      we don't always emit unwind information for the epilogue.  */
   5616   if (cfun->can_throw_non_call_exceptions
   5617       && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
   5618     emit_insn (gen_blockage ());
   5619 
   5620   /* If stack protection is enabled for this function, check the guard.  */
   5621   if (crtl->stack_protect_guard
   5622       && targetm.stack_protect_runtime_enabled_p ()
   5623       && naked_return_label)
   5624     stack_protect_epilogue ();
   5625 
   5626   /* If we had calls to alloca, and this machine needs
   5627      an accurate stack pointer to exit the function,
   5628      insert some code to save and restore the stack pointer.  */
   5629   if (! EXIT_IGNORE_STACK
   5630       && cfun->calls_alloca)
   5631     {
   5632       rtx tem = 0;
   5633 
   5634       start_sequence ();
   5635       emit_stack_save (SAVE_FUNCTION, &tem);
   5636       rtx_insn *seq = get_insns ();
   5637       end_sequence ();
   5638       emit_insn_before (seq, parm_birth_insn);
   5639 
   5640       emit_stack_restore (SAVE_FUNCTION, tem);
   5641     }
   5642 
   5643   /* ??? This should no longer be necessary since stupid is no longer with
    5644      us, but there are some parts of the compiler (e.g. reload_combine and
    5645      SH mach_dep_reorg) that still try to compute their own lifetime info
   5646      instead of using the general framework.  */
   5647   use_return_register ();
   5648 }
   5649 
   5650 rtx
   5651 get_arg_pointer_save_area (void)
   5652 {
   5653   rtx ret = arg_pointer_save_area;
   5654 
   5655   if (! ret)
   5656     {
   5657       ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
   5658       arg_pointer_save_area = ret;
   5659     }
   5660 
   5661   if (! crtl->arg_pointer_save_area_init)
   5662     {
   5663       /* Save the arg pointer at the beginning of the function.  The
   5664 	 generated stack slot may not be a valid memory address, so we
   5665 	 have to check it and fix it if necessary.  */
   5666       start_sequence ();
   5667       emit_move_insn (validize_mem (copy_rtx (ret)),
   5668                       crtl->args.internal_arg_pointer);
   5669       rtx_insn *seq = get_insns ();
   5670       end_sequence ();
   5671 
   5672       push_topmost_sequence ();
   5673       emit_insn_after (seq, entry_of_function ());
   5674       pop_topmost_sequence ();
   5675 
   5676       crtl->arg_pointer_save_area_init = true;
   5677     }
   5678 
   5679   return ret;
   5680 }
   5681 
   5682 
   5684 /* If debugging dumps are requested, dump information about how the
   5685    target handled -fstack-check=clash for the prologue.
   5686 
   5687    PROBES describes what if any probes were emitted.
   5688 
   5689    RESIDUALS indicates if the prologue had any residual allocation
   5690    (i.e. total allocation was not a multiple of PROBE_INTERVAL).  */
   5691 
   5692 void
   5693 dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
   5694 {
   5695   if (!dump_file)
   5696     return;
   5697 
   5698   switch (probes)
   5699     {
   5700     case NO_PROBE_NO_FRAME:
   5701       fprintf (dump_file,
   5702 	       "Stack clash no probe no stack adjustment in prologue.\n");
   5703       break;
   5704     case NO_PROBE_SMALL_FRAME:
   5705       fprintf (dump_file,
   5706 	       "Stack clash no probe small stack adjustment in prologue.\n");
   5707       break;
   5708     case PROBE_INLINE:
   5709       fprintf (dump_file, "Stack clash inline probes in prologue.\n");
   5710       break;
   5711     case PROBE_LOOP:
   5712       fprintf (dump_file, "Stack clash probe loop in prologue.\n");
   5713       break;
   5714     }
   5715 
   5716   if (residuals)
   5717     fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
   5718   else
   5719     fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
   5720 
   5721   if (frame_pointer_needed)
   5722     fprintf (dump_file, "Stack clash frame pointer needed.\n");
   5723   else
   5724     fprintf (dump_file, "Stack clash no frame pointer needed.\n");
   5725 
   5726   if (TREE_THIS_VOLATILE (cfun->decl))
   5727     fprintf (dump_file,
   5728 	     "Stack clash noreturn prologue, assuming no implicit"
   5729 	     " probes in caller.\n");
   5730   else
   5731     fprintf (dump_file,
   5732 	     "Stack clash not noreturn prologue.\n");
   5733 }
   5734 
   5735 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   5736    for the first time.  */
   5737 
   5738 static void
   5739 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
   5740 {
   5741   rtx_insn *tmp;
   5742   hash_table<insn_cache_hasher> *hash = *hashp;
   5743 
   5744   if (hash == NULL)
   5745     *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
   5746 
   5747   for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
   5748     {
   5749       rtx *slot = hash->find_slot (tmp, INSERT);
   5750       gcc_assert (*slot == NULL);
   5751       *slot = tmp;
   5752     }
   5753 }
   5754 
    5755 /* INSN has been duplicated or replaced by COPY, perhaps when duplicating
    5756    a basic block, splitting, or applying peepholes.  If INSN is a
    5757    prologue or epilogue insn, then record COPY as well.  */
   5758 
   5759 void
   5760 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
   5761 {
   5762   hash_table<insn_cache_hasher> *hash;
   5763   rtx *slot;
   5764 
   5765   hash = epilogue_insn_hash;
   5766   if (!hash || !hash->find (insn))
   5767     {
   5768       hash = prologue_insn_hash;
   5769       if (!hash || !hash->find (insn))
   5770 	return;
   5771     }
   5772 
   5773   slot = hash->find_slot (copy, INSERT);
   5774   gcc_assert (*slot == NULL);
   5775   *slot = copy;
   5776 }
   5777 
   5778 /* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   5779    we can be running after reorg, SEQUENCE rtl is possible.  */
   5780 
   5781 static bool
   5782 contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
   5783 {
   5784   if (hash == NULL)
   5785     return false;
   5786 
   5787   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
   5788     {
   5789       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
   5790       int i;
   5791       for (i = seq->len () - 1; i >= 0; i--)
   5792 	if (hash->find (seq->element (i)))
   5793 	  return true;
   5794       return false;
   5795     }
   5796 
   5797   return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
   5798 }
   5799 
   5800 int
   5801 prologue_contains (const rtx_insn *insn)
   5802 {
   5803   return contains (insn, prologue_insn_hash);
   5804 }
   5805 
   5806 int
   5807 epilogue_contains (const rtx_insn *insn)
   5808 {
   5809   return contains (insn, epilogue_insn_hash);
   5810 }
   5811 
   5812 int
   5813 prologue_epilogue_contains (const rtx_insn *insn)
   5814 {
   5815   if (contains (insn, prologue_insn_hash))
   5816     return 1;
   5817   if (contains (insn, epilogue_insn_hash))
   5818     return 1;
   5819   return 0;
   5820 }
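
/* Usage sketch (hypothetical): passes that must leave prologue and
   epilogue instructions alone can filter on the predicates above:

     rtx_insn *insn;
     for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
       if (!prologue_epilogue_contains (insn))
	 transform (insn);

   where "transform" stands for whatever the pass does to ordinary
   insns.  */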
   5821 
   5822 void
   5823 record_prologue_seq (rtx_insn *seq)
   5824 {
   5825   record_insns (seq, NULL, &prologue_insn_hash);
   5826 }
   5827 
   5828 void
   5829 record_epilogue_seq (rtx_insn *seq)
   5830 {
   5831   record_insns (seq, NULL, &epilogue_insn_hash);
   5832 }
   5833 
   5834 /* Set JUMP_LABEL for a return insn.  */
   5835 
   5836 void
   5837 set_return_jump_label (rtx_insn *returnjump)
   5838 {
   5839   rtx pat = PATTERN (returnjump);
   5840   if (GET_CODE (pat) == PARALLEL)
   5841     pat = XVECEXP (pat, 0, 0);
   5842   if (ANY_RETURN_P (pat))
   5843     JUMP_LABEL (returnjump) = pat;
   5844   else
   5845     JUMP_LABEL (returnjump) = ret_rtx;
   5846 }
   5847 
   5848 /* Return a sequence to be used as the split prologue for the current
   5849    function, or NULL.  */
   5850 
   5851 static rtx_insn *
   5852 make_split_prologue_seq (void)
   5853 {
   5854   if (!flag_split_stack
   5855       || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
   5856     return NULL;
   5857 
   5858   start_sequence ();
   5859   emit_insn (targetm.gen_split_stack_prologue ());
   5860   rtx_insn *seq = get_insns ();
   5861   end_sequence ();
   5862 
   5863   record_insns (seq, NULL, &prologue_insn_hash);
   5864   set_insn_locations (seq, prologue_location);
   5865 
   5866   return seq;
   5867 }
   5868 
   5869 /* Return a sequence to be used as the prologue for the current function,
   5870    or NULL.  */
   5871 
   5872 static rtx_insn *
   5873 make_prologue_seq (void)
   5874 {
   5875   if (!targetm.have_prologue ())
   5876     return NULL;
   5877 
   5878   start_sequence ();
   5879   rtx_insn *seq = targetm.gen_prologue ();
   5880   emit_insn (seq);
   5881 
   5882   /* Insert an explicit USE for the frame pointer
    5883      if profiling is on and the frame pointer is required.  */
   5884   if (crtl->profile && frame_pointer_needed)
   5885     emit_use (hard_frame_pointer_rtx);
   5886 
   5887   /* Retain a map of the prologue insns.  */
   5888   record_insns (seq, NULL, &prologue_insn_hash);
   5889   emit_note (NOTE_INSN_PROLOGUE_END);
   5890 
   5891   /* Ensure that instructions are not moved into the prologue when
   5892      profiling is on.  The call to the profiling routine can be
   5893      emitted within the live range of a call-clobbered register.  */
   5894   if (!targetm.profile_before_prologue () && crtl->profile)
   5895     emit_insn (gen_blockage ());
   5896 
   5897   seq = get_insns ();
   5898   end_sequence ();
   5899   set_insn_locations (seq, prologue_location);
   5900 
   5901   return seq;
   5902 }
   5903 
   5904 /* Emit a sequence of insns to zero the call-used registers before RET
   5905    according to ZERO_REGS_TYPE.  */
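
         /* As an illustrative sketch (not output produced by this file):
            on x86-64, a function compiled with
            -fzero-call-used-regs=used-gpr that only clobbers %rax and
            %rdi might have

              xorl	%eax, %eax
              xorl	%edi, %edi

            emitted just before its ret.  */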
   5906 
   5907 static void
   5908 gen_call_used_regs_seq (rtx_insn *ret, unsigned int zero_regs_type)
   5909 {
   5910   bool only_gpr = true;
   5911   bool only_used = true;
   5912   bool only_arg = true;
   5913 
   5914   /* No need to zero call-used-regs in main ().  */
   5915   if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
   5916     return;
   5917 
   5918   /* No need to zero call-used-regs if __builtin_eh_return is called
   5919      since it isn't a normal function return.  */
   5920   if (crtl->calls_eh_return)
   5921     return;
   5922 
   5923   /* If only_gpr is true, only zero call-used registers that are
   5924      general-purpose registers; if only_used is true, only zero
   5925      call-used registers that are used in the current function;
   5926      if only_arg is true, only zero call-used registers that pass
    5927      parameters as defined by the platform's calling convention.  */
   5928 
   5929   using namespace zero_regs_flags;
   5930 
   5931   only_gpr = zero_regs_type & ONLY_GPR;
   5932   only_used = zero_regs_type & ONLY_USED;
   5933   only_arg = zero_regs_type & ONLY_ARG;
   5934 
   5935   /* For each of the hard registers, we should zero it if:
   5936 	    1. it is a call-used register;
   5937 	and 2. it is not a fixed register;
   5938 	and 3. it is not live at the return of the routine;
    5939 	and 4. it is a general-purpose register if only_gpr is true;
    5940 	and 5. it is used in the routine if only_used is true;
    5941 	and 6. it is a register that passes a parameter if only_arg is true.  */
   5942 
   5943   /* First, prepare the data flow information.  */
   5944   basic_block bb = BLOCK_FOR_INSN (ret);
   5945   auto_bitmap live_out;
   5946   bitmap_copy (live_out, df_get_live_out (bb));
   5947   df_simulate_initialize_backwards (bb, live_out);
   5948   df_simulate_one_insn_backwards (bb, ret, live_out);
   5949 
   5950   HARD_REG_SET selected_hardregs;
   5951   HARD_REG_SET all_call_used_regs;
   5952   CLEAR_HARD_REG_SET (selected_hardregs);
   5953   CLEAR_HARD_REG_SET (all_call_used_regs);
   5954   for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
   5955     {
   5956       if (!crtl->abi->clobbers_full_reg_p (regno))
   5957 	continue;
   5958       if (fixed_regs[regno])
   5959 	continue;
   5960       if (REGNO_REG_SET_P (live_out, regno))
   5961 	continue;
   5962 #ifdef LEAF_REG_REMAP
   5963       if (crtl->uses_only_leaf_regs && LEAF_REG_REMAP (regno) < 0)
   5964 	continue;
   5965 #endif
   5966       /* This is a call used register that is dead at return.  */
   5967       SET_HARD_REG_BIT (all_call_used_regs, regno);
   5968 
   5969       if (only_gpr
   5970 	  && !TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], regno))
   5971 	continue;
   5972       if (only_used && !df_regs_ever_live_p (regno))
   5973 	continue;
   5974       if (only_arg && !FUNCTION_ARG_REGNO_P (regno))
   5975 	continue;
   5976 
   5977       /* Now this is a register that we might want to zero.  */
   5978       SET_HARD_REG_BIT (selected_hardregs, regno);
   5979     }
   5980 
   5981   if (hard_reg_set_empty_p (selected_hardregs))
   5982     return;
   5983 
   5984   /* Now that we have a hard register set that needs to be zeroed, pass it to
   5985      target to generate zeroing sequence.  */
   5986   HARD_REG_SET zeroed_hardregs;
   5987   start_sequence ();
   5988   zeroed_hardregs = targetm.calls.zero_call_used_regs (selected_hardregs);
   5989 
    5990   /* For most targets, the returned set of registers is a subset of
    5991      selected_hardregs.  However, on some targets (for example MIPS),
    5992      clearing some registers that are in selected_hardregs requires
    5993      clearing other call-used registers that are not in selected_hardregs;
    5994      in that situation, the returned set of registers must be a subset
    5995      of all the call-used registers.  */
   5996   gcc_assert (hard_reg_set_subset_p (zeroed_hardregs, all_call_used_regs));
   5997 
   5998   rtx_insn *seq = get_insns ();
   5999   end_sequence ();
   6000   if (seq)
   6001     {
   6002       /* Emit the memory blockage and register clobber asm volatile before
   6003 	 the whole sequence.  */
   6004       start_sequence ();
   6005       expand_asm_reg_clobber_mem_blockage (zeroed_hardregs);
   6006       rtx_insn *seq_barrier = get_insns ();
   6007       end_sequence ();
   6008 
   6009       emit_insn_before (seq_barrier, ret);
   6010       emit_insn_before (seq, ret);
   6011 
   6012       /* Update the data flow information.  */
   6013       crtl->must_be_zero_on_return |= zeroed_hardregs;
   6014       df_update_exit_block_uses ();
   6015     }
   6016 }
   6017 
   6018 
   6019 /* Return a sequence to be used as the epilogue for the current function,
   6020    or NULL.  */
   6021 
   6022 static rtx_insn *
   6023 make_epilogue_seq (void)
   6024 {
   6025   if (!targetm.have_epilogue ())
   6026     return NULL;
   6027 
   6028   start_sequence ();
   6029   emit_note (NOTE_INSN_EPILOGUE_BEG);
   6030   rtx_insn *seq = targetm.gen_epilogue ();
   6031   if (seq)
   6032     emit_jump_insn (seq);
   6033 
   6034   /* Retain a map of the epilogue insns.  */
   6035   record_insns (seq, NULL, &epilogue_insn_hash);
   6036   set_insn_locations (seq, epilogue_location);
   6037 
   6038   seq = get_insns ();
   6039   rtx_insn *returnjump = get_last_insn ();
   6040   end_sequence ();
   6041 
   6042   if (JUMP_P (returnjump))
   6043     set_return_jump_label (returnjump);
   6044 
   6045   return seq;
   6046 }
   6047 
   6048 
   6049 /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   6050    this into place with notes indicating where the prologue ends and where
   6051    the epilogue begins.  Update the basic block information when possible.
   6052 
   6053    Notes on epilogue placement:
   6054    There are several kinds of edges to the exit block:
   6055    * a single fallthru edge from LAST_BB
   6056    * possibly, edges from blocks containing sibcalls
   6057    * possibly, fake edges from infinite loops
   6058 
   6059    The epilogue is always emitted on the fallthru edge from the last basic
   6060    block in the function, LAST_BB, into the exit block.
   6061 
   6062    If LAST_BB is empty except for a label, it is the target of every
   6063    other basic block in the function that ends in a return.  If a
   6064    target has a return or simple_return pattern (possibly with
   6065    conditional variants), these basic blocks can be changed so that a
   6066    return insn is emitted into them, and their target is adjusted to
   6067    the real exit block.
   6068 
   6069    Notes on shrink wrapping: We implement a fairly conservative
   6070    version of shrink-wrapping rather than the textbook one.  We only
   6071    generate a single prologue and a single epilogue.  This is
   6072    sufficient to catch a number of interesting cases involving early
   6073    exits.
   6074 
   6075    First, we identify the blocks that require the prologue to occur before
   6076    them.  These are the ones that modify a call-saved register, or reference
   6077    any of the stack or frame pointer registers.  To simplify things, we then
   6078    mark everything reachable from these blocks as also requiring a prologue.
   6079    This takes care of loops automatically, and avoids the need to examine
   6080    whether MEMs reference the frame, since it is sufficient to check for
   6081    occurrences of the stack or frame pointer.
   6082 
   6083    We then compute the set of blocks for which the need for a prologue
   6084    is anticipatable (borrowing terminology from the shrink-wrapping
   6085    description in Muchnick's book).  These are the blocks which either
   6086    require a prologue themselves, or those that have only successors
   6087    where the prologue is anticipatable.  The prologue needs to be
   6088    inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   6089    is not.  For the moment, we ensure that only one such edge exists.
   6090 
   6091    The epilogue is placed as described above, but we make a
   6092    distinction between inserting return and simple_return patterns
   6093    when modifying other blocks that end in a return.  Blocks that end
   6094    in a sibcall omit the sibcall_epilogue if the block is not in
   6095    ANTIC.  */
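
         /* For instance (an illustrative sketch, not a testcase from this
            file):

              int f (int x) { if (x == 0) return 0; return g (x); }

            Here the "x == 0" path needs no saved registers or stack frame,
            so the prologue can be restricted to the path that reaches the
            call to g.  */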
   6096 
   6097 void
   6098 thread_prologue_and_epilogue_insns (void)
   6099 {
   6100   df_analyze ();
   6101 
   6102   /* Can't deal with multiple successors of the entry block at the
   6103      moment.  Function should always have at least one entry
   6104      point.  */
   6105   gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
   6106 
   6107   edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   6108   edge orig_entry_edge = entry_edge;
   6109 
   6110   rtx_insn *split_prologue_seq = make_split_prologue_seq ();
   6111   rtx_insn *prologue_seq = make_prologue_seq ();
   6112   rtx_insn *epilogue_seq = make_epilogue_seq ();
   6113 
   6114   /* Try to perform a kind of shrink-wrapping, making sure the
   6115      prologue/epilogue is emitted only around those parts of the
   6116      function that require it.  */
   6117   try_shrink_wrapping (&entry_edge, prologue_seq);
   6118 
   6119   /* If the target can handle splitting the prologue/epilogue into separate
   6120      components, try to shrink-wrap these components separately.  */
   6121   try_shrink_wrapping_separate (entry_edge->dest);
   6122 
    6123   /* If that did anything for any component, we now need to generate the
   6124      "main" prologue again.  Because some targets require some of these
   6125      to be called in a specific order (i386 requires the split prologue
   6126      to be first, for example), we create all three sequences again here.
   6127      If this does not work for some target, that target should not enable
   6128      separate shrink-wrapping.  */
   6129   if (crtl->shrink_wrapped_separate)
   6130     {
   6131       split_prologue_seq = make_split_prologue_seq ();
   6132       prologue_seq = make_prologue_seq ();
   6133       epilogue_seq = make_epilogue_seq ();
   6134     }
   6135 
   6136   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
   6137 
   6138   /* A small fib -- epilogue is not yet completed, but we wish to re-use
   6139      this marker for the splits of EH_RETURN patterns, and nothing else
   6140      uses the flag in the meantime.  */
   6141   epilogue_completed = 1;
   6142 
   6143   /* Find non-fallthru edges that end with EH_RETURN instructions.  On
   6144      some targets, these get split to a special version of the epilogue
   6145      code.  In order to be able to properly annotate these with unwind
   6146      info, try to split them now.  If we get a valid split, drop an
   6147      EPILOGUE_BEG note and mark the insns as epilogue insns.  */
   6148   edge e;
   6149   edge_iterator ei;
   6150   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
   6151     {
   6152       rtx_insn *prev, *last, *trial;
   6153 
   6154       if (e->flags & EDGE_FALLTHRU)
   6155 	continue;
   6156       last = BB_END (e->src);
   6157       if (!eh_returnjump_p (last))
   6158 	continue;
   6159 
   6160       prev = PREV_INSN (last);
   6161       trial = try_split (PATTERN (last), last, 1);
   6162       if (trial == last)
   6163 	continue;
   6164 
   6165       record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
   6166       emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
   6167     }
   6168 
   6169   edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
   6170 
   6171   if (exit_fallthru_edge)
   6172     {
   6173       if (epilogue_seq)
   6174 	{
   6175 	  insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
   6176 	  commit_edge_insertions ();
   6177 
   6178 	  /* The epilogue insns we inserted may cause the exit edge to no longer
   6179 	     be fallthru.  */
   6180 	  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
   6181 	    {
   6182 	      if (((e->flags & EDGE_FALLTHRU) != 0)
   6183 		  && returnjump_p (BB_END (e->src)))
   6184 		e->flags &= ~EDGE_FALLTHRU;
   6185 	    }
   6186 	}
   6187       else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
   6188 	{
   6189 	  /* We have a fall-through edge to the exit block, the source is not
   6190 	     at the end of the function, and there will be an assembler epilogue
   6191 	     at the end of the function.
   6192 	     We can't use force_nonfallthru here, because that would try to
   6193 	     use return.  Inserting a jump 'by hand' is extremely messy, so
   6194 	     we take advantage of cfg_layout_finalize using
   6195 	     fixup_fallthru_exit_predecessor.  */
   6196 	  cfg_layout_initialize (0);
   6197 	  basic_block cur_bb;
   6198 	  FOR_EACH_BB_FN (cur_bb, cfun)
   6199 	    if (cur_bb->index >= NUM_FIXED_BLOCKS
   6200 		&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
   6201 	      cur_bb->aux = cur_bb->next_bb;
   6202 	  cfg_layout_finalize ();
   6203 	}
   6204     }
   6205 
   6206   /* Insert the prologue.  */
   6207 
   6208   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   6209 
   6210   if (split_prologue_seq || prologue_seq)
   6211     {
   6212       rtx_insn *split_prologue_insn = split_prologue_seq;
   6213       if (split_prologue_seq)
   6214 	{
   6215 	  while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
   6216 	    split_prologue_insn = NEXT_INSN (split_prologue_insn);
   6217 	  insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
   6218 	}
   6219 
   6220       rtx_insn *prologue_insn = prologue_seq;
   6221       if (prologue_seq)
   6222 	{
   6223 	  while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
   6224 	    prologue_insn = NEXT_INSN (prologue_insn);
   6225 	  insert_insn_on_edge (prologue_seq, entry_edge);
   6226 	}
   6227 
   6228       commit_edge_insertions ();
   6229 
   6230       /* Look for basic blocks within the prologue insns.  */
   6231       if (split_prologue_insn
   6232 	  && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
   6233 	split_prologue_insn = NULL;
   6234       if (prologue_insn
   6235 	  && BLOCK_FOR_INSN (prologue_insn) == NULL)
   6236 	prologue_insn = NULL;
   6237       if (split_prologue_insn || prologue_insn)
   6238 	{
   6239 	  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
   6240 	  bitmap_clear (blocks);
   6241 	  if (split_prologue_insn)
   6242 	    bitmap_set_bit (blocks,
   6243 			    BLOCK_FOR_INSN (split_prologue_insn)->index);
   6244 	  if (prologue_insn)
   6245 	    bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
   6246 	  find_many_sub_basic_blocks (blocks);
   6247 	}
   6248     }
   6249 
   6250   default_rtl_profile ();
   6251 
   6252   /* Emit sibling epilogues before any sibling call sites.  */
   6253   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
   6254        (e = ei_safe_edge (ei));
   6255        ei_next (&ei))
   6256     {
    6257       /* Skip those already handled, i.e. the ones that run without a prologue.  */
   6258       if (e->flags & EDGE_IGNORE)
   6259 	{
   6260 	  e->flags &= ~EDGE_IGNORE;
   6261 	  continue;
   6262 	}
   6263 
   6264       rtx_insn *insn = BB_END (e->src);
   6265 
   6266       if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
   6267 	continue;
   6268 
   6269       if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
   6270 	{
   6271 	  start_sequence ();
   6272 	  emit_note (NOTE_INSN_EPILOGUE_BEG);
   6273 	  emit_insn (ep_seq);
   6274 	  rtx_insn *seq = get_insns ();
   6275 	  end_sequence ();
   6276 
   6277 	  /* Retain a map of the epilogue insns.  Used in life analysis to
   6278 	     avoid getting rid of sibcall epilogue insns.  Do this before we
   6279 	     actually emit the sequence.  */
   6280 	  record_insns (seq, NULL, &epilogue_insn_hash);
   6281 	  set_insn_locations (seq, epilogue_location);
   6282 
   6283 	  emit_insn_before (seq, insn);
   6284 	}
   6285     }
   6286 
   6287   if (epilogue_seq)
   6288     {
   6289       rtx_insn *insn, *next;
   6290 
    6291       /* Move any NOTE_INSN_FUNCTION_BEG notes that appear inside the
    6292 	 epilogue to just before it, as they can be relevant for debug
    6293 	 info generation.  */
   6296       for (insn = epilogue_seq; insn; insn = next)
   6297 	{
   6298 	  next = NEXT_INSN (insn);
   6299 	  if (NOTE_P (insn)
   6300 	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
   6301 	    reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
   6302 	}
   6303     }
   6304 
   6305   /* Threading the prologue and epilogue changes the artificial refs
   6306      in the entry and exit blocks.  */
   6307   epilogue_completed = 1;
   6308   df_update_entry_exit_and_calls ();
   6309 }
   6310 
   6311 /* Reposition the prologue-end and epilogue-begin notes after
   6312    instruction scheduling.  */
   6313 
   6314 void
   6315 reposition_prologue_and_epilogue_notes (void)
   6316 {
   6317   if (!targetm.have_prologue ()
   6318       && !targetm.have_epilogue ()
   6319       && !targetm.have_sibcall_epilogue ())
   6320     return;
   6321 
   6322   /* Since the hash table is created on demand, the fact that it is
   6323      non-null is a signal that it is non-empty.  */
   6324   if (prologue_insn_hash != NULL)
   6325     {
   6326       size_t len = prologue_insn_hash->elements ();
   6327       rtx_insn *insn, *last = NULL, *note = NULL;
   6328 
   6329       /* Scan from the beginning until we reach the last prologue insn.  */
   6330       /* ??? While we do have the CFG intact, there are two problems:
   6331 	 (1) The prologue can contain loops (typically probing the stack),
   6332 	     which means that the end of the prologue isn't in the first bb.
   6333 	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
   6334       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
   6335 	{
   6336 	  if (NOTE_P (insn))
   6337 	    {
   6338 	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
   6339 		note = insn;
   6340 	    }
   6341 	  else if (contains (insn, prologue_insn_hash))
   6342 	    {
   6343 	      last = insn;
   6344 	      if (--len == 0)
   6345 		break;
   6346 	    }
   6347 	}
   6348 
   6349       if (last)
   6350 	{
   6351 	  if (note == NULL)
   6352 	    {
   6353 	      /* Scan forward looking for the PROLOGUE_END note.  It should
   6354 		 be right at the beginning of the block, possibly with other
   6355 		 insn notes that got moved there.  */
   6356 	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
   6357 		{
   6358 		  if (NOTE_P (note)
   6359 		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
   6360 		    break;
   6361 		}
   6362 	    }
   6363 
   6364 	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
   6365 	  if (LABEL_P (last))
   6366 	    last = NEXT_INSN (last);
   6367 	  reorder_insns (note, note, last);
   6368 	}
   6369     }
   6370 
   6371   if (epilogue_insn_hash != NULL)
   6372     {
   6373       edge_iterator ei;
   6374       edge e;
   6375 
   6376       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
   6377 	{
   6378 	  rtx_insn *insn, *first = NULL, *note = NULL;
   6379 	  basic_block bb = e->src;
   6380 
    6381 	  /* Scan from the beginning until we reach the first epilogue insn.  */
   6382 	  FOR_BB_INSNS (bb, insn)
   6383 	    {
   6384 	      if (NOTE_P (insn))
   6385 		{
   6386 		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
   6387 		    {
   6388 		      note = insn;
   6389 		      if (first != NULL)
   6390 			break;
   6391 		    }
   6392 		}
   6393 	      else if (first == NULL && contains (insn, epilogue_insn_hash))
   6394 		{
   6395 		  first = insn;
   6396 		  if (note != NULL)
   6397 		    break;
   6398 		}
   6399 	    }
   6400 
   6401 	  if (note)
   6402 	    {
   6403 	      /* If the function has a single basic block, and no real
   6404 		 epilogue insns (e.g. sibcall with no cleanup), the
   6405 		 epilogue note can get scheduled before the prologue
    6406 		 note.  If we have frame-related prologue insns, having
   6407 		 them scanned during the epilogue will result in a crash.
   6408 		 In this case re-order the epilogue note to just before
   6409 		 the last insn in the block.  */
   6410 	      if (first == NULL)
   6411 		first = BB_END (bb);
   6412 
   6413 	      if (PREV_INSN (first) != note)
   6414 		reorder_insns (note, note, PREV_INSN (first));
   6415 	    }
   6416 	}
   6417     }
   6418 }
   6419 
    6420 /* Returns the name of the function declared by FNDECL.  */
   6421 const char *
   6422 fndecl_name (tree fndecl)
   6423 {
   6424   if (fndecl == NULL)
   6425     return "(nofn)";
   6426   return lang_hooks.decl_printable_name (fndecl, 1);
   6427 }
   6428 
    6429 /* Returns the name of the function FN.  */
   6430 const char *
   6431 function_name (struct function *fn)
   6432 {
   6433   tree fndecl = (fn == NULL) ? NULL : fn->decl;
   6434   return fndecl_name (fndecl);
   6435 }
   6436 
   6437 /* Returns the name of the current function.  */
   6438 const char *
   6439 current_function_name (void)
   6440 {
   6441   return function_name (cfun);
   6442 }
   6443 
   6444 
   6446 static unsigned int
   6447 rest_of_handle_check_leaf_regs (void)
   6448 {
   6449 #ifdef LEAF_REGISTERS
   6450   crtl->uses_only_leaf_regs
   6451     = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
   6452 #endif
   6453   return 0;
   6454 }
   6455 
   6456 /* Insert a TYPE into the used types hash table of CFUN.  */
   6457 
   6458 static void
   6459 used_types_insert_helper (tree type, struct function *func)
   6460 {
   6461   if (type != NULL && func != NULL)
   6462     {
   6463       if (func->used_types_hash == NULL)
   6464 	func->used_types_hash = hash_set<tree>::create_ggc (37);
   6465 
   6466       func->used_types_hash->add (type);
   6467     }
   6468 }
   6469 
    6470 /* Given a type, insert it into the used-types hash table of cfun.  */
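         /* E.g. for a variable declared as `struct S **v', the loop below
            strips the pointer levels (stopping early at any named typedef)
            and records `struct S' itself.  */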
   6471 void
   6472 used_types_insert (tree t)
   6473 {
   6474   while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
   6475     if (TYPE_NAME (t))
   6476       break;
   6477     else
   6478       t = TREE_TYPE (t);
   6479   if (TREE_CODE (t) == ERROR_MARK)
   6480     return;
   6481   if (TYPE_NAME (t) == NULL_TREE
   6482       || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
   6483     t = TYPE_MAIN_VARIANT (t);
   6484   if (debug_info_level > DINFO_LEVEL_NONE)
   6485     {
   6486       if (cfun)
   6487 	used_types_insert_helper (t, cfun);
   6488       else
   6489 	{
   6490 	  /* So this might be a type referenced by a global variable.
   6491 	     Record that type so that we can later decide to emit its
   6492 	     debug information.  */
   6493 	  vec_safe_push (types_used_by_cur_var_decl, t);
   6494 	}
   6495     }
   6496 }
   6497 
    6498 /* Helper to hash a struct types_used_by_vars_entry.  */
   6499 
   6500 static hashval_t
   6501 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
   6502 {
   6503   gcc_assert (entry && entry->var_decl && entry->type);
   6504 
   6505   return iterative_hash_object (entry->type,
   6506 				iterative_hash_object (entry->var_decl, 0));
   6507 }
   6508 
   6509 /* Hash function of the types_used_by_vars_entry hash table.  */
   6510 
   6511 hashval_t
   6512 used_type_hasher::hash (types_used_by_vars_entry *entry)
   6513 {
   6514   return hash_types_used_by_vars_entry (entry);
   6515 }
   6516 
    6517 /* Equality function of the types_used_by_vars_entry hash table.  */
   6518 
   6519 bool
   6520 used_type_hasher::equal (types_used_by_vars_entry *e1,
   6521 			 types_used_by_vars_entry *e2)
   6522 {
   6523   return (e1->var_decl == e2->var_decl && e1->type == e2->type);
   6524 }
   6525 
    6526 /* Inserts an entry into the types_used_by_vars_hash hash table.  */
   6527 
   6528 void
   6529 types_used_by_var_decl_insert (tree type, tree var_decl)
   6530 {
   6531   if (type != NULL && var_decl != NULL)
   6532     {
   6533       types_used_by_vars_entry **slot;
   6534       struct types_used_by_vars_entry e;
   6535       e.var_decl = var_decl;
   6536       e.type = type;
   6537       if (types_used_by_vars_hash == NULL)
   6538 	types_used_by_vars_hash
   6539 	  = hash_table<used_type_hasher>::create_ggc (37);
   6540 
   6541       slot = types_used_by_vars_hash->find_slot (&e, INSERT);
   6542       if (*slot == NULL)
   6543 	{
   6544 	  struct types_used_by_vars_entry *entry;
   6545 	  entry = ggc_alloc<types_used_by_vars_entry> ();
   6546 	  entry->type = type;
   6547 	  entry->var_decl = var_decl;
   6548 	  *slot = entry;
   6549 	}
   6550     }
   6551 }
   6552 
   6553 namespace {
   6554 
   6555 const pass_data pass_data_leaf_regs =
   6556 {
   6557   RTL_PASS, /* type */
   6558   "*leaf_regs", /* name */
   6559   OPTGROUP_NONE, /* optinfo_flags */
   6560   TV_NONE, /* tv_id */
   6561   0, /* properties_required */
   6562   0, /* properties_provided */
   6563   0, /* properties_destroyed */
   6564   0, /* todo_flags_start */
   6565   0, /* todo_flags_finish */
   6566 };
   6567 
   6568 class pass_leaf_regs : public rtl_opt_pass
   6569 {
   6570 public:
   6571   pass_leaf_regs (gcc::context *ctxt)
   6572     : rtl_opt_pass (pass_data_leaf_regs, ctxt)
   6573   {}
   6574 
   6575   /* opt_pass methods: */
   6576   virtual unsigned int execute (function *)
   6577     {
   6578       return rest_of_handle_check_leaf_regs ();
   6579     }
   6580 
   6581 }; // class pass_leaf_regs
   6582 
   6583 } // anon namespace
   6584 
   6585 rtl_opt_pass *
   6586 make_pass_leaf_regs (gcc::context *ctxt)
   6587 {
   6588   return new pass_leaf_regs (ctxt);
   6589 }
   6590 
   6591 static unsigned int
   6592 rest_of_handle_thread_prologue_and_epilogue (void)
   6593 {
   6594   /* prepare_shrink_wrap is sensitive to the block structure of the control
   6595      flow graph, so clean it up first.  */
   6596   if (optimize)
   6597     cleanup_cfg (0);
   6598 
   6599   /* On some machines, the prologue and epilogue code, or parts thereof,
   6600      can be represented as RTL.  Doing so lets us schedule insns between
   6601      it and the rest of the code and also allows delayed branch
   6602      scheduling to operate in the epilogue.  */
   6603   thread_prologue_and_epilogue_insns ();
   6604 
   6605   /* Some non-cold blocks may now be only reachable from cold blocks.
   6606      Fix that up.  */
   6607   fixup_partitions ();
   6608 
   6609   /* Shrink-wrapping can result in unreachable edges in the epilogue,
   6610      see PR57320.  */
   6611   cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
   6612 
   6613   /* The stack usage info is finalized during prologue expansion.  */
   6614   if (flag_stack_usage_info || flag_callgraph_info)
   6615     output_stack_usage ();
   6616 
   6617   return 0;
   6618 }
   6619 
   6620 /* Record a final call to CALLEE at LOCATION.  */
   6621 
   6622 void
   6623 record_final_call (tree callee, location_t location)
   6624 {
   6625   struct callinfo_callee datum = { location, callee };
   6626   vec_safe_push (cfun->su->callees, datum);
   6627 }
   6628 
   6629 /* Record a dynamic allocation made for DECL_OR_EXP.  */
   6630 
   6631 void
   6632 record_dynamic_alloc (tree decl_or_exp)
   6633 {
   6634   struct callinfo_dalloc datum;
   6635 
   6636   if (DECL_P (decl_or_exp))
   6637     {
   6638       datum.location = DECL_SOURCE_LOCATION (decl_or_exp);
   6639       const char *name = lang_hooks.decl_printable_name (decl_or_exp, 2);
   6640       const char *dot = strrchr (name, '.');
   6641       if (dot)
   6642 	name = dot + 1;
   6643       datum.name = ggc_strdup (name);
   6644     }
   6645   else
   6646     {
   6647       datum.location = EXPR_LOCATION (decl_or_exp);
   6648       datum.name = NULL;
   6649     }
   6650 
   6651   vec_safe_push (cfun->su->dallocs, datum);
   6652 }
   6653 
   6654 namespace {
   6655 
   6656 const pass_data pass_data_thread_prologue_and_epilogue =
   6657 {
   6658   RTL_PASS, /* type */
   6659   "pro_and_epilogue", /* name */
   6660   OPTGROUP_NONE, /* optinfo_flags */
   6661   TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
   6662   0, /* properties_required */
   6663   0, /* properties_provided */
   6664   0, /* properties_destroyed */
   6665   0, /* todo_flags_start */
   6666   ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
   6667 };
   6668 
   6669 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
   6670 {
   6671 public:
   6672   pass_thread_prologue_and_epilogue (gcc::context *ctxt)
   6673     : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
   6674   {}
   6675 
   6676   /* opt_pass methods: */
   6677   virtual unsigned int execute (function *)
   6678     {
   6679       return rest_of_handle_thread_prologue_and_epilogue ();
   6680     }
   6681 
   6682 }; // class pass_thread_prologue_and_epilogue
   6683 
   6684 } // anon namespace
   6685 
   6686 rtl_opt_pass *
   6687 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
   6688 {
   6689   return new pass_thread_prologue_and_epilogue (ctxt);
   6690 }
   6691 
   6692 namespace {
   6693 
   6694 const pass_data pass_data_zero_call_used_regs =
   6695 {
   6696   RTL_PASS, /* type */
   6697   "zero_call_used_regs", /* name */
   6698   OPTGROUP_NONE, /* optinfo_flags */
   6699   TV_NONE, /* tv_id */
   6700   0, /* properties_required */
   6701   0, /* properties_provided */
   6702   0, /* properties_destroyed */
   6703   0, /* todo_flags_start */
   6704   0, /* todo_flags_finish */
   6705 };
   6706 
   6707 class pass_zero_call_used_regs: public rtl_opt_pass
   6708 {
   6709 public:
   6710   pass_zero_call_used_regs (gcc::context *ctxt)
   6711     : rtl_opt_pass (pass_data_zero_call_used_regs, ctxt)
   6712   {}
   6713 
   6714   /* opt_pass methods: */
   6715   virtual unsigned int execute (function *);
   6716 
   6717 }; // class pass_zero_call_used_regs
   6718 
   6719 unsigned int
   6720 pass_zero_call_used_regs::execute (function *fun)
   6721 {
   6722   using namespace zero_regs_flags;
   6723   unsigned int zero_regs_type = UNSET;
   6724 
   6725   tree attr_zero_regs = lookup_attribute ("zero_call_used_regs",
   6726 					  DECL_ATTRIBUTES (fun->decl));
   6727 
    6728   /* Get the type of zero_call_used_regs from the function attribute.
    6729      Invalid attribute values have already been filtered out at this point.  */
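           /* E.g. a declaration such as

                void f (void) __attribute__ ((zero_call_used_regs ("used-gpr")));

              arrives here with attr_zero_regs being the attribute's
              TREE_LIST and "used-gpr" the STRING_CST extracted below.  */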
   6730   if (attr_zero_regs)
   6731     {
   6732       /* The TREE_VALUE of an attribute is a TREE_LIST whose TREE_VALUE
   6733 	 is the attribute argument's value.  */
   6734       attr_zero_regs = TREE_VALUE (attr_zero_regs);
   6735       gcc_assert (TREE_CODE (attr_zero_regs) == TREE_LIST);
   6736       attr_zero_regs = TREE_VALUE (attr_zero_regs);
   6737       gcc_assert (TREE_CODE (attr_zero_regs) == STRING_CST);
   6738 
   6739       for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL; ++i)
   6740 	if (strcmp (TREE_STRING_POINTER (attr_zero_regs),
   6741 		     zero_call_used_regs_opts[i].name) == 0)
   6742 	  {
   6743 	    zero_regs_type = zero_call_used_regs_opts[i].flag;
    6744 	    break;
   6745 	  }
   6746     }
   6747 
   6748   if (!zero_regs_type)
   6749     zero_regs_type = flag_zero_call_used_regs;
   6750 
   6751   /* No need to zero call-used-regs when no user request is present.  */
   6752   if (!(zero_regs_type & ENABLED))
   6753     return 0;
   6754 
   6755   edge_iterator ei;
   6756   edge e;
   6757 
   6758   /* This pass needs data flow information.  */
   6759   df_analyze ();
   6760 
   6761   /* Iterate over the function's return instructions and insert any
   6762      register zeroing required by the -fzero-call-used-regs command-line
   6763      option or the "zero_call_used_regs" function attribute.  */
   6764   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
   6765     {
   6766       rtx_insn *insn = BB_END (e->src);
   6767       if (JUMP_P (insn) && ANY_RETURN_P (JUMP_LABEL (insn)))
   6768 	gen_call_used_regs_seq (insn, zero_regs_type);
   6769     }
   6770 
   6771   return 0;
   6772 }
   6773 
   6774 } // anon namespace
   6775 
   6776 rtl_opt_pass *
   6777 make_pass_zero_call_used_regs (gcc::context *ctxt)
   6778 {
   6779   return new pass_zero_call_used_regs (ctxt);
   6780 }
   6781 
   6782 /* If CONSTRAINT is a matching constraint, then return its number.
   6783    Otherwise, return -1.  */
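         /* For example, "0" and "%2" yield 0 and 2 respectively, while a
            non-matching constraint such as "r" or "m" yields -1.  */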
   6784 
   6785 static int
   6786 matching_constraint_num (const char *constraint)
   6787 {
   6788   if (*constraint == '%')
   6789     constraint++;
   6790 
   6791   if (IN_RANGE (*constraint, '0', '9'))
   6792     return strtoul (constraint, NULL, 10);
   6793 
   6794   return -1;
   6795 }
   6796 
   6797 /* This mini-pass fixes fall-out from SSA in asm statements that have
   6798    in-out constraints.  Say you start with
   6799 
   6800      orig = inout;
   6801      asm ("": "+mr" (inout));
   6802      use (orig);
   6803 
   6804    which is transformed very early to use explicit output and match operands:
   6805 
   6806      orig = inout;
   6807      asm ("": "=mr" (inout) : "0" (inout));
   6808      use (orig);
   6809 
   6810    Or, after SSA and copyprop,
   6811 
   6812      asm ("": "=mr" (inout_2) : "0" (inout_1));
   6813      use (inout_1);
   6814 
   6815    Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   6816    they represent two separate values, so they will get different pseudo
   6817    registers during expansion.  Then, since the two operands need to match
   6818    per the constraints, but use different pseudo registers, reload can
    6819    only generate a reload for these operands.  But reloads can only be
   6820    satisfied by hardregs, not by memory, so we need a register for this
   6821    reload, just because we are presented with non-matching operands.
   6822    So, even though we allow memory for this operand, no memory can be
   6823    used for it, just because the two operands don't match.  This can
   6824    cause reload failures on register-starved targets.
   6825 
   6826    So it's a symptom of reload not being able to use memory for reloads
    6827    or, alternatively, of both operands not coming into
   6828    reload as matching (in which case the pseudo could go to memory just
   6829    fine, as the alternative allows it, and no reload would be necessary).
   6830    We fix the latter problem here, by transforming
   6831 
   6832      asm ("": "=mr" (inout_2) : "0" (inout_1));
   6833 
   6834    back to
   6835 
   6836      inout_2 = inout_1;
   6837      asm ("": "=mr" (inout_2) : "0" (inout_2));  */
   6838 
   6839 static void
   6840 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
   6841 {
   6842   int i;
   6843   bool changed = false;
   6844   rtx op = SET_SRC (p_sets[0]);
   6845   int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
   6846   rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
   6847   bool *output_matched = XALLOCAVEC (bool, noutputs);
   6848 
   6849   memset (output_matched, 0, noutputs * sizeof (bool));
   6850   for (i = 0; i < ninputs; i++)
   6851     {
   6852       rtx input, output;
   6853       rtx_insn *insns;
   6854       const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
   6855       int match, j;
   6856 
   6857       match = matching_constraint_num (constraint);
   6858       if (match < 0)
   6859 	continue;
   6860 
   6861       gcc_assert (match < noutputs);
   6862       output = SET_DEST (p_sets[match]);
   6863       input = RTVEC_ELT (inputs, i);
   6864       /* Only do the transformation for pseudos.  */
   6865       if (! REG_P (output)
   6866 	  || rtx_equal_p (output, input)
   6867 	  || !(REG_P (input) || SUBREG_P (input)
   6868 	       || MEM_P (input) || CONSTANT_P (input))
   6869 	  || !general_operand (input, GET_MODE (output)))
   6870 	continue;
   6871 
   6872       /* We can't do anything if the output is also used as input,
   6873 	 as we're going to overwrite it.  */
   6874       for (j = 0; j < ninputs; j++)
   6875 	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
   6876 	  break;
   6877       if (j != ninputs)
   6878 	continue;
   6879 
   6880       /* Avoid changing the same input several times.  For
   6881 	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
   6882 	 only change it once (to out1), rather than changing it
   6883 	 first to out1 and afterwards to out2.  */
   6884       if (i > 0)
   6885 	{
   6886 	  for (j = 0; j < noutputs; j++)
   6887 	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
   6888 	      break;
   6889 	  if (j != noutputs)
   6890 	    continue;
   6891 	}
   6892       output_matched[match] = true;
   6893 
   6894       start_sequence ();
   6895       emit_move_insn (output, copy_rtx (input));
   6896       insns = get_insns ();
   6897       end_sequence ();
   6898       emit_insn_before (insns, insn);
   6899 
    6900       constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT (SET_SRC (p_sets[match]));
   6901       bool early_clobber_p = strchr (constraint, '&') != NULL;
   6902 
   6903       /* Now replace all mentions of the input with output.  We can't
   6904 	 just replace the occurrence in inputs[i], as the register might
   6905 	 also be used in some other input (or even in an address of an
   6906 	 output), which would mean possibly increasing the number of
   6907 	 inputs by one (namely 'output' in addition), which might pose
    6908 	 too complicated a problem for reload to solve.  E.g. this situation:
   6909 
   6910 	   asm ("" : "=r" (output), "=m" (input) : "0" (input))
   6911 
   6912 	 Here 'input' is used in two occurrences as input (once for the
   6913 	 input operand, once for the address in the second output operand).
    6914 	 If we replaced only the occurrence of the input operand (to
    6915 	 make the operands match) we would be left with this:
   6916 
   6917 	   output = input
   6918 	   asm ("" : "=r" (output), "=m" (input) : "0" (output))
   6919 
   6920 	 Now we suddenly have two different input values (containing the same
   6921 	 value, but different pseudos) where we formerly had only one.
   6922 	 With more complicated asms this might lead to reload failures
    6923 	 which wouldn't have happened without this pass.  So, iterate over
   6924 	 all operands and replace all occurrences of the register used.
   6925 
   6926 	 However, if one or more of the 'input' uses have a non-matching
   6927 	 constraint and the matched output operand is an early clobber
   6928 	 operand, then do not replace the input operand, since by definition
   6929 	 it conflicts with the output operand and cannot share the same
   6930 	 register.  See PR89313 for details.  */
   6931 
   6932       for (j = 0; j < noutputs; j++)
   6933 	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
   6934 	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
   6935 	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
   6936 					      input, output);
   6937       for (j = 0; j < ninputs; j++)
   6938 	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
   6939 	  {
   6940 	    if (!early_clobber_p
   6941 		|| match == matching_constraint_num
   6942 			      (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
   6943 	      RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
   6944 						   input, output);
   6945 	  }
   6946 
   6947       changed = true;
   6948     }
   6949 
   6950   if (changed)
   6951     df_insn_rescan (insn);
   6952 }
   6953 
   6954 /* Add the decl D to the local_decls list of FUN.  */
   6955 
   6956 void
   6957 add_local_decl (struct function *fun, tree d)
   6958 {
   6959   gcc_assert (VAR_P (d));
   6960   vec_safe_push (fun->local_decls, d);
   6961 }
   6962 
   6963 namespace {
   6964 
   6965 const pass_data pass_data_match_asm_constraints =
   6966 {
   6967   RTL_PASS, /* type */
   6968   "asmcons", /* name */
   6969   OPTGROUP_NONE, /* optinfo_flags */
   6970   TV_NONE, /* tv_id */
   6971   0, /* properties_required */
   6972   0, /* properties_provided */
   6973   0, /* properties_destroyed */
   6974   0, /* todo_flags_start */
   6975   0, /* todo_flags_finish */
   6976 };
   6977 
   6978 class pass_match_asm_constraints : public rtl_opt_pass
   6979 {
   6980 public:
   6981   pass_match_asm_constraints (gcc::context *ctxt)
   6982     : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
   6983   {}
   6984 
   6985   /* opt_pass methods: */
   6986   virtual unsigned int execute (function *);
   6987 
   6988 }; // class pass_match_asm_constraints
   6989 
   6990 unsigned
   6991 pass_match_asm_constraints::execute (function *fun)
   6992 {
   6993   basic_block bb;
   6994   rtx_insn *insn;
   6995   rtx pat, *p_sets;
   6996   int noutputs;
   6997 
   6998   if (!crtl->has_asm_statement)
   6999     return 0;
   7000 
   7001   df_set_flags (DF_DEFER_INSN_RESCAN);
   7002   FOR_EACH_BB_FN (bb, fun)
   7003     {
   7004       FOR_BB_INSNS (bb, insn)
   7005 	{
   7006 	  if (!INSN_P (insn))
   7007 	    continue;
   7008 
   7009 	  pat = PATTERN (insn);
   7010 	  if (GET_CODE (pat) == PARALLEL)
   7011 	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
   7012 	  else if (GET_CODE (pat) == SET)
   7013 	    p_sets = &PATTERN (insn), noutputs = 1;
   7014 	  else
   7015 	    continue;
   7016 
   7017 	  if (GET_CODE (*p_sets) == SET
   7018 	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
   7019 	    match_asm_constraints_1 (insn, p_sets, noutputs);
   7020 	 }
   7021     }
   7022 
   7023   return TODO_df_finish;
   7024 }
   7025 
   7026 } // anon namespace
   7027 
   7028 rtl_opt_pass *
   7029 make_pass_match_asm_constraints (gcc::context *ctxt)
   7030 {
   7031   return new pass_match_asm_constraints (ctxt);
   7032 }
   7033 
   7034 
   7035 #include "gt-function.h"
   7036