Home | History | Annotate | Line # | Download | only in gcc
      1 /* Analyze RTL for GNU compiler.
      2    Copyright (C) 1987-2022 Free Software Foundation, Inc.
      3 
      4 This file is part of GCC.
      5 
      6 GCC is free software; you can redistribute it and/or modify it under
      7 the terms of the GNU General Public License as published by the Free
      8 Software Foundation; either version 3, or (at your option) any later
      9 version.
     10 
     11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
     13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     14 for more details.
     15 
     16 You should have received a copy of the GNU General Public License
     17 along with GCC; see the file COPYING3.  If not see
     18 <http://www.gnu.org/licenses/>.  */
     19 
     20 
     21 #include "config.h"
     22 #include "system.h"
     23 #include "coretypes.h"
     24 #include "backend.h"
     25 #include "target.h"
     26 #include "rtl.h"
     27 #include "rtlanal.h"
     28 #include "tree.h"
     29 #include "predict.h"
     30 #include "df.h"
     31 #include "memmodel.h"
     32 #include "tm_p.h"
     33 #include "insn-config.h"
     34 #include "regs.h"
     35 #include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
     36 #include "recog.h"
     37 #include "addresses.h"
     38 #include "rtl-iter.h"
     39 #include "hard-reg-set.h"
     40 #include "function-abi.h"
     41 
     42 /* Forward declarations */
     43 static void set_of_1 (rtx, const_rtx, void *);
     44 static bool covers_regno_p (const_rtx, unsigned int);
     45 static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
     46 static int computed_jump_p_1 (const_rtx);
     47 static void parms_set (rtx, const_rtx, void *);
     48 
     49 static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, scalar_int_mode,
     50                                                    const_rtx, machine_mode,
     51                                                    unsigned HOST_WIDE_INT);
     52 static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, scalar_int_mode,
     53 					     const_rtx, machine_mode,
     54                                              unsigned HOST_WIDE_INT);
     55 static unsigned int cached_num_sign_bit_copies (const_rtx, scalar_int_mode,
     56 						const_rtx, machine_mode,
     57                                                 unsigned int);
     58 static unsigned int num_sign_bit_copies1 (const_rtx, scalar_int_mode,
     59 					  const_rtx, machine_mode,
     60 					  unsigned int);
     61 
/* Per-rtx-code subrtx bound information used by the generic_subrtx_iterator
   worklist machinery below; presumably filled in at startup elsewhere
   (see rtl-iter.h) -- TODO confirm the initialization site.  */
rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE];
rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE];

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
     80 
/* Store X into index I of ARRAY.  ARRAY is known to have at least I
   elements.  Return the new base of ARRAY.  */

template <typename T>
typename T::value_type *
generic_subrtx_iterator <T>::add_single_to_queue (array_type &array,
						  value_type *base,
						  size_t i, value_type x)
{
  if (base == array.stack)
    {
      /* Still using the fixed-size on-stack array: store in place if
	 there is room.  */
      if (i < LOCAL_ELEMS)
	{
	  base[i] = x;
	  return base;
	}
      /* The stack array is full, so migrate its contents to the heap
	 vector and append X there.  */
      gcc_checking_assert (i == LOCAL_ELEMS);
      /* A previous iteration might also have moved from the stack to the
	 heap, in which case the heap array will already be big enough.  */
      if (vec_safe_length (array.heap) <= i)
	vec_safe_grow (array.heap, i + 1, true);
      base = array.heap->address ();
      memcpy (base, array.stack, sizeof (array.stack));
      base[LOCAL_ELEMS] = x;
      return base;
    }
  /* Already on the heap: either store within the current length or
     push a new element at the end.  */
  unsigned int length = array.heap->length ();
  if (length > i)
    {
      gcc_checking_assert (base == array.heap->address ());
      base[i] = x;
      return base;
    }
  else
    {
      gcc_checking_assert (i == length);
      vec_safe_push (array.heap, x);
      /* vec_safe_push may reallocate, so recompute the base address.  */
      return array.heap->address ();
    }
}
    122 
/* Add the subrtxes of X to worklist ARRAY, starting at END.  Return the
   number of elements added to the worklist.  */

template <typename T>
size_t
generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
						    value_type *base,
						    size_t end, rtx_type x)
{
  enum rtx_code code = GET_CODE (x);
  /* 'e' format characters mark single-rtx operands and 'E' marks rtx
     vectors; other field kinds contain no subrtxes.  */
  const char *format = GET_RTX_FORMAT (code);
  size_t orig_end = end;
  if (__builtin_expect (INSN_P (x), false))
    {
      /* Put the pattern at the top of the queue, since that's what
	 we're likely to want most.  It also allows for the SEQUENCE
	 code below.  */
      for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i)
	if (format[i] == 'e')
	  {
	    value_type subx = T::get_value (x->u.fld[i].rt_rtx);
	    if (__builtin_expect (end < LOCAL_ELEMS, true))
	      base[end++] = subx;
	    else
	      base = add_single_to_queue (array, base, end++, subx);
	  }
    }
  else
    for (int i = 0; format[i]; ++i)
      if (format[i] == 'e')
	{
	  value_type subx = T::get_value (x->u.fld[i].rt_rtx);
	  /* The fast path stores directly into the local array; the slow
	     path goes through add_single_to_queue, which may move the
	     worklist to the heap and return a new base.  */
	  if (__builtin_expect (end < LOCAL_ELEMS, true))
	    base[end++] = subx;
	  else
	    base = add_single_to_queue (array, base, end++, subx);
	}
      else if (format[i] == 'E')
	{
	  unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
	  rtx *vec = x->u.fld[i].rt_rtvec->elem;
	  if (__builtin_expect (end + length <= LOCAL_ELEMS, true))
	    for (unsigned int j = 0; j < length; j++)
	      base[end++] = T::get_value (vec[j]);
	  else
	    for (unsigned int j = 0; j < length; j++)
	      base = add_single_to_queue (array, base, end++,
					  T::get_value (vec[j]));
	  if (code == SEQUENCE && end == length)
	    /* If the subrtxes of the sequence fill the entire array then
	       we know that no other parts of a containing insn are queued.
	       The caller is therefore iterating over the sequence as a
	       PATTERN (...), so we also want the patterns of the
	       subinstructions.  */
	    for (unsigned int j = 0; j < length; j++)
	      {
		typename T::rtx_type x = T::get_rtx (base[j]);
		if (INSN_P (x))
		  base[j] = T::get_value (PATTERN (x));
	      }
	}
  return end - orig_end;
}
    186 
/* Release any heap storage acquired by worklist ARRAY.  */

template <typename T>
void
generic_subrtx_iterator <T>::free_array (array_type &array)
{
  vec_free (array.heap);
}

/* Out-of-class definition of the in-class-initialized constant, needed
   when LOCAL_ELEMS is ODR-used (e.g. its address is taken).  */
template <typename T>
const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS;

/* Explicit instantiations for the three accessor flavours of the
   iterator template.  */
template class generic_subrtx_iterator <const_rtx_accessor>;
template class generic_subrtx_iterator <rtx_var_accessor>;
template class generic_subrtx_iterator <rtx_ptr_accessor>;
    200 
    201 /* Return 1 if the value of X is unstable
    202    (would be different at a different point in the program).
    203    The frame pointer, arg pointer, etc. are considered stable
    204    (within one function) and so is anything marked `unchanging'.  */
    205 
    206 int
    207 rtx_unstable_p (const_rtx x)
    208 {
    209   const RTX_CODE code = GET_CODE (x);
    210   int i;
    211   const char *fmt;
    212 
    213   switch (code)
    214     {
    215     case MEM:
    216       return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
    217 
    218     case CONST:
    219     CASE_CONST_ANY:
    220     case SYMBOL_REF:
    221     case LABEL_REF:
    222       return 0;
    223 
    224     case REG:
    225       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
    226       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
    227 	  /* The arg pointer varies if it is not a fixed register.  */
    228 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
    229 	return 0;
    230       /* ??? When call-clobbered, the value is stable modulo the restore
    231 	 that must happen after a call.  This currently screws up local-alloc
    232 	 into believing that the restore is not needed.  */
    233       if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
    234 	return 0;
    235       return 1;
    236 
    237     case ASM_OPERANDS:
    238       if (MEM_VOLATILE_P (x))
    239 	return 1;
    240 
    241       /* Fall through.  */
    242 
    243     default:
    244       break;
    245     }
    246 
    247   fmt = GET_RTX_FORMAT (code);
    248   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    249     if (fmt[i] == 'e')
    250       {
    251 	if (rtx_unstable_p (XEXP (x, i)))
    252 	  return 1;
    253       }
    254     else if (fmt[i] == 'E')
    255       {
    256 	int j;
    257 	for (j = 0; j < XVECLEN (x, i); j++)
    258 	  if (rtx_unstable_p (XVECEXP (x, i, j)))
    259 	    return 1;
    260       }
    261 
    262   return 0;
    263 }
    264 
    265 /* Return 1 if X has a value that can vary even between two
    266    executions of the program.  0 means X can be compared reliably
    267    against certain constants or near-constants.
    268    FOR_ALIAS is nonzero if we are called from alias analysis; if it is
    269    zero, we are slightly more conservative.
    270    The frame pointer and the arg pointer are considered constant.  */
    271 
    272 bool
    273 rtx_varies_p (const_rtx x, bool for_alias)
    274 {
    275   RTX_CODE code;
    276   int i;
    277   const char *fmt;
    278 
    279   if (!x)
    280     return 0;
    281 
    282   code = GET_CODE (x);
    283   switch (code)
    284     {
    285     case MEM:
    286       return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
    287 
    288     case CONST:
    289     CASE_CONST_ANY:
    290     case SYMBOL_REF:
    291     case LABEL_REF:
    292       return 0;
    293 
    294     case REG:
    295       /* Note that we have to test for the actual rtx used for the frame
    296 	 and arg pointers and not just the register number in case we have
    297 	 eliminated the frame and/or arg pointer and are using it
    298 	 for pseudos.  */
    299       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
    300 	  /* The arg pointer varies if it is not a fixed register.  */
    301 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
    302 	return 0;
    303       if (x == pic_offset_table_rtx
    304 	  /* ??? When call-clobbered, the value is stable modulo the restore
    305 	     that must happen after a call.  This currently screws up
    306 	     local-alloc into believing that the restore is not needed, so we
    307 	     must return 0 only if we are called from alias analysis.  */
    308 	  && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
    309 	return 0;
    310       return 1;
    311 
    312     case LO_SUM:
    313       /* The operand 0 of a LO_SUM is considered constant
    314 	 (in fact it is related specifically to operand 1)
    315 	 during alias analysis.  */
    316       return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
    317 	     || rtx_varies_p (XEXP (x, 1), for_alias);
    318 
    319     case ASM_OPERANDS:
    320       if (MEM_VOLATILE_P (x))
    321 	return 1;
    322 
    323       /* Fall through.  */
    324 
    325     default:
    326       break;
    327     }
    328 
    329   fmt = GET_RTX_FORMAT (code);
    330   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    331     if (fmt[i] == 'e')
    332       {
    333 	if (rtx_varies_p (XEXP (x, i), for_alias))
    334 	  return 1;
    335       }
    336     else if (fmt[i] == 'E')
    337       {
    338 	int j;
    339 	for (j = 0; j < XVECLEN (x, i); j++)
    340 	  if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
    341 	    return 1;
    342       }
    343 
    344   return 0;
    345 }
    346 
/* Compute an approximation for the offset between the register
   FROM and TO for the current function, as it was at the start
   of the routine.  */

static poly_int64
get_initial_register_offset (int from, int to)
{
  /* The target's register elimination pairs: each entry says that
     register FROM can be replaced by register TO plus an offset.  */
  static const struct elim_table_t
  {
    const int from;
    const int to;
  } table[] = ELIMINABLE_REGS;
  poly_int64 offset1, offset2;
  unsigned int i, j;

  if (to == from)
    return 0;

  /* It is not safe to call INITIAL_ELIMINATION_OFFSET before the epilogue
     is completed, but we need to give at least an estimate for the stack
     pointer based on the frame size.  */
  if (!epilogue_completed)
    {
      /* Estimate: the stack pointer differs from the other registers
	 by roughly the outgoing-argument area plus the frame size.  */
      offset1 = crtl->outgoing_args_size + get_frame_size ();
#if !STACK_GROWS_DOWNWARD
      offset1 = - offset1;
#endif
      if (to == STACK_POINTER_REGNUM)
	return offset1;
      else if (from == STACK_POINTER_REGNUM)
	return - offset1;
      else
	return 0;
     }

  /* Search for a direct elimination between FROM and TO, or a two-step
     path through a common intermediate register, combining the two
     elimination offsets with the appropriate signs.  */
  for (i = 0; i < ARRAY_SIZE (table); i++)
      if (table[i].from == from)
	{
	  if (table[i].to == to)
	    {
	      /* Direct elimination FROM -> TO.  */
	      INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
					  offset1);
	      return offset1;
	    }
	  for (j = 0; j < ARRAY_SIZE (table); j++)
	    {
	      /* FROM -> X followed by X -> TO: add the offsets.  */
	      if (table[j].to == to
		  && table[j].from == table[i].to)
		{
		  INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
					      offset1);
		  INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
					      offset2);
		  return offset1 + offset2;
		}
	      /* FROM -> X and TO -> X: subtract TO's offset.  */
	      if (table[j].from == to
		  && table[j].to == table[i].to)
		{
		  INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
					      offset1);
		  INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
					      offset2);
		  return offset1 - offset2;
		}
	    }
	}
      else if (table[i].to == from)
	{
	  if (table[i].from == to)
	    {
	      /* Reverse of a direct elimination TO -> FROM.  */
	      INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
					  offset1);
	      return - offset1;
	    }
	  for (j = 0; j < ARRAY_SIZE (table); j++)
	    {
	      /* X -> FROM and X -> TO: negate the first offset.  */
	      if (table[j].to == to
		  && table[j].from == table[i].from)
		{
		  INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
					      offset1);
		  INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
					      offset2);
		  return - offset1 + offset2;
		}
	      /* X -> FROM and TO -> X: negate both offsets.  */
	      if (table[j].from == to
		  && table[j].to == table[i].from)
		{
		  INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
					      offset1);
		  INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
					      offset2);
		  return - offset1 - offset2;
		}
	    }
	}

  /* If the requested register combination was not found,
     try a different more simple combination.  */
  if (from == ARG_POINTER_REGNUM)
    return get_initial_register_offset (HARD_FRAME_POINTER_REGNUM, to);
  else if (to == ARG_POINTER_REGNUM)
    return get_initial_register_offset (from, HARD_FRAME_POINTER_REGNUM);
  else if (from == HARD_FRAME_POINTER_REGNUM)
    return get_initial_register_offset (FRAME_POINTER_REGNUM, to);
  else if (to == HARD_FRAME_POINTER_REGNUM)
    return get_initial_register_offset (from, FRAME_POINTER_REGNUM);
  else
    return 0;
}
    457 
/* Return nonzero if the use of X+OFFSET as an address in a MEM with SIZE
   bytes can cause a trap.  MODE is the mode of the MEM (not that of X) and
   UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory
   references on strict alignment machines.  */

static int
rtx_addr_can_trap_p_1 (const_rtx x, poly_int64 offset, poly_int64 size,
		       machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);
  /* SIZE must be known unless the mode carries no size information.  */
  gcc_checking_assert (mode == BLKmode
		       || mode == VOIDmode
		       || known_size_p (size));
  poly_int64 const_x1;

  /* The offset must be a multiple of the mode size if we are considering
     unaligned memory references on strict alignment machines.  */
  if (STRICT_ALIGNMENT
      && unaligned_mems
      && mode != BLKmode
      && mode != VOIDmode)
    {
      poly_int64 actual_offset = offset;

#ifdef SPARC_STACK_BOUNDARY_HACK
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
	     the real alignment of %sp.  However, when it does this, the
	     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
      if (SPARC_STACK_BOUNDARY_HACK
	  && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
	actual_offset -= STACK_POINTER_OFFSET;
#endif

      if (!multiple_p (actual_offset, GET_MODE_SIZE (mode)))
	return 1;
    }

  switch (code)
    {
    case SYMBOL_REF:
      /* A weak symbol may resolve to address zero, so accessing it
	 can trap.  */
      if (SYMBOL_REF_WEAK (x))
	return 1;
      if (!CONSTANT_POOL_ADDRESS_P (x) && !SYMBOL_REF_FUNCTION_P (x))
	{
	  tree decl;
	  poly_int64 decl_size;

	  /* An offset before the start of the object can trap.  */
	  if (maybe_lt (offset, 0))
	    return 1;
	  /* With an unknown access size we can only accept offset 0.  */
	  if (!known_size_p (size))
	    return maybe_ne (offset, 0);

	  /* If the size of the access or of the symbol is unknown,
	     assume the worst.  */
	  decl = SYMBOL_REF_DECL (x);

	  /* Else check that the access is in bounds.  TODO: restructure
	     expr_size/tree_expr_size/int_expr_size and just use the latter.  */
	  if (!decl)
	    decl_size = -1;
	  else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
	    {
	      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &decl_size))
		decl_size = -1;
	    }
	  else if (TREE_CODE (decl) == STRING_CST)
	    decl_size = TREE_STRING_LENGTH (decl);
	  else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
	    decl_size = int_size_in_bytes (TREE_TYPE (decl));
	  else
	    decl_size = -1;

	  /* With no usable decl size, accept only offset 0; otherwise
	     require [OFFSET, OFFSET+SIZE) to lie within the decl.  */
	  return (!known_size_p (decl_size) || known_eq (decl_size, 0)
		  ? maybe_ne (offset, 0)
		  : !known_subrange_p (offset, size, 0, decl_size));
        }

      return 0;

    case LABEL_REF:
      return 0;

    case REG:
      /* Stack references are assumed not to trap, but we need to deal with
	 nonsensical offsets.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	 || x == stack_pointer_rtx
	 /* The arg pointer varies if it is not a fixed register.  */
	 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
	{
#ifdef RED_ZONE_SIZE
	  poly_int64 red_zone_size = RED_ZONE_SIZE;
#else
	  poly_int64 red_zone_size = 0;
#endif
	  poly_int64 stack_boundary = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
	  poly_int64 low_bound, high_bound;

	  if (!known_size_p (size))
	    return 1;

	  /* Compute [LOW_BOUND, HIGH_BOUND], the range of offsets from
	     this register that is assumed to be valid stack memory.  */
	  if (x == frame_pointer_rtx)
	    {
	      if (FRAME_GROWS_DOWNWARD)
		{
		  high_bound = targetm.starting_frame_offset ();
		  low_bound  = high_bound - get_frame_size ();
		}
	      else
		{
		  low_bound  = targetm.starting_frame_offset ();
		  high_bound = low_bound + get_frame_size ();
		}
	    }
	  else if (x == hard_frame_pointer_rtx)
	    {
	      /* Express the stack-pointer and arg-pointer positions
		 relative to the hard frame pointer; the valid range runs
		 from just beyond the red zone to just beyond the incoming
		 arguments.  */
	      poly_int64 sp_offset
		= get_initial_register_offset (STACK_POINTER_REGNUM,
					       HARD_FRAME_POINTER_REGNUM);
	      poly_int64 ap_offset
		= get_initial_register_offset (ARG_POINTER_REGNUM,
					       HARD_FRAME_POINTER_REGNUM);

#if STACK_GROWS_DOWNWARD
	      low_bound  = sp_offset - red_zone_size - stack_boundary;
	      high_bound = ap_offset
			   + FIRST_PARM_OFFSET (current_function_decl)
#if !ARGS_GROW_DOWNWARD
			   + crtl->args.size
#endif
			   + stack_boundary;
#else
	      high_bound = sp_offset + red_zone_size + stack_boundary;
	      low_bound  = ap_offset
			   + FIRST_PARM_OFFSET (current_function_decl)
#if ARGS_GROW_DOWNWARD
			   - crtl->args.size
#endif
			   - stack_boundary;
#endif
	    }
	  else if (x == stack_pointer_rtx)
	    {
	      poly_int64 ap_offset
		= get_initial_register_offset (ARG_POINTER_REGNUM,
					       STACK_POINTER_REGNUM);

#if STACK_GROWS_DOWNWARD
	      low_bound  = - red_zone_size - stack_boundary;
	      high_bound = ap_offset
			   + FIRST_PARM_OFFSET (current_function_decl)
#if !ARGS_GROW_DOWNWARD
			   + crtl->args.size
#endif
			   + stack_boundary;
#else
	      high_bound = red_zone_size + stack_boundary;
	      low_bound  = ap_offset
			   + FIRST_PARM_OFFSET (current_function_decl)
#if ARGS_GROW_DOWNWARD
			   - crtl->args.size
#endif
			   - stack_boundary;
#endif
	    }
	  else
	    {
	      /* We assume that accesses are safe to at least the
		 next stack boundary.
		 Examples are varargs and __builtin_return_address.  */
#if ARGS_GROW_DOWNWARD
	      high_bound = FIRST_PARM_OFFSET (current_function_decl)
			   + stack_boundary;
	      low_bound  = FIRST_PARM_OFFSET (current_function_decl)
			   - crtl->args.size - stack_boundary;
#else
	      low_bound  = FIRST_PARM_OFFSET (current_function_decl)
			   - stack_boundary;
	      high_bound = FIRST_PARM_OFFSET (current_function_decl)
			   + crtl->args.size + stack_boundary;
#endif
	    }

	  /* The whole access [OFFSET, OFFSET+SIZE) must fit within the
	     computed bounds.  */
	  if (known_ge (offset, low_bound)
	      && known_le (offset, high_bound - size))
	    return 0;
	  return 1;
	}
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
				    mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
	 - it is the pic register plus a const unspec without offset.  */
      if (XEXP (x, 0) == pic_offset_table_rtx
	  && GET_CODE (XEXP (x, 1)) == CONST
	  && GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
	  && known_eq (offset, 0))
	return 0;

      /* - or it is an address that can't trap plus a constant integer.  */
      if (poly_int_rtx_p (XEXP (x, 1), &const_x1)
	  && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + const_x1,
				     size, mode, unaligned_mems))
	return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      /* For these, the trap-ness is determined by the second operand.  */
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
				    mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      /* Auto-modified addresses trap iff the base address does.  */
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
				    mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the case above, it can cause a trap.  */
  return 1;
}
    693 
/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   Conservative wrapper around rtx_addr_can_trap_p_1: no offset, unknown
   size (-1), BLKmode, and no strict-alignment checking.  */

int
rtx_addr_can_trap_p (const_rtx x)
{
  return rtx_addr_can_trap_p_1 (x, 0, -1, BLKmode, false);
}
    701 
    702 /* Return true if X contains a MEM subrtx.  */
    703 
    704 bool
    705 contains_mem_rtx_p (rtx x)
    706 {
    707   subrtx_iterator::array_type array;
    708   FOR_EACH_SUBRTX (iter, array, x, ALL)
    709     if (MEM_P (*iter))
    710       return true;
    711 
    712   return false;
    713 }
    714 
    715 /* Return true if X is an address that is known to not be zero.  */
    716 
    717 bool
    718 nonzero_address_p (const_rtx x)
    719 {
    720   const enum rtx_code code = GET_CODE (x);
    721 
    722   switch (code)
    723     {
    724     case SYMBOL_REF:
    725       return flag_delete_null_pointer_checks && !SYMBOL_REF_WEAK (x);
    726 
    727     case LABEL_REF:
    728       return true;
    729 
    730     case REG:
    731       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
    732       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
    733 	  || x == stack_pointer_rtx
    734 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
    735 	return true;
    736       /* All of the virtual frame registers are stack references.  */
    737       if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
    738 	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
    739 	return true;
    740       return false;
    741 
    742     case CONST:
    743       return nonzero_address_p (XEXP (x, 0));
    744 
    745     case PLUS:
    746       /* Handle PIC references.  */
    747       if (XEXP (x, 0) == pic_offset_table_rtx
    748 	       && CONSTANT_P (XEXP (x, 1)))
    749 	return true;
    750       return false;
    751 
    752     case PRE_MODIFY:
    753       /* Similar to the above; allow positive offsets.  Further, since
    754 	 auto-inc is only allowed in memories, the register must be a
    755 	 pointer.  */
    756       if (CONST_INT_P (XEXP (x, 1))
    757 	  && INTVAL (XEXP (x, 1)) > 0)
    758 	return true;
    759       return nonzero_address_p (XEXP (x, 0));
    760 
    761     case PRE_INC:
    762       /* Similarly.  Further, the offset is always positive.  */
    763       return true;
    764 
    765     case PRE_DEC:
    766     case POST_DEC:
    767     case POST_INC:
    768     case POST_MODIFY:
    769       return nonzero_address_p (XEXP (x, 0));
    770 
    771     case LO_SUM:
    772       return nonzero_address_p (XEXP (x, 1));
    773 
    774     default:
    775       break;
    776     }
    777 
    778   /* If it isn't one of the case above, might be zero.  */
    779   return false;
    780 }
    781 
    782 /* Return 1 if X refers to a memory location whose address
    783    cannot be compared reliably with constant addresses,
    784    or if X refers to a BLKmode memory object.
    785    FOR_ALIAS is nonzero if we are called from alias analysis; if it is
    786    zero, we are slightly more conservative.  */
    787 
    788 bool
    789 rtx_addr_varies_p (const_rtx x, bool for_alias)
    790 {
    791   enum rtx_code code;
    792   int i;
    793   const char *fmt;
    794 
    795   if (x == 0)
    796     return 0;
    797 
    798   code = GET_CODE (x);
    799   if (code == MEM)
    800     return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
    801 
    802   fmt = GET_RTX_FORMAT (code);
    803   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    804     if (fmt[i] == 'e')
    805       {
    806 	if (rtx_addr_varies_p (XEXP (x, i), for_alias))
    807 	  return 1;
    808       }
    809     else if (fmt[i] == 'E')
    810       {
    811 	int j;
    812 	for (j = 0; j < XVECLEN (x, i); j++)
    813 	  if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
    814 	    return 1;
    815       }
    816   return 0;
    817 }
    818 
    819 /* Return the CALL in X if there is one.  */
    821 
    822 rtx
    823 get_call_rtx_from (const rtx_insn *insn)
    824 {
    825   rtx x = PATTERN (insn);
    826   if (GET_CODE (x) == PARALLEL)
    827     x = XVECEXP (x, 0, 0);
    828   if (GET_CODE (x) == SET)
    829     x = SET_SRC (x);
    830   if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
    831     return x;
    832   return NULL_RTX;
    833 }
    834 
    835 /* Get the declaration of the function called by INSN.  */
    836 
    837 tree
    838 get_call_fndecl (const rtx_insn *insn)
    839 {
    840   rtx note, datum;
    841 
    842   note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
    843   if (note == NULL_RTX)
    844     return NULL_TREE;
    845 
    846   datum = XEXP (note, 0);
    847   if (datum != NULL_RTX)
    848     return SYMBOL_REF_DECL (datum);
    849 
    850   return NULL_TREE;
    851 }
    852 
    853 /* Return the value of the integer term in X, if one is apparent;
    855    otherwise return 0.
    856    Only obvious integer terms are detected.
    857    This is used in cse.cc with the `related_value' field.  */
    858 
    859 HOST_WIDE_INT
    860 get_integer_term (const_rtx x)
    861 {
    862   if (GET_CODE (x) == CONST)
    863     x = XEXP (x, 0);
    864 
    865   if (GET_CODE (x) == MINUS
    866       && CONST_INT_P (XEXP (x, 1)))
    867     return - INTVAL (XEXP (x, 1));
    868   if (GET_CODE (x) == PLUS
    869       && CONST_INT_P (XEXP (x, 1)))
    870     return INTVAL (XEXP (x, 1));
    871   return 0;
    872 }
    873 
    874 /* If X is a constant, return the value sans apparent integer term;
    875    otherwise return 0.
    876    Only obvious integer terms are detected.  */
    877 
    878 rtx
    879 get_related_value (const_rtx x)
    880 {
    881   if (GET_CODE (x) != CONST)
    882     return 0;
    883   x = XEXP (x, 0);
    884   if (GET_CODE (x) == PLUS
    885       && CONST_INT_P (XEXP (x, 1)))
    886     return XEXP (x, 0);
    887   else if (GET_CODE (x) == MINUS
    888 	   && CONST_INT_P (XEXP (x, 1)))
    889     return XEXP (x, 0);
    890   return 0;
    891 }
    892 
    893 /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
    895    to somewhere in the same object or object_block as SYMBOL.  */
    896 
    897 bool
    898 offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
    899 {
    900   tree decl;
    901 
    902   if (GET_CODE (symbol) != SYMBOL_REF)
    903     return false;
    904 
    905   if (offset == 0)
    906     return true;
    907 
    908   if (offset > 0)
    909     {
    910       if (CONSTANT_POOL_ADDRESS_P (symbol)
    911 	  && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
    912 	return true;
    913 
    914       decl = SYMBOL_REF_DECL (symbol);
    915       if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
    916 	return true;
    917     }
    918 
    919   if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
    920       && SYMBOL_REF_BLOCK (symbol)
    921       && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
    922       && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
    923 	  < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
    924     return true;
    925 
    926   return false;
    927 }
    928 
    929 /* Split X into a base and a constant offset, storing them in *BASE_OUT
    930    and *OFFSET_OUT respectively.  */
    931 
    932 void
    933 split_const (rtx x, rtx *base_out, rtx *offset_out)
    934 {
    935   if (GET_CODE (x) == CONST)
    936     {
    937       x = XEXP (x, 0);
    938       if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
    939 	{
    940 	  *base_out = XEXP (x, 0);
    941 	  *offset_out = XEXP (x, 1);
    942 	  return;
    943 	}
    944     }
    945   *base_out = x;
    946   *offset_out = const0_rtx;
    947 }
    948 
    949 /* Express integer value X as some value Y plus a polynomial offset,
    950    where Y is either const0_rtx, X or something within X (as opposed
    951    to a new rtx).  Return the Y and store the offset in *OFFSET_OUT.  */
    952 
    953 rtx
    954 strip_offset (rtx x, poly_int64_pod *offset_out)
    955 {
    956   rtx base = const0_rtx;
    957   rtx test = x;
    958   if (GET_CODE (test) == CONST)
    959     test = XEXP (test, 0);
    960   if (GET_CODE (test) == PLUS)
    961     {
    962       base = XEXP (test, 0);
    963       test = XEXP (test, 1);
    964     }
    965   if (poly_int_rtx_p (test, offset_out))
    966     return base;
    967   *offset_out = 0;
    968   return x;
    969 }
    970 
    971 /* Return the argument size in REG_ARGS_SIZE note X.  */
    972 
    973 poly_int64
    974 get_args_size (const_rtx x)
    975 {
    976   gcc_checking_assert (REG_NOTE_KIND (x) == REG_ARGS_SIZE);
    977   return rtx_to_poly_int64 (XEXP (x, 0));
    978 }
    979 
    980 /* Return the number of places FIND appears within X.  If COUNT_DEST is
    982    zero, we do not count occurrences inside the destination of a SET.  */
    983 
    984 int
    985 count_occurrences (const_rtx x, const_rtx find, int count_dest)
    986 {
    987   int i, j;
    988   enum rtx_code code;
    989   const char *format_ptr;
    990   int count;
    991 
    992   if (x == find)
    993     return 1;
    994 
    995   code = GET_CODE (x);
    996 
    997   switch (code)
    998     {
    999     case REG:
   1000     CASE_CONST_ANY:
   1001     case SYMBOL_REF:
   1002     case CODE_LABEL:
   1003     case PC:
   1004       return 0;
   1005 
   1006     case EXPR_LIST:
   1007       count = count_occurrences (XEXP (x, 0), find, count_dest);
   1008       if (XEXP (x, 1))
   1009 	count += count_occurrences (XEXP (x, 1), find, count_dest);
   1010       return count;
   1011 
   1012     case MEM:
   1013       if (MEM_P (find) && rtx_equal_p (x, find))
   1014 	return 1;
   1015       break;
   1016 
   1017     case SET:
   1018       if (SET_DEST (x) == find && ! count_dest)
   1019 	return count_occurrences (SET_SRC (x), find, count_dest);
   1020       break;
   1021 
   1022     default:
   1023       break;
   1024     }
   1025 
   1026   format_ptr = GET_RTX_FORMAT (code);
   1027   count = 0;
   1028 
   1029   for (i = 0; i < GET_RTX_LENGTH (code); i++)
   1030     {
   1031       switch (*format_ptr++)
   1032 	{
   1033 	case 'e':
   1034 	  count += count_occurrences (XEXP (x, i), find, count_dest);
   1035 	  break;
   1036 
   1037 	case 'E':
   1038 	  for (j = 0; j < XVECLEN (x, i); j++)
   1039 	    count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
   1040 	  break;
   1041 	}
   1042     }
   1043   return count;
   1044 }
   1045 
   1046 
   1047 /* Return TRUE if OP is a register or subreg of a register that
   1049    holds an unsigned quantity.  Otherwise, return FALSE.  */
   1050 
   1051 bool
   1052 unsigned_reg_p (rtx op)
   1053 {
   1054   if (REG_P (op)
   1055       && REG_EXPR (op)
   1056       && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
   1057     return true;
   1058 
   1059   if (GET_CODE (op) == SUBREG
   1060       && SUBREG_PROMOTED_SIGN (op))
   1061     return true;
   1062 
   1063   return false;
   1064 }
   1065 
   1066 
   1067 /* Nonzero if register REG appears somewhere within IN.
   1069    Also works if REG is not a register; in this case it checks
   1070    for a subexpression of IN that is Lisp "equal" to REG.  */
   1071 
   1072 int
   1073 reg_mentioned_p (const_rtx reg, const_rtx in)
   1074 {
   1075   const char *fmt;
   1076   int i;
   1077   enum rtx_code code;
   1078 
   1079   if (in == 0)
   1080     return 0;
   1081 
   1082   if (reg == in)
   1083     return 1;
   1084 
   1085   if (GET_CODE (in) == LABEL_REF)
   1086     return reg == label_ref_label (in);
   1087 
   1088   code = GET_CODE (in);
   1089 
   1090   switch (code)
   1091     {
   1092       /* Compare registers by number.  */
   1093     case REG:
   1094       return REG_P (reg) && REGNO (in) == REGNO (reg);
   1095 
   1096       /* These codes have no constituent expressions
   1097 	 and are unique.  */
   1098     case SCRATCH:
   1099     case PC:
   1100       return 0;
   1101 
   1102     CASE_CONST_ANY:
   1103       /* These are kept unique for a given value.  */
   1104       return 0;
   1105 
   1106     default:
   1107       break;
   1108     }
   1109 
   1110   if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
   1111     return 1;
   1112 
   1113   fmt = GET_RTX_FORMAT (code);
   1114 
   1115   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   1116     {
   1117       if (fmt[i] == 'E')
   1118 	{
   1119 	  int j;
   1120 	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
   1121 	    if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
   1122 	      return 1;
   1123 	}
   1124       else if (fmt[i] == 'e'
   1125 	       && reg_mentioned_p (reg, XEXP (in, i)))
   1126 	return 1;
   1127     }
   1128   return 0;
   1129 }
   1130 
   1131 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   1133    no CODE_LABEL insn.  */
   1134 
   1135 int
   1136 no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
   1137 {
   1138   rtx_insn *p;
   1139   if (beg == end)
   1140     return 0;
   1141   for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
   1142     if (LABEL_P (p))
   1143       return 0;
   1144   return 1;
   1145 }
   1146 
   1147 /* Nonzero if register REG is used in an insn between
   1148    FROM_INSN and TO_INSN (exclusive of those two).  */
   1149 
   1150 int
   1151 reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
   1152 		    const rtx_insn *to_insn)
   1153 {
   1154   rtx_insn *insn;
   1155 
   1156   if (from_insn == to_insn)
   1157     return 0;
   1158 
   1159   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
   1160     if (NONDEBUG_INSN_P (insn)
   1161 	&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
   1162 	   || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
   1163       return 1;
   1164   return 0;
   1165 }
   1166 
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (const_rtx x, const_rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      /* Reading the source is always a reference.  */
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
	return 1;

      /* If the destination is anything other than PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn references X if
	 it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != PC
	  && !REG_P (SET_DEST (body))
	  && ! (GET_CODE (SET_DEST (body)) == SUBREG
		&& REG_P (SUBREG_REG (SET_DEST (body)))
		&& !read_modify_subreg_p (SET_DEST (body)))
	  && reg_overlap_mentioned_p (x, SET_DEST (body)))
	return 1;
      return 0;

    case ASM_OPERANDS:
      /* Only the inputs of an asm read values.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
	if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
	  return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      /* Everything mentioned in these is read.  */
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      /* Only the address operand is a use.  */
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
	  return 1;
      return 0;

    case PARALLEL:
      /* Check each element of the PARALLEL recursively.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (reg_referenced_p (x, XVECEXP (body, 0, i)))
	  return 1;
      return 0;

    case CLOBBER:
      /* Clobbering a MEM uses its address; clobbering a REG does not
	 read the old value.  */
      if (MEM_P (XEXP (body, 0)))
	if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
	  return 1;
      return 0;

    case COND_EXEC:
      /* The condition is always evaluated; the guarded body is checked
	 recursively.  */
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
	return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
   1240 
   1241 /* Nonzero if register REG is set or clobbered in an insn between
   1243    FROM_INSN and TO_INSN (exclusive of those two).  */
   1244 
   1245 int
   1246 reg_set_between_p (const_rtx reg, const rtx_insn *from_insn,
   1247 		   const rtx_insn *to_insn)
   1248 {
   1249   const rtx_insn *insn;
   1250 
   1251   if (from_insn == to_insn)
   1252     return 0;
   1253 
   1254   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
   1255     if (INSN_P (insn) && reg_set_p (reg, insn))
   1256       return 1;
   1257   return 0;
   1258 }
   1259 
/* Return true if REG is set or clobbered inside INSN.  */

int
reg_set_p (const_rtx reg, const_rtx insn)
{
  /* After delay slot handling, call and branch insns might be in a
     sequence.  Check all the elements there.  */
  if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      for (int i = 0; i < XVECLEN (PATTERN (insn), 0); ++i)
	if (reg_set_p (reg, XVECEXP (PATTERN (insn), 0, i)))
	  return true;

      return false;
    }

  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG: an auto-increment
     recorded as a REG_INC note, or a call that clobbers the register
     per its ABI, any memory, or an explicit CLOBBER in its fusage.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
	  || (CALL_P (insn)
	      && ((REG_P (reg)
		   && REGNO (reg) < FIRST_PSEUDO_REGISTER
		   && (insn_callee_abi (as_a<const rtx_insn *> (insn))
		       .clobbers_reg_p (GET_MODE (reg), REGNO (reg))))
		  || MEM_P (reg)
		  || find_reg_fusage (insn, CLOBBER, reg)))))
    return true;

  /* There are no REG_INC notes for SP autoinc.  */
  if (reg == stack_pointer_rtx && INSN_P (insn))
    {
      subrtx_var_iterator::array_type array;
      FOR_EACH_SUBRTX_VAR (iter, array, PATTERN (insn), NONCONST)
	{
	  rtx mem = *iter;
	  if (mem
	      && MEM_P (mem)
	      && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
	    {
	      /* An auto-modified stack-pointer address counts as a set
		 of the stack pointer.  */
	      if (XEXP (XEXP (mem, 0), 0) == stack_pointer_rtx)
		return true;
	      /* Nothing interesting below an autoinc address.  */
	      iter.skip_subrtxes ();
	    }
	}
    }

  /* Otherwise look for an explicit SET or CLOBBER in the pattern.  */
  return set_of (reg, insn) != NULL_RTX;
}
   1309 
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx_insn *insn;

  /* An empty range cannot modify anything.  */
  if (start == end)
    return 0;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Constants are never modified.  */
      return 0;

    case PC:
      /* Conservatively assume the PC changes between any two insns.  */
      return 1;

    case MEM:
      /* Both the address and the stored value may be modified.  */
      if (modified_between_p (XEXP (x, 0), start, end))
	return 1;
      if (MEM_READONLY_P (x))
	return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
	if (memory_modified_in_insn_p (x, insn))
	  return 1;
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  /* Recurse into X's operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
	return 1;

      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (modified_between_p (XVECEXP (x, i, j), start, end))
	    return 1;
    }

  return 0;
}
   1367 
   1368 /* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   1369    of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   1370    does use memory aliasing.  */
   1371 
   1372 int
   1373 modified_in_p (const_rtx x, const_rtx insn)
   1374 {
   1375   const enum rtx_code code = GET_CODE (x);
   1376   const char *fmt;
   1377   int i, j;
   1378 
   1379   switch (code)
   1380     {
   1381     CASE_CONST_ANY:
   1382     case CONST:
   1383     case SYMBOL_REF:
   1384     case LABEL_REF:
   1385       return 0;
   1386 
   1387     case PC:
   1388       return 1;
   1389 
   1390     case MEM:
   1391       if (modified_in_p (XEXP (x, 0), insn))
   1392 	return 1;
   1393       if (MEM_READONLY_P (x))
   1394 	return 0;
   1395       if (memory_modified_in_insn_p (x, insn))
   1396 	return 1;
   1397       return 0;
   1398 
   1399     case REG:
   1400       return reg_set_p (x, insn);
   1401 
   1402     default:
   1403       break;
   1404     }
   1405 
   1406   fmt = GET_RTX_FORMAT (code);
   1407   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   1408     {
   1409       if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
   1410 	return 1;
   1411 
   1412       else if (fmt[i] == 'E')
   1413 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
   1414 	  if (modified_in_p (XVECEXP (x, i, j), insn))
   1415 	    return 1;
   1416     }
   1417 
   1418   return 0;
   1419 }
   1420 
   1421 /* Return true if X is a SUBREG and if storing a value to X would
   1422    preserve some of its SUBREG_REG.  For example, on a normal 32-bit
   1423    target, using a SUBREG to store to one half of a DImode REG would
   1424    preserve the other half.  */
   1425 
   1426 bool
   1427 read_modify_subreg_p (const_rtx x)
   1428 {
   1429   if (GET_CODE (x) != SUBREG)
   1430     return false;
   1431   poly_uint64 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
   1432   poly_uint64 osize = GET_MODE_SIZE (GET_MODE (x));
   1433   poly_uint64 regsize = REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
   1434   /* The inner and outer modes of a subreg must be ordered, so that we
   1435      can tell whether they're paradoxical or partial.  */
   1436   gcc_checking_assert (ordered_p (isize, osize));
   1437   return (maybe_gt (isize, osize) && maybe_gt (isize, regsize));
   1438 }
   1439 
   1440 /* Helper function for set_of.  */
   1442 struct set_of_data
   1443   {
   1444     const_rtx found;
   1445     const_rtx pat;
   1446   };
   1447 
   1448 static void
   1449 set_of_1 (rtx x, const_rtx pat, void *data1)
   1450 {
   1451   struct set_of_data *const data = (struct set_of_data *) (data1);
   1452   if (rtx_equal_p (x, data->pat)
   1453       || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
   1454     data->found = pat;
   1455 }
   1456 
   1457 /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
   1458    (either directly or via STRICT_LOW_PART and similar modifiers).  */
   1459 const_rtx
   1460 set_of (const_rtx pat, const_rtx insn)
   1461 {
   1462   struct set_of_data data;
   1463   data.found = NULL_RTX;
   1464   data.pat = pat;
   1465   note_pattern_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
   1466   return data.found;
   1467 }
   1468 
   1469 /* Check whether instruction pattern PAT contains a SET with the following
   1470    properties:
   1471 
   1472    - the SET is executed unconditionally; and
   1473    - either:
   1474      - the destination of the SET is a REG that contains REGNO; or
   1475      - both:
   1476        - the destination of the SET is a SUBREG of such a REG; and
   1477        - writing to the subreg clobbers all of the SUBREG_REG
   1478 	 (in other words, read_modify_subreg_p is false).
   1479 
   1480    If PAT does have a SET like that, return the set, otherwise return null.
   1481 
   1482    This is intended to be an alternative to single_set for passes that
   1483    can handle patterns with multiple_sets.  */
   1484 rtx
   1485 simple_regno_set (rtx pat, unsigned int regno)
   1486 {
   1487   if (GET_CODE (pat) == PARALLEL)
   1488     {
   1489       int last = XVECLEN (pat, 0) - 1;
   1490       for (int i = 0; i < last; ++i)
   1491 	if (rtx set = simple_regno_set (XVECEXP (pat, 0, i), regno))
   1492 	  return set;
   1493 
   1494       pat = XVECEXP (pat, 0, last);
   1495     }
   1496 
   1497   if (GET_CODE (pat) == SET
   1498       && covers_regno_no_parallel_p (SET_DEST (pat), regno))
   1499     return pat;
   1500 
   1501   return nullptr;
   1502 }
   1503 
   1504 /* Add all hard register in X to *PSET.  */
   1505 void
   1506 find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
   1507 {
   1508   subrtx_iterator::array_type array;
   1509   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
   1510     {
   1511       const_rtx x = *iter;
   1512       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
   1513 	add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
   1514     }
   1515 }
   1516 
   1517 /* This function, called through note_stores, collects sets and
   1518    clobbers of hard registers in a HARD_REG_SET, which is pointed to
   1519    by DATA.  */
   1520 void
   1521 record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
   1522 {
   1523   HARD_REG_SET *pset = (HARD_REG_SET *)data;
   1524   if (REG_P (x) && HARD_REGISTER_P (x))
   1525     add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
   1526 }
   1527 
   1528 /* Examine INSN, and compute the set of hard registers written by it.
   1529    Store it in *PSET.  Should only be called after reload.
   1530 
   1531    IMPLICIT is true if we should include registers that are fully-clobbered
   1532    by calls.  This should be used with caution, since it doesn't include
   1533    partially-clobbered registers.  */
   1534 void
   1535 find_all_hard_reg_sets (const rtx_insn *insn, HARD_REG_SET *pset, bool implicit)
   1536 {
   1537   rtx link;
   1538 
   1539   CLEAR_HARD_REG_SET (*pset);
   1540   note_stores (insn, record_hard_reg_sets, pset);
   1541   if (CALL_P (insn) && implicit)
   1542     *pset |= insn_callee_abi (insn).full_reg_clobbers ();
   1543   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
   1544     if (REG_NOTE_KIND (link) == REG_INC)
   1545       record_hard_reg_sets (XEXP (link, 0), NULL, pset);
   1546 }
   1547 
   1548 /* Like record_hard_reg_sets, but called through note_uses.  */
   1549 void
   1550 record_hard_reg_uses (rtx *px, void *data)
   1551 {
   1552   find_all_hard_regs (*px, (HARD_REG_SET *) data);
   1553 }
   1554 
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SET whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (const rtx_insn *insn, const_rtx pat)
{
  rtx set = NULL;
  /* Nonzero once we know SET (if any) is not dead, i.e. it either has
     no REG_UNUSED note or has side effects.  */
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx sub = XVECEXP (pat, 0, i);
	  switch (GET_CODE (sub))
	    {
	    case USE:
	    case CLOBBER:
	      /* USEs and CLOBBERs never disqualify a single set.  */
	      break;

	    case SET:
	      /* We can consider insns having multiple sets, where all
		 but one are dead as single set insns.  In common case
		 only single set is present in the pattern so we want
		 to avoid checking for REG_UNUSED notes unless necessary.

		 When we reach set first time, we just expect this is
		 the single set we are looking for and only when more
		 sets are found in the insn, we check them.  */
	      if (!set_verified)
		{
		  /* A previously recorded set turns out to be dead:
		     drop it and keep looking.  */
		  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
		      && !side_effects_p (set))
		    set = NULL;
		  else
		    set_verified = 1;
		}
	      if (!set)
		set = sub, set_verified = 0;
	      else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
		       || side_effects_p (sub))
		/* Two live sets: not a single-set insn.  */
		return NULL_RTX;
	      break;

	    default:
	      /* Any other element makes the pattern too complex.  */
	      return NULL_RTX;
	    }
	}
    }
  return set;
}
   1609 
   1610 /* Given an INSN, return nonzero if it has more than one SET, else return
   1611    zero.  */
   1612 
   1613 int
   1614 multiple_sets (const_rtx insn)
   1615 {
   1616   int found;
   1617   int i;
   1618 
   1619   /* INSN must be an insn.  */
   1620   if (! INSN_P (insn))
   1621     return 0;
   1622 
   1623   /* Only a PARALLEL can have multiple SETs.  */
   1624   if (GET_CODE (PATTERN (insn)) == PARALLEL)
   1625     {
   1626       for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
   1627 	if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
   1628 	  {
   1629 	    /* If we have already found a SET, then return now.  */
   1630 	    if (found)
   1631 	      return 1;
   1632 	    else
   1633 	      found = 1;
   1634 	  }
   1635     }
   1636 
   1637   /* Either zero or one SET.  */
   1638   return 0;
   1639 }
   1640 
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (const_rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  /* (set (pc) (pc)) is the canonical no-op jump.  */
  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  /* Copying a memory location onto itself.  */
  if (MEM_P (dst) && MEM_P (src))
    return (rtx_equal_p (dst, src)
	    && !side_effects_p (dst)
	    && !side_effects_p (src));

  /* Storing a value back into the field it was extracted from.  */
  if (GET_CODE (dst) == ZERO_EXTRACT)
    return (rtx_equal_p (XEXP (dst, 0), src)
	    && !BITS_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
	    && !side_effects_p (src)
	    && !side_effects_p (XEXP (dst, 0)));

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (maybe_ne (SUBREG_BYTE (src), SUBREG_BYTE (dst)))
	return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
      if (GET_MODE (src) != GET_MODE (dst))
	/* It is hard to tell whether subregs refer to the same bits, so act
	   conservatively and return 0.  */
	return 0;
    }

  /* It is a NOOP if destination overlaps with selected src vector
     elements.  */
  if (GET_CODE (src) == VEC_SELECT
      && REG_P (XEXP (src, 0)) && REG_P (dst)
      && HARD_REGISTER_P (XEXP (src, 0))
      && HARD_REGISTER_P (dst))
    {
      int i;
      rtx par = XEXP (src, 1);
      rtx src0 = XEXP (src, 0);
      poly_int64 c0;
      if (!poly_int_rtx_p (XVECEXP (par, 0, 0), &c0))
	return 0;
      poly_int64 offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;

      /* The selection must be a run of consecutive lanes starting at
	 C0 for the bits to line up with a plain subreg.  */
      for (i = 1; i < XVECLEN (par, 0); i++)
	{
	  poly_int64 c0i;
	  if (!poly_int_rtx_p (XVECEXP (par, 0, i), &c0i)
	      || maybe_ne (c0i, c0 + i))
	    return 0;
	}
      /* The copy is a no-op only if the selected lanes already live in
	 exactly the destination's hard registers.  */
      return
	REG_CAN_CHANGE_MODE_P (REGNO (dst), GET_MODE (src0), GET_MODE (dst))
	&& simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
				  offset, GET_MODE (dst)) == (int) REGNO (dst);
    }

  /* Plain register-to-itself copy.  */
  return (REG_P (src) && REG_P (dst)
	  && REGNO (src) == REGNO (dst));
}
   1711 
   1712 /* Return nonzero if an insn consists only of SETs, each of which only sets a
   1714    value to itself.  */
   1715 
   1716 int
   1717 noop_move_p (const rtx_insn *insn)
   1718 {
   1719   rtx pat = PATTERN (insn);
   1720 
   1721   if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
   1722     return 1;
   1723 
   1724   /* Check the code to be executed for COND_EXEC.  */
   1725   if (GET_CODE (pat) == COND_EXEC)
   1726     pat = COND_EXEC_CODE (pat);
   1727 
   1728   if (GET_CODE (pat) == SET && set_noop_p (pat))
   1729     return 1;
   1730 
   1731   if (GET_CODE (pat) == PARALLEL)
   1732     {
   1733       int i;
   1734       /* If nothing but SETs of registers to themselves,
   1735 	 this insn can also be deleted.  */
   1736       for (i = 0; i < XVECLEN (pat, 0); i++)
   1737 	{
   1738 	  rtx tem = XVECEXP (pat, 0, i);
   1739 
   1740 	  if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
   1741 	    continue;
   1742 
   1743 	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
   1744 	    return 0;
   1745 	}
   1746 
   1747       return 1;
   1748     }
   1749   return 0;
   1750 }
   1751 
   1752 
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

bool
refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
		   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return false;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we modifying the stack, frame, or argument pointer, it will
	 clobber a virtual register.  In fact, we could be more precise,
	 but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
	   || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	       && x_regno == ARG_POINTER_REGNUM)
	   || x_regno == FRAME_POINTER_REGNUM)
	  && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
	return true;

      /* Overlap test between [REGNO, ENDREGNO) and X's register span.  */
      return endregno > x_regno && regno < END_REGNO (x);

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
	 registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int inner_regno = subreg_regno (x);
	  unsigned int inner_endregno
	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
			     ? subreg_nregs (x) : 1);

	  return endregno > inner_regno && regno < inner_endregno;
	}
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
	  /* Note setting a SUBREG counts as referring to the REG it is in for
	     a pseudo but not for hard registers since we can
	     treat each word individually.  */
	  && ((GET_CODE (SET_DEST (x)) == SUBREG
	       && loc != &SUBREG_REG (SET_DEST (x))
	       && REG_P (SUBREG_REG (SET_DEST (x)))
	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
	       && refers_to_regno_p (regno, endregno,
				     SUBREG_REG (SET_DEST (x)), loc))
	      || (!REG_P (SET_DEST (x))
		  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
	return true;

      /* CLOBBERs have no source; otherwise continue with the source
	 unless it is the ignored substructure.  */
      if (code == CLOBBER || loc == &SET_SRC (x))
	return false;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
	{
	  /* Tail-recurse on operand 0 to limit stack depth.  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }
	  else
	    if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
	      return true;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (loc != &XVECEXP (x, i, j)
		&& refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
	      return true;
	}
    }
  return false;
}
   1862 
   1863 /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   1864    we check if any register number in X conflicts with the relevant register
   1865    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   1866    contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */
   1868 
int
reg_overlap_mentioned_p (const_rtx x, const_rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X cannot
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case CLOBBER:
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      /* For a SUBREG of a hard register, narrow to the registers the
	 subreg actually occupies; a SUBREG of a pseudo counts as the
	 whole inner register.  */
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
	regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? subreg_nregs (x) : 1);
      goto do_reg;

    case REG:
      regno = REGNO (x);
      endregno = END_REGNO (x);
    do_reg:
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
	const char *fmt;
	int i;

	/* Any MEM in IN is conservatively assumed to conflict with X.  */
	if (MEM_P (in))
	  return 1;

	/* Otherwise search IN's subexpressions for a MEM.  */
	fmt = GET_RTX_FORMAT (GET_CODE (in));
	for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
	  if (fmt[i] == 'e')
	    {
	      if (reg_overlap_mentioned_p (x, XEXP (in, i)))
		return 1;
	    }
	  else if (fmt[i] == 'E')
	    {
	      int j;
	      for (j = XVECLEN (in, i) - 1; j >= 0; --j)
		if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
		  return 1;
	    }

	return 0;
      }

    case SCRATCH:
    case PC:
      /* These carry no register numbers, so fall back on a plain
	 rtx-identity search.  */
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
	int i;

	/* If any register in here refers to it we return true.  */
	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	  if (XEXP (XVECEXP (x, 0, i), 0) != 0
	      && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
	    return 1;
	return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
   1952 
   1953 /* Call FUN on each register or MEM that is stored into or clobbered by X.
   1955    (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   1956    ignored by note_stores, but passed to FUN.
   1957 
   1958    FUN receives three arguments:
   1959    1. the REG, MEM or PC being stored in or clobbered,
   1960    2. the SET or CLOBBER rtx that does the store,
   1961    3. the pointer DATA provided to note_stores.
   1962 
   1963   If the item being stored in or clobbered is a SUBREG of a hard register,
   1964   the SUBREG will be passed.  */
   1965 
   1966 void
   1967 note_pattern_stores (const_rtx x,
   1968 		     void (*fun) (rtx, const_rtx, void *), void *data)
   1969 {
   1970   int i;
   1971 
   1972   if (GET_CODE (x) == COND_EXEC)
   1973     x = COND_EXEC_CODE (x);
   1974 
   1975   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
   1976     {
   1977       rtx dest = SET_DEST (x);
   1978 
   1979       while ((GET_CODE (dest) == SUBREG
   1980 	      && (!REG_P (SUBREG_REG (dest))
   1981 		  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
   1982 	     || GET_CODE (dest) == ZERO_EXTRACT
   1983 	     || GET_CODE (dest) == STRICT_LOW_PART)
   1984 	dest = XEXP (dest, 0);
   1985 
   1986       /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
   1987 	 each of whose first operand is a register.  */
   1988       if (GET_CODE (dest) == PARALLEL)
   1989 	{
   1990 	  for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
   1991 	    if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
   1992 	      (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
   1993 	}
   1994       else
   1995 	(*fun) (dest, x, data);
   1996     }
   1997 
   1998   else if (GET_CODE (x) == PARALLEL)
   1999     for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
   2000       note_pattern_stores (XVECEXP (x, 0, i), fun, data);
   2001 }
   2002 
   2003 /* Same, but for an instruction.  If the instruction is a call, include
   2004    any CLOBBERs in its CALL_INSN_FUNCTION_USAGE.  */
   2005 
   2006 void
   2007 note_stores (const rtx_insn *insn,
   2008 	     void (*fun) (rtx, const_rtx, void *), void *data)
   2009 {
   2010   if (CALL_P (insn))
   2011     for (rtx link = CALL_INSN_FUNCTION_USAGE (insn);
   2012 	 link; link = XEXP (link, 1))
   2013       if (GET_CODE (XEXP (link, 0)) == CLOBBER)
   2014 	note_pattern_stores (XEXP (link, 0), fun, data);
   2015   note_pattern_stores (PATTERN (insn), fun, data);
   2016 }
   2017 
/* Like note_stores, but call FUN for each expression that is being
   2020    referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   2021    FUN for each expression, not any interior subexpressions.  FUN receives a
   2022    pointer to the expression and the DATA passed to this function.
   2023 
   2024    Note that this is not quite the same test as that done in reg_referenced_p
   2025    since that considers something as being referenced if it is being
   2026    partially set, while we do not.  */
   2027 
void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      /* The test is a use; the guarded code may itself contain uses.  */
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case SEQUENCE:
      /* A SEQUENCE holds insns; recurse into each insn's pattern.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      /* Each asm input operand is a use.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      /* The prefetched address is a use.  */
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	(*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      /* Clobbering a MEM uses its address; clobbering anything else
	 uses nothing.  */
      if (MEM_P (XEXP (body, 0)))
	(*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
	rtx dest = SET_DEST (body);

	/* For sets we replace everything in source plus registers in memory
	   expression in store and operands of a ZERO_EXTRACT.  */
	(*fun) (&SET_SRC (body), data);

	/* The position/width operands of a ZERO_EXTRACT destination are
	   uses even though the extracted field is being set.  */
	if (GET_CODE (dest) == ZERO_EXTRACT)
	  {
	    (*fun) (&XEXP (dest, 1), data);
	    (*fun) (&XEXP (dest, 2), data);
	  }

	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
	  dest = XEXP (dest, 0);

	/* Storing into memory uses the store address.  */
	if (MEM_P (dest))
	  (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
   2107 
   2108 /* Try to add a description of REG X to this object, stopping once
   2109    the REF_END limit has been reached.  FLAGS is a bitmask of
   2110    rtx_obj_reference flags that describe the context.  */
   2111 
   2112 void
   2113 rtx_properties::try_to_add_reg (const_rtx x, unsigned int flags)
   2114 {
   2115   if (REG_NREGS (x) != 1)
   2116     flags |= rtx_obj_flags::IS_MULTIREG;
   2117   machine_mode mode = GET_MODE (x);
   2118   unsigned int start_regno = REGNO (x);
   2119   unsigned int end_regno = END_REGNO (x);
   2120   for (unsigned int regno = start_regno; regno < end_regno; ++regno)
   2121     if (ref_iter != ref_end)
   2122       *ref_iter++ = rtx_obj_reference (regno, flags, mode,
   2123 				       regno - start_regno);
   2124 }
   2125 
   2126 /* Add a description of destination X to this object.  FLAGS is a bitmask
   2127    of rtx_obj_reference flags that describe the context.
   2128 
   2129    This routine accepts all rtxes that can legitimately appear in a
   2130    SET_DEST.  */
   2131 
void
rtx_properties::try_to_add_dest (const_rtx x, unsigned int flags)
{
  /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
     each of whose first operand is a register.  */
  if (__builtin_expect (GET_CODE (x) == PARALLEL, 0))
    {
      for (int i = XVECLEN (x, 0) - 1; i >= 0; --i)
	if (rtx dest = XEXP (XVECEXP (x, 0, i), 0))
	  try_to_add_dest (dest, flags);
      return;
    }

  /* Keep the flags that should also apply to nested sources, such as
     ZERO_EXTRACT operands and address expressions.  */
  unsigned int base_flags = flags & rtx_obj_flags::STICKY_FLAGS;
  flags |= rtx_obj_flags::IS_WRITE;
  /* Peel wrappers off the destination; each partial-store wrapper also
     turns the access into a read-modify-write.  */
  for (;;)
    if (GET_CODE (x) == ZERO_EXTRACT)
      {
	/* The position and width operands are ordinary sources.  */
	try_to_add_src (XEXP (x, 1), base_flags);
	try_to_add_src (XEXP (x, 2), base_flags);
	flags |= rtx_obj_flags::IS_READ;
	x = XEXP (x, 0);
      }
    else if (GET_CODE (x) == STRICT_LOW_PART)
      {
	flags |= rtx_obj_flags::IS_READ;
	x = XEXP (x, 0);
      }
    else if (GET_CODE (x) == SUBREG)
      {
	flags |= rtx_obj_flags::IN_SUBREG;
	/* Only subregs that preserve part of the inner register's old
	   value count as reads.  */
	if (read_modify_subreg_p (x))
	  flags |= rtx_obj_flags::IS_READ;
	x = SUBREG_REG (x);
      }
    else
      break;

  if (MEM_P (x))
    {
      /* Record the memory store itself...  */
      if (ref_iter != ref_end)
	*ref_iter++ = rtx_obj_reference (MEM_REGNO, flags, GET_MODE (x));

      /* ...then the registers used in its address.  */
      unsigned int addr_flags = base_flags | rtx_obj_flags::IN_MEM_STORE;
      if (flags & rtx_obj_flags::IS_READ)
	addr_flags |= rtx_obj_flags::IN_MEM_LOAD;
      try_to_add_src (XEXP (x, 0), addr_flags);
      return;
    }

  if (__builtin_expect (REG_P (x), 1))
    {
      /* We want to keep sp alive everywhere -  by making all
	 writes to sp also use sp. */
      if (REGNO (x) == STACK_POINTER_REGNUM)
	flags |= rtx_obj_flags::IS_READ;
      try_to_add_reg (x, flags);
      return;
    }
}
   2192 
   2193 /* Try to add a description of source X to this object, stopping once
   2194    the REF_END limit has been reached.  FLAGS is a bitmask of
   2195    rtx_obj_reference flags that describe the context.
   2196 
   2197    This routine accepts all rtxes that can legitimately appear in a SET_SRC.  */
   2198 
void
rtx_properties::try_to_add_src (const_rtx x, unsigned int flags)
{
  /* Flags that should propagate into every subexpression we visit.  */
  unsigned int base_flags = flags & rtx_obj_flags::STICKY_FLAGS;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      rtx_code code = GET_CODE (x);
      if (code == REG)
	try_to_add_reg (x, flags | rtx_obj_flags::IS_READ);
      else if (code == MEM)
	{
	  if (MEM_VOLATILE_P (x))
	    has_volatile_refs = true;

	  /* Only record loads that could see changing data; read-only
	     memory is not an interesting reference.  */
	  if (!MEM_READONLY_P (x) && ref_iter != ref_end)
	    {
	      auto mem_flags = flags | rtx_obj_flags::IS_READ;
	      *ref_iter++ = rtx_obj_reference (MEM_REGNO, mem_flags,
					       GET_MODE (x));
	    }

	  /* Walk the address by hand so its registers are marked as
	     being part of a load address.  */
	  try_to_add_src (XEXP (x, 0),
			  base_flags | rtx_obj_flags::IN_MEM_LOAD);
	  iter.skip_subrtxes ();
	}
      else if (code == SUBREG)
	{
	  try_to_add_src (SUBREG_REG (x), flags | rtx_obj_flags::IN_SUBREG);
	  iter.skip_subrtxes ();
	}
      else if (code == UNSPEC_VOLATILE)
	has_volatile_refs = true;
      else if (code == ASM_INPUT || code == ASM_OPERANDS)
	{
	  has_asm = true;
	  /* MEM_VOLATILE_P doubles as the "volatile asm" flag here.  */
	  if (MEM_VOLATILE_P (x))
	    has_volatile_refs = true;
	}
      else if (code == PRE_INC
	       || code == PRE_DEC
	       || code == POST_INC
	       || code == POST_DEC
	       || code == PRE_MODIFY
	       || code == POST_MODIFY)
	{
	  has_pre_post_modify = true;

	  /* An autoincrement address both reads and writes the base.  */
	  unsigned int addr_flags = (base_flags
				     | rtx_obj_flags::IS_PRE_POST_MODIFY
				     | rtx_obj_flags::IS_READ);
	  try_to_add_dest (XEXP (x, 0), addr_flags);
	  /* For {PRE,POST}_MODIFY, continue the walk with only the
	     second operand of the modification expression; the base
	     register was already handled above.  */
	  if (code == PRE_MODIFY || code == POST_MODIFY)
	    iter.substitute (XEXP (XEXP (x, 1), 1));
	  else
	    iter.skip_subrtxes ();
	}
      else if (code == CALL)
	has_call = true;
    }
}
   2261 
   2262 /* Try to add a description of instruction pattern PAT to this object,
   2263    stopping once the REF_END limit has been reached.  */
   2264 
   2265 void
   2266 rtx_properties::try_to_add_pattern (const_rtx pat)
   2267 {
   2268   switch (GET_CODE (pat))
   2269     {
   2270     case COND_EXEC:
   2271       try_to_add_src (COND_EXEC_TEST (pat));
   2272       try_to_add_pattern (COND_EXEC_CODE (pat));
   2273       break;
   2274 
   2275     case PARALLEL:
   2276       {
   2277 	int last = XVECLEN (pat, 0) - 1;
   2278 	for (int i = 0; i < last; ++i)
   2279 	  try_to_add_pattern (XVECEXP (pat, 0, i));
   2280 	try_to_add_pattern (XVECEXP (pat, 0, last));
   2281 	break;
   2282       }
   2283 
   2284     case ASM_OPERANDS:
   2285       for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (pat); i < len; ++i)
   2286 	try_to_add_src (ASM_OPERANDS_INPUT (pat, i));
   2287       break;
   2288 
   2289     case CLOBBER:
   2290       try_to_add_dest (XEXP (pat, 0), rtx_obj_flags::IS_CLOBBER);
   2291       break;
   2292 
   2293     case SET:
   2294       try_to_add_dest (SET_DEST (pat));
   2295       try_to_add_src (SET_SRC (pat));
   2296       break;
   2297 
   2298     default:
   2299       /* All the other possibilities never store and can use a normal
   2300 	 rtx walk.  This includes:
   2301 
   2302 	 - USE
   2303 	 - TRAP_IF
   2304 	 - PREFETCH
   2305 	 - UNSPEC
   2306 	 - UNSPEC_VOLATILE.  */
   2307       try_to_add_src (pat);
   2308       break;
   2309     }
   2310 }
   2311 
   2312 /* Try to add a description of INSN to this object, stopping once
   2313    the REF_END limit has been reached.  INCLUDE_NOTES is true if the
   2314    description should include REG_EQUAL and REG_EQUIV notes; all such
   2315    references will then be marked with rtx_obj_flags::IN_NOTE.
   2316 
   2317    For calls, this description includes all accesses in
   2318    CALL_INSN_FUNCTION_USAGE.  It also include all implicit accesses
   2319    to global registers by the target function.  However, it does not
   2320    include clobbers performed by the target function; callers that want
   2321    this information should instead use the function_abi interface.  */
   2322 
void
rtx_properties::try_to_add_insn (const rtx_insn *insn, bool include_notes)
{
  if (CALL_P (insn))
    {
      /* Non-const functions can read from global registers.  Impure
	 functions can also set them.

	 Adding the global registers first removes a situation in which
	 a fixed-form clobber of register R could come before a real set
	 of register R.  */
      if (!hard_reg_set_empty_p (global_reg_set)
	  && !RTL_CONST_CALL_P (insn))
	{
	  unsigned int flags = rtx_obj_flags::IS_READ;
	  if (!RTL_PURE_CALL_P (insn))
	    flags |= rtx_obj_flags::IS_WRITE;
	  for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
	    /* As a special case, the stack pointer is invariant across calls
	       even if it has been marked global; see the corresponding
	       handling in df_get_call_refs.  */
	    if (regno != STACK_POINTER_REGNUM
		&& global_regs[regno]
		&& ref_iter != ref_end)
	      *ref_iter++ = rtx_obj_reference (regno, flags,
					       reg_raw_mode[regno], 0);
	}
      /* Untyped calls implicitly set all function value registers.
	 Again, we add them first in case the main pattern contains
	 a fixed-form clobber.  */
      if (find_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX))
	for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
	  if (targetm.calls.function_value_regno_p (regno)
	      && ref_iter != ref_end)
	    *ref_iter++ = rtx_obj_reference (regno, rtx_obj_flags::IS_WRITE,
					     reg_raw_mode[regno], 0);
      /* Non-const calls can both read and (unless pure) write memory.  */
      if (ref_iter != ref_end && !RTL_CONST_CALL_P (insn))
	{
	  auto mem_flags = rtx_obj_flags::IS_READ;
	  if (!RTL_PURE_CALL_P (insn))
	    mem_flags |= rtx_obj_flags::IS_WRITE;
	  *ref_iter++ = rtx_obj_reference (MEM_REGNO, mem_flags, BLKmode);
	}
      try_to_add_pattern (PATTERN (insn));
      /* Also record the explicit uses and clobbers attached to the call.  */
      for (rtx link = CALL_INSN_FUNCTION_USAGE (insn); link;
	   link = XEXP (link, 1))
	{
	  rtx x = XEXP (link, 0);
	  if (GET_CODE (x) == CLOBBER)
	    try_to_add_dest (XEXP (x, 0), rtx_obj_flags::IS_CLOBBER);
	  else if (GET_CODE (x) == USE)
	    try_to_add_src (XEXP (x, 0));
	}
    }
  else
    try_to_add_pattern (PATTERN (insn));

  /* Optionally include equivalence notes, marked as IN_NOTE references
     by try_to_add_note.  */
  if (include_notes)
    for (rtx note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_EQUAL
	  || REG_NOTE_KIND (note) == REG_EQUIV)
	try_to_add_note (XEXP (note, 0));
}
   2386 
   2387 /* Grow the storage by a bit while keeping the contents of the first
   2388    START elements.  */
   2389 
   2390 void
   2391 vec_rtx_properties_base::grow (ptrdiff_t start)
   2392 {
   2393   /* The same heuristic that vec uses.  */
   2394   ptrdiff_t new_elems = (ref_end - ref_begin) * 3 / 2;
   2395   if (ref_begin == m_storage)
   2396     {
   2397       ref_begin = XNEWVEC (rtx_obj_reference, new_elems);
   2398       if (start)
   2399 	memcpy (ref_begin, m_storage, start * sizeof (rtx_obj_reference));
   2400     }
   2401   else
   2402     ref_begin = reinterpret_cast<rtx_obj_reference *>
   2403       (xrealloc (ref_begin, new_elems * sizeof (rtx_obj_reference)));
   2404   ref_iter = ref_begin + start;
   2405   ref_end = ref_begin + new_elems;
   2406 }
   2407 
   2408 /* Return nonzero if X's old contents don't survive after INSN.
   2410    This will be true if X is a register and X dies in INSN or because
   2411    INSN entirely sets X.
   2412 
   2413    "Entirely set" means set directly and not through a SUBREG, or
   2414    ZERO_EXTRACT, so no trace of the old contents remains.
   2415    Likewise, REG_INC does not count.
   2416 
   2417    REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   2418    but for this use that makes no difference, since regs don't overlap
   2419    during their lifetimes.  Therefore, this function may be used
   2420    at any time after deaths have been computed.
   2421 
   2422    If REG is a hard reg that occupies multiple machine registers, this
   2423    function will only return 1 if each of those registers will be replaced
   2424    by INSN.  */
   2425 
   2426 int
   2427 dead_or_set_p (const rtx_insn *insn, const_rtx x)
   2428 {
   2429   unsigned int regno, end_regno;
   2430   unsigned int i;
   2431 
   2432   gcc_assert (REG_P (x));
   2433 
   2434   regno = REGNO (x);
   2435   end_regno = END_REGNO (x);
   2436   for (i = regno; i < end_regno; i++)
   2437     if (! dead_or_set_regno_p (insn, i))
   2438       return 0;
   2439 
   2440   return 1;
   2441 }
   2442 
   2443 /* Return TRUE iff DEST is a register or subreg of a register, is a
   2444    complete rather than read-modify-write destination, and contains
   2445    register TEST_REGNO.  */
   2446 
   2447 static bool
   2448 covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
   2449 {
   2450   unsigned int regno, endregno;
   2451 
   2452   if (GET_CODE (dest) == SUBREG && !read_modify_subreg_p (dest))
   2453     dest = SUBREG_REG (dest);
   2454 
   2455   if (!REG_P (dest))
   2456     return false;
   2457 
   2458   regno = REGNO (dest);
   2459   endregno = END_REGNO (dest);
   2460   return (test_regno >= regno && test_regno < endregno);
   2461 }
   2462 
   2463 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   2464    any member matches the covers_regno_no_parallel_p criteria.  */
   2465 
   2466 static bool
   2467 covers_regno_p (const_rtx dest, unsigned int test_regno)
   2468 {
   2469   if (GET_CODE (dest) == PARALLEL)
   2470     {
   2471       /* Some targets place small structures in registers for return
   2472 	 values of functions, and those registers are wrapped in
   2473 	 PARALLELs that we may see as the destination of a SET.  */
   2474       int i;
   2475 
   2476       for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
   2477 	{
   2478 	  rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
   2479 	  if (inner != NULL_RTX
   2480 	      && covers_regno_no_parallel_p (inner, test_regno))
   2481 	    return true;
   2482 	}
   2483 
   2484       return false;
   2485     }
   2486   else
   2487     return covers_regno_no_parallel_p (dest, test_regno);
   2488 }
   2489 
   2490 /* Utility function for dead_or_set_p to check an individual register. */
   2491 
   2492 int
   2493 dead_or_set_regno_p (const rtx_insn *insn, unsigned int test_regno)
   2494 {
   2495   const_rtx pattern;
   2496 
   2497   /* See if there is a death note for something that includes TEST_REGNO.  */
   2498   if (find_regno_note (insn, REG_DEAD, test_regno))
   2499     return 1;
   2500 
   2501   if (CALL_P (insn)
   2502       && find_regno_fusage (insn, CLOBBER, test_regno))
   2503     return 1;
   2504 
   2505   pattern = PATTERN (insn);
   2506 
   2507   /* If a COND_EXEC is not executed, the value survives.  */
   2508   if (GET_CODE (pattern) == COND_EXEC)
   2509     return 0;
   2510 
   2511   if (GET_CODE (pattern) == SET || GET_CODE (pattern) == CLOBBER)
   2512     return covers_regno_p (SET_DEST (pattern), test_regno);
   2513   else if (GET_CODE (pattern) == PARALLEL)
   2514     {
   2515       int i;
   2516 
   2517       for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
   2518 	{
   2519 	  rtx body = XVECEXP (pattern, 0, i);
   2520 
   2521 	  if (GET_CODE (body) == COND_EXEC)
   2522 	    body = COND_EXEC_CODE (body);
   2523 
   2524 	  if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
   2525 	      && covers_regno_p (SET_DEST (body), test_regno))
   2526 	    return 1;
   2527 	}
   2528     }
   2529 
   2530   return 0;
   2531 }
   2532 
   2533 /* Return the reg-note of kind KIND in insn INSN, if there is one.
   2534    If DATUM is nonzero, look for one whose datum is DATUM.  */
   2535 
   2536 rtx
   2537 find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
   2538 {
   2539   rtx link;
   2540 
   2541   gcc_checking_assert (insn);
   2542 
   2543   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
   2544   if (! INSN_P (insn))
   2545     return 0;
   2546   if (datum == 0)
   2547     {
   2548       for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
   2549 	if (REG_NOTE_KIND (link) == kind)
   2550 	  return link;
   2551       return 0;
   2552     }
   2553 
   2554   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
   2555     if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
   2556       return link;
   2557   return 0;
   2558 }
   2559 
   2560 /* Return the reg-note of kind KIND in insn INSN which applies to register
   2561    number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   2562    the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   2563    it might be the case that the note overlaps REGNO.  */
   2564 
   2565 rtx
   2566 find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
   2567 {
   2568   rtx link;
   2569 
   2570   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
   2571   if (! INSN_P (insn))
   2572     return 0;
   2573 
   2574   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
   2575     if (REG_NOTE_KIND (link) == kind
   2576 	/* Verify that it is a register, so that scratch and MEM won't cause a
   2577 	   problem here.  */
   2578 	&& REG_P (XEXP (link, 0))
   2579 	&& REGNO (XEXP (link, 0)) <= regno
   2580 	&& END_REGNO (XEXP (link, 0)) > regno)
   2581       return link;
   2582   return 0;
   2583 }
   2584 
   2585 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   2586    has such a note.  */
   2587 
   2588 rtx
   2589 find_reg_equal_equiv_note (const_rtx insn)
   2590 {
   2591   rtx link;
   2592 
   2593   if (!INSN_P (insn))
   2594     return 0;
   2595 
   2596   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
   2597     if (REG_NOTE_KIND (link) == REG_EQUAL
   2598 	|| REG_NOTE_KIND (link) == REG_EQUIV)
   2599       {
   2600 	/* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
   2601 	   insns that have multiple sets.  Checking single_set to
   2602 	   make sure of this is not the proper check, as explained
   2603 	   in the comment in set_unique_reg_note.
   2604 
   2605 	   This should be changed into an assert.  */
   2606 	if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
   2607 	  return 0;
   2608 	return link;
   2609       }
   2610   return NULL;
   2611 }
   2612 
   2613 /* Check whether INSN is a single_set whose source is known to be
   2614    equivalent to a constant.  Return that constant if so, otherwise
   2615    return null.  */
   2616 
   2617 rtx
   2618 find_constant_src (const rtx_insn *insn)
   2619 {
   2620   rtx note, set, x;
   2621 
   2622   set = single_set (insn);
   2623   if (set)
   2624     {
   2625       x = avoid_constant_pool_reference (SET_SRC (set));
   2626       if (CONSTANT_P (x))
   2627 	return x;
   2628     }
   2629 
   2630   note = find_reg_equal_equiv_note (insn);
   2631   if (note && CONSTANT_P (XEXP (note, 0)))
   2632     return XEXP (note, 0);
   2633 
   2634   return NULL_RTX;
   2635 }
   2636 
   2637 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   2638    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
   2639 
   2640 int
   2641 find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
   2642 {
   2643   /* If it's not a CALL_INSN, it can't possibly have a
   2644      CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
   2645   if (!CALL_P (insn))
   2646     return 0;
   2647 
   2648   gcc_assert (datum);
   2649 
   2650   if (!REG_P (datum))
   2651     {
   2652       rtx link;
   2653 
   2654       for (link = CALL_INSN_FUNCTION_USAGE (insn);
   2655 	   link;
   2656 	   link = XEXP (link, 1))
   2657 	if (GET_CODE (XEXP (link, 0)) == code
   2658 	    && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
   2659 	  return 1;
   2660     }
   2661   else
   2662     {
   2663       unsigned int regno = REGNO (datum);
   2664 
   2665       /* CALL_INSN_FUNCTION_USAGE information cannot contain references
   2666 	 to pseudo registers, so don't bother checking.  */
   2667 
   2668       if (regno < FIRST_PSEUDO_REGISTER)
   2669 	{
   2670 	  unsigned int end_regno = END_REGNO (datum);
   2671 	  unsigned int i;
   2672 
   2673 	  for (i = regno; i < end_regno; i++)
   2674 	    if (find_regno_fusage (insn, code, i))
   2675 	      return 1;
   2676 	}
   2677     }
   2678 
   2679   return 0;
   2680 }
   2681 
   2682 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   2683    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
   2684 
   2685 int
   2686 find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
   2687 {
   2688   rtx link;
   2689 
   2690   /* CALL_INSN_FUNCTION_USAGE information cannot contain references
   2691      to pseudo registers, so don't bother checking.  */
   2692 
   2693   if (regno >= FIRST_PSEUDO_REGISTER
   2694       || !CALL_P (insn) )
   2695     return 0;
   2696 
   2697   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
   2698     {
   2699       rtx op, reg;
   2700 
   2701       if (GET_CODE (op = XEXP (link, 0)) == code
   2702 	  && REG_P (reg = XEXP (op, 0))
   2703 	  && REGNO (reg) <= regno
   2704 	  && END_REGNO (reg) > regno)
   2705 	return 1;
   2706     }
   2707 
   2708   return 0;
   2709 }
   2710 
   2711 
   2712 /* Return true if KIND is an integer REG_NOTE.  */
   2714 
   2715 static bool
   2716 int_reg_note_p (enum reg_note kind)
   2717 {
   2718   return kind == REG_BR_PROB;
   2719 }
   2720 
   2721 /* Allocate a register note with kind KIND and datum DATUM.  LIST is
   2722    stored as the pointer to the next register note.  */
   2723 
   2724 rtx
   2725 alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
   2726 {
   2727   rtx note;
   2728 
   2729   gcc_checking_assert (!int_reg_note_p (kind));
   2730   switch (kind)
   2731     {
   2732     case REG_LABEL_TARGET:
   2733     case REG_LABEL_OPERAND:
   2734     case REG_TM:
   2735       /* These types of register notes use an INSN_LIST rather than an
   2736 	 EXPR_LIST, so that copying is done right and dumps look
   2737 	 better.  */
   2738       note = alloc_INSN_LIST (datum, list);
   2739       PUT_REG_NOTE_KIND (note, kind);
   2740       break;
   2741 
   2742     default:
   2743       note = alloc_EXPR_LIST (kind, datum, list);
   2744       break;
   2745     }
   2746 
   2747   return note;
   2748 }
   2749 
   2750 /* Add register note with kind KIND and datum DATUM to INSN.  */
   2751 
   2752 void
   2753 add_reg_note (rtx insn, enum reg_note kind, rtx datum)
   2754 {
   2755   REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
   2756 }
   2757 
   2758 /* Add an integer register note with kind KIND and datum DATUM to INSN.  */
   2759 
   2760 void
   2761 add_int_reg_note (rtx_insn *insn, enum reg_note kind, int datum)
   2762 {
   2763   gcc_checking_assert (int_reg_note_p (kind));
   2764   REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind,
   2765 				       datum, REG_NOTES (insn));
   2766 }
   2767 
   2768 /* Add a REG_ARGS_SIZE note to INSN with value VALUE.  */
   2769 
   2770 void
   2771 add_args_size_note (rtx_insn *insn, poly_int64 value)
   2772 {
   2773   gcc_checking_assert (!find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX));
   2774   add_reg_note (insn, REG_ARGS_SIZE, gen_int_mode (value, Pmode));
   2775 }
   2776 
   2777 /* Add a register note like NOTE to INSN.  */
   2778 
   2779 void
   2780 add_shallow_copy_of_reg_note (rtx_insn *insn, rtx note)
   2781 {
   2782   if (GET_CODE (note) == INT_LIST)
   2783     add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0));
   2784   else
   2785     add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
   2786 }
   2787 
   2788 /* Duplicate NOTE and return the copy.  */
   2789 rtx
   2790 duplicate_reg_note (rtx note)
   2791 {
   2792   reg_note kind = REG_NOTE_KIND (note);
   2793 
   2794   if (GET_CODE (note) == INT_LIST)
   2795     return gen_rtx_INT_LIST ((machine_mode) kind, XINT (note, 0), NULL_RTX);
   2796   else if (GET_CODE (note) == EXPR_LIST)
   2797     return alloc_reg_note (kind, copy_insn_1 (XEXP (note, 0)), NULL_RTX);
   2798   else
   2799     return alloc_reg_note (kind, XEXP (note, 0), NULL_RTX);
   2800 }
   2801 
   2802 /* Remove register note NOTE from the REG_NOTES of INSN.  */
   2803 
   2804 void
   2805 remove_note (rtx_insn *insn, const_rtx note)
   2806 {
   2807   rtx link;
   2808 
   2809   if (note == NULL_RTX)
   2810     return;
   2811 
   2812   if (REG_NOTES (insn) == note)
   2813     REG_NOTES (insn) = XEXP (note, 1);
   2814   else
   2815     for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
   2816       if (XEXP (link, 1) == note)
   2817 	{
   2818 	  XEXP (link, 1) = XEXP (note, 1);
   2819 	  break;
   2820 	}
   2821 
   2822   switch (REG_NOTE_KIND (note))
   2823     {
   2824     case REG_EQUAL:
   2825     case REG_EQUIV:
   2826       df_notes_rescan (insn);
   2827       break;
   2828     default:
   2829       break;
   2830     }
   2831 }
   2832 
   2833 /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.
   2834    If NO_RESCAN is false and any notes were removed, call
   2835    df_notes_rescan.  Return true if any note has been removed.  */
   2836 
   2837 bool
   2838 remove_reg_equal_equiv_notes (rtx_insn *insn, bool no_rescan)
   2839 {
   2840   rtx *loc;
   2841   bool ret = false;
   2842 
   2843   loc = &REG_NOTES (insn);
   2844   while (*loc)
   2845     {
   2846       enum reg_note kind = REG_NOTE_KIND (*loc);
   2847       if (kind == REG_EQUAL || kind == REG_EQUIV)
   2848 	{
   2849 	  *loc = XEXP (*loc, 1);
   2850 	  ret = true;
   2851 	}
   2852       else
   2853 	loc = &XEXP (*loc, 1);
   2854     }
   2855   if (ret && !no_rescan)
   2856     df_notes_rescan (insn);
   2857   return ret;
   2858 }
   2859 
   2860 /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO.  */
   2861 
   2862 void
   2863 remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
   2864 {
   2865   df_ref eq_use;
   2866 
   2867   if (!df)
   2868     return;
   2869 
   2870   /* This loop is a little tricky.  We cannot just go down the chain because
   2871      it is being modified by some actions in the loop.  So we just iterate
   2872      over the head.  We plan to drain the list anyway.  */
   2873   while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
   2874     {
   2875       rtx_insn *insn = DF_REF_INSN (eq_use);
   2876       rtx note = find_reg_equal_equiv_note (insn);
   2877 
   2878       /* This assert is generally triggered when someone deletes a REG_EQUAL
   2879 	 or REG_EQUIV note by hacking the list manually rather than calling
   2880 	 remove_note.  */
   2881       gcc_assert (note);
   2882 
   2883       remove_note (insn, note);
   2884     }
   2885 }
   2886 
   2887 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   2888    return 1 if it is found.  A simple equality test is used to determine if
   2889    NODE matches.  */
   2890 
   2891 bool
   2892 in_insn_list_p (const rtx_insn_list *listp, const rtx_insn *node)
   2893 {
   2894   const_rtx x;
   2895 
   2896   for (x = listp; x; x = XEXP (x, 1))
   2897     if (node == XEXP (x, 0))
   2898       return true;
   2899 
   2900   return false;
   2901 }
   2902 
   2903 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   2904    remove that entry from the list if it is found.
   2905 
   2906    A simple equality test is used to determine if NODE matches.  */
   2907 
   2908 void
   2909 remove_node_from_expr_list (const_rtx node, rtx_expr_list **listp)
   2910 {
   2911   rtx_expr_list *temp = *listp;
   2912   rtx_expr_list *prev = NULL;
   2913 
   2914   while (temp)
   2915     {
   2916       if (node == temp->element ())
   2917 	{
   2918 	  /* Splice the node out of the list.  */
   2919 	  if (prev)
   2920 	    XEXP (prev, 1) = temp->next ();
   2921 	  else
   2922 	    *listp = temp->next ();
   2923 
   2924 	  return;
   2925 	}
   2926 
   2927       prev = temp;
   2928       temp = temp->next ();
   2929     }
   2930 }
   2931 
   2932 /* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
   2933    remove that entry from the list if it is found.
   2934 
   2935    A simple equality test is used to determine if NODE matches.  */
   2936 
   2937 void
   2938 remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp)
   2939 {
   2940   rtx_insn_list *temp = *listp;
   2941   rtx_insn_list *prev = NULL;
   2942 
   2943   while (temp)
   2944     {
   2945       if (node == temp->insn ())
   2946 	{
   2947 	  /* Splice the node out of the list.  */
   2948 	  if (prev)
   2949 	    XEXP (prev, 1) = temp->next ();
   2950 	  else
   2951 	    *listp = temp->next ();
   2952 
   2953 	  return;
   2954 	}
   2955 
   2956       prev = temp;
   2957       temp = temp->next ();
   2958     }
   2959 }
   2960 
   2961 /* Nonzero if X contains any volatile instructions.  These are instructions
   2963    which may cause unpredictable machine state instructions, and thus no
   2964    instructions or register uses should be moved or combined across them.
   2965    This includes only volatile asms and UNSPEC_VOLATILE instructions.  */
   2966 
   2967 int
   2968 volatile_insn_p (const_rtx x)
   2969 {
   2970   const RTX_CODE code = GET_CODE (x);
   2971   switch (code)
   2972     {
   2973     case LABEL_REF:
   2974     case SYMBOL_REF:
   2975     case CONST:
   2976     CASE_CONST_ANY:
   2977     case PC:
   2978     case REG:
   2979     case SCRATCH:
   2980     case CLOBBER:
   2981     case ADDR_VEC:
   2982     case ADDR_DIFF_VEC:
   2983     case CALL:
   2984     case MEM:
   2985       return 0;
   2986 
   2987     case UNSPEC_VOLATILE:
   2988       return 1;
   2989 
   2990     case ASM_INPUT:
   2991     case ASM_OPERANDS:
   2992       if (MEM_VOLATILE_P (x))
   2993 	return 1;
   2994 
   2995     default:
   2996       break;
   2997     }
   2998 
   2999   /* Recursively scan the operands of this expression.  */
   3000 
   3001   {
   3002     const char *const fmt = GET_RTX_FORMAT (code);
   3003     int i;
   3004 
   3005     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   3006       {
   3007 	if (fmt[i] == 'e')
   3008 	  {
   3009 	    if (volatile_insn_p (XEXP (x, i)))
   3010 	      return 1;
   3011 	  }
   3012 	else if (fmt[i] == 'E')
   3013 	  {
   3014 	    int j;
   3015 	    for (j = 0; j < XVECLEN (x, i); j++)
   3016 	      if (volatile_insn_p (XVECEXP (x, i, j)))
   3017 		return 1;
   3018 	  }
   3019       }
   3020   }
   3021   return 0;
   3022 }
   3023 
   3024 /* Nonzero if X contains any volatile memory references
   3025    UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */
   3026 
   3027 int
   3028 volatile_refs_p (const_rtx x)
   3029 {
   3030   const RTX_CODE code = GET_CODE (x);
   3031   switch (code)
   3032     {
   3033     case LABEL_REF:
   3034     case SYMBOL_REF:
   3035     case CONST:
   3036     CASE_CONST_ANY:
   3037     case PC:
   3038     case REG:
   3039     case SCRATCH:
   3040     case CLOBBER:
   3041     case ADDR_VEC:
   3042     case ADDR_DIFF_VEC:
   3043       return 0;
   3044 
   3045     case UNSPEC_VOLATILE:
   3046       return 1;
   3047 
   3048     case MEM:
   3049     case ASM_INPUT:
   3050     case ASM_OPERANDS:
   3051       if (MEM_VOLATILE_P (x))
   3052 	return 1;
   3053 
   3054     default:
   3055       break;
   3056     }
   3057 
   3058   /* Recursively scan the operands of this expression.  */
   3059 
   3060   {
   3061     const char *const fmt = GET_RTX_FORMAT (code);
   3062     int i;
   3063 
   3064     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   3065       {
   3066 	if (fmt[i] == 'e')
   3067 	  {
   3068 	    if (volatile_refs_p (XEXP (x, i)))
   3069 	      return 1;
   3070 	  }
   3071 	else if (fmt[i] == 'E')
   3072 	  {
   3073 	    int j;
   3074 	    for (j = 0; j < XVECLEN (x, i); j++)
   3075 	      if (volatile_refs_p (XVECEXP (x, i, j)))
   3076 		return 1;
   3077 	  }
   3078       }
   3079   }
   3080   return 0;
   3081 }
   3082 
   3083 /* Similar to above, except that it also rejects register pre- and post-
   3084    incrementing.  */
   3085 
   3086 int
   3087 side_effects_p (const_rtx x)
   3088 {
   3089   const RTX_CODE code = GET_CODE (x);
   3090   switch (code)
   3091     {
   3092     case LABEL_REF:
   3093     case SYMBOL_REF:
   3094     case CONST:
   3095     CASE_CONST_ANY:
   3096     case PC:
   3097     case REG:
   3098     case SCRATCH:
   3099     case ADDR_VEC:
   3100     case ADDR_DIFF_VEC:
   3101     case VAR_LOCATION:
   3102       return 0;
   3103 
   3104     case CLOBBER:
   3105       /* Reject CLOBBER with a non-VOID mode.  These are made by combine.cc
   3106 	 when some combination can't be done.  If we see one, don't think
   3107 	 that we can simplify the expression.  */
   3108       return (GET_MODE (x) != VOIDmode);
   3109 
   3110     case PRE_INC:
   3111     case PRE_DEC:
   3112     case POST_INC:
   3113     case POST_DEC:
   3114     case PRE_MODIFY:
   3115     case POST_MODIFY:
   3116     case CALL:
   3117     case UNSPEC_VOLATILE:
   3118       return 1;
   3119 
   3120     case MEM:
   3121     case ASM_INPUT:
   3122     case ASM_OPERANDS:
   3123       if (MEM_VOLATILE_P (x))
   3124 	return 1;
   3125 
   3126     default:
   3127       break;
   3128     }
   3129 
   3130   /* Recursively scan the operands of this expression.  */
   3131 
   3132   {
   3133     const char *fmt = GET_RTX_FORMAT (code);
   3134     int i;
   3135 
   3136     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   3137       {
   3138 	if (fmt[i] == 'e')
   3139 	  {
   3140 	    if (side_effects_p (XEXP (x, i)))
   3141 	      return 1;
   3142 	  }
   3143 	else if (fmt[i] == 'E')
   3144 	  {
   3145 	    int j;
   3146 	    for (j = 0; j < XVECLEN (x, i); j++)
   3147 	      if (side_effects_p (XVECEXP (x, i, j)))
   3148 		return 1;
   3149 	  }
   3150       }
   3151   }
   3152   return 0;
   3153 }
   3154 
/* Return nonzero if evaluating rtx X might cause a trap.
   FLAGS controls how to consider MEMs.  A nonzero means the context
   of the access may have changed from the original, such that the
   address may have become invalid.  */

int
may_trap_p_1 (const_rtx x, unsigned flags)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  /* We make no distinction currently, but this function is part of
     the internal target-hooks ABI so we keep the parameter as
     "unsigned flags".  */
  bool code_changed = flags != 0;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case REG:
    case SCRATCH:
      return 0;

    case UNSPEC:
      /* Only the target knows whether a given unspec can trap.  */
      return targetm.unspec_may_trap_p (x, flags);

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      /* Recognize specific pattern of stack checking probes.  */
      if (flag_stack_check
	  && MEM_VOLATILE_P (x)
	  && XEXP (x, 0) == stack_pointer_rtx)
	return 1;
      if (/* MEM_NOTRAP_P only relates to the actual position of the memory
	     reference; moving it out of context such as when moving code
	     when optimizing, might cause its address to become invalid.  */
	  code_changed
	  || !MEM_NOTRAP_P (x))
	{
	  /* -1 signals an unknown access size to the address checker.  */
	  poly_int64 size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : -1;
	  return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
					GET_MODE (x), code_changed);
	}

      return 0;

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (x))
	return 1;
      if (FLOAT_MODE_P (GET_MODE (x)))
	return flag_trapping_math;
      /* A non-constant divisor, or a constant zero divisor, may trap.  */
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
	return 1;
      if (GET_CODE (XEXP (x, 1)) == CONST_VECTOR)
	{
	  /* For CONST_VECTOR, return 1 if any element is or might be zero.  */
	  unsigned int n_elts;
	  rtx op = XEXP (x, 1);
	  if (!GET_MODE_NUNITS (GET_MODE (op)).is_constant (&n_elts))
	    {
	      /* Variable-length vector: we can only inspect the encoded
		 elements, and only if the vector is a duplicate.  */
	      if (!CONST_VECTOR_DUPLICATE_P (op))
		return 1;
	      for (unsigned i = 0; i < (unsigned int) XVECLEN (op, 0); i++)
		if (CONST_VECTOR_ENCODED_ELT (op, i) == const0_rtx)
		  return 1;
	    }
	  else
	    for (unsigned i = 0; i < n_elts; i++)
	      if (CONST_VECTOR_ELT (op, i) == const0_rtx)
		return 1;
	}
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
	 certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case LTGT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
	break;
      /* ??? There is no machine independent way to check for tests that trap
	 when COMPARE is used, though many targets do make this distinction.
	 For instance, sparc uses CCFPE for compares which generate exceptions
	 and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (x))
	return 1;
      /* But often the compare has some CC mode, so check operand
	 modes as well.  */
      if (HONOR_NANS (XEXP (x, 0))
	  || HONOR_NANS (XEXP (x, 1)))
	return 1;
      break;

    case EQ:
    case NE:
      /* Equality comparisons trap only on signaling NaNs.  */
      if (HONOR_SNANS (x))
	return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (XEXP (x, 0))
	  || HONOR_SNANS (XEXP (x, 1)))
	return 1;
      break;

    case FIX:
    case UNSIGNED_FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (XEXP (x, 0)))
	return 1;
      break;

    case NEG:
    case ABS:
    case SUBREG:
    case VEC_MERGE:
    case VEC_SELECT:
    case VEC_CONCAT:
    case VEC_DUPLICATE:
      /* These operations don't trap even with floating point.  */
      break;

    case SIGN_EXTRACT:
      if (targetm.have_extv ())
	return targetm.bitfield_may_trap_p (x, flags);
      break;
    case ZERO_EXTRACT:
      if (targetm.have_extzv ())
	return targetm.bitfield_may_trap_p (x, flags);
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
	return 1;
    }

  /* No trap found in X itself; recursively scan its operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (may_trap_p_1 (XEXP (x, i), flags))
	    return 1;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (may_trap_p_1 (XVECEXP (x, i, j), flags))
	      return 1;
	}
    }
  return 0;
}
   3337 
   3338 /* Return nonzero if evaluating rtx X might cause a trap.  */
   3339 
   3340 int
   3341 may_trap_p (const_rtx x)
   3342 {
   3343   return may_trap_p_1 (x, 0);
   3344 }
   3345 
   3346 /* Same as above, but additionally return nonzero if evaluating rtx X might
   3347    cause a fault.  We define a fault for the purpose of this function as a
   3348    erroneous execution condition that cannot be encountered during the normal
   3349    execution of a valid program; the typical example is an unaligned memory
   3350    access on a strict alignment machine.  The compiler guarantees that it
   3351    doesn't generate code that will fault from a valid program, but this
   3352    guarantee doesn't mean anything for individual instructions.  Consider
   3353    the following example:
   3354 
   3355       struct S { int d; union { char *cp; int *ip; }; };
   3356 
   3357       int foo(struct S *s)
   3358       {
   3359 	if (s->d == 1)
   3360 	  return *s->ip;
   3361 	else
   3362 	  return *s->cp;
   3363       }
   3364 
   3365    on a strict alignment machine.  In a valid program, foo will never be
   3366    invoked on a structure for which d is equal to 1 and the underlying
   3367    unique field of the union not aligned on a 4-byte boundary, but the
   3368    expression *s->ip might cause a fault if considered individually.
   3369 
   3370    At the RTL level, potentially problematic expressions will almost always
   3371    verify may_trap_p; for example, the above dereference can be emitted as
   3372    (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   3373    However, suppose that foo is inlined in a caller that causes s->cp to
   3374    point to a local character variable and guarantees that s->d is not set
   3375    to 1; foo may have been effectively translated into pseudo-RTL as:
   3376 
   3377       if ((reg:SI) == 1)
   3378 	(set (reg:SI) (mem:SI (%fp - 7)))
   3379       else
   3380 	(set (reg:QI) (mem:QI (%fp - 7)))
   3381 
   3382    Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   3383    memory reference to a stack slot, but it will certainly cause a fault
   3384    on a strict alignment machine.  */
   3385 
   3386 int
   3387 may_trap_or_fault_p (const_rtx x)
   3388 {
   3389   return may_trap_p_1 (x, 1);
   3390 }
   3391 
/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.

   ALL_REGS is true if we want to replace all REGs equal to FROM, not just
   those pointer-equal ones.  */

rtx
replace_rtx (rtx x, rtx from, rtx to, bool all_regs)
{
  int i, j;
  const char *fmt;

  if (x == from)
    return to;

  /* Allow this function to make replacements in EXPR_LISTs.  */
  if (x == 0)
    return 0;

  /* With ALL_REGS, match registers by number instead of by pointer.  */
  if (all_regs
      && REG_P (x)
      && REG_P (from)
      && REGNO (x) == REGNO (from))
    {
      gcc_assert (GET_MODE (x) == GET_MODE (from));
      return to;
    }
  else if (GET_CODE (x) == SUBREG)
    {
      rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to, all_regs);

      /* If the replacement turned the inner expression into a constant,
	 fold the SUBREG away rather than leave (subreg (const_int)).  */
      if (CONST_SCALAR_INT_P (new_rtx))
	{
	  x = simplify_subreg (GET_MODE (x), new_rtx,
			       GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
	  gcc_assert (x);
	}
      else
	SUBREG_REG (x) = new_rtx;

      return x;
    }
  else if (GET_CODE (x) == ZERO_EXTEND)
    {
      rtx new_rtx = replace_rtx (XEXP (x, 0), from, to, all_regs);

      /* Likewise, fold (zero_extend (const_int)) to a constant.  */
      if (CONST_SCALAR_INT_P (new_rtx))
	{
	  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
					new_rtx, GET_MODE (XEXP (x, 0)));
	  gcc_assert (x);
	}
      else
	XEXP (x, 0) = new_rtx;

      return x;
    }

  /* Recurse into the operands, replacing in place.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = replace_rtx (XEXP (x, i), from, to, all_regs);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j),
					   from, to, all_regs);
    }

  return x;
}
   3468 
/* Replace occurrences of the OLD_LABEL in *LOC with NEW_LABEL.  Also track
   the change in LABEL_NUSES if UPDATE_LABEL_NUSES.  */

void
replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses)
{
  /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long.  */
  rtx x = *loc;
  if (JUMP_TABLE_DATA_P (x))
    {
      /* For ADDR_DIFF_VEC the label vector is operand 1, else operand 0.  */
      x = PATTERN (x);
      rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC);
      int len = GET_NUM_ELEM (vec);
      for (int i = 0; i < len; ++i)
	{
	  rtx ref = RTVEC_ELT (vec, i);
	  if (XEXP (ref, 0) == old_label)
	    {
	      XEXP (ref, 0) = new_label;
	      if (update_label_nuses)
		{
		  ++LABEL_NUSES (new_label);
		  --LABEL_NUSES (old_label);
		}
	    }
	}
      return;
    }

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by the iterator because it doesn't
     handle unprinted ('0') fields.  */
  if (JUMP_P (x) && JUMP_LABEL (x) == old_label)
    JUMP_LABEL (x) = new_label;

  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
	{
	  if (GET_CODE (x) == SYMBOL_REF
	      && CONSTANT_POOL_ADDRESS_P (x))
	    {
	      rtx c = get_pool_constant (x);
	      if (rtx_referenced_p (old_label, c))
		{
		  /* Create a copy of constant C; replace the label inside
		     but do not update LABEL_NUSES because uses in constant pool
		     are not counted.  */
		  rtx new_c = copy_rtx (c);
		  replace_label (&new_c, old_label, new_label, false);

		  /* Add the new constant NEW_C to constant pool and replace
		     the old reference to constant by new reference.  */
		  rtx new_mem = force_const_mem (get_pool_mode (x), new_c);
		  *loc = replace_rtx (x, x, XEXP (new_mem, 0));
		}
	    }

	  /* LABEL_REFs and INSN_LISTs (e.g. in REG_LABEL_* notes) both
	     carry the referenced label in operand 0.  */
	  if ((GET_CODE (x) == LABEL_REF
	       || GET_CODE (x) == INSN_LIST)
	      && XEXP (x, 0) == old_label)
	    {
	      XEXP (x, 0) = new_label;
	      if (update_label_nuses)
		{
		  ++LABEL_NUSES (new_label);
		  --LABEL_NUSES (old_label);
		}
	    }
	}
    }
}
   3544 
   3545 void
   3546 replace_label_in_insn (rtx_insn *insn, rtx_insn *old_label,
   3547 		       rtx_insn *new_label, bool update_label_nuses)
   3548 {
   3549   rtx insn_as_rtx = insn;
   3550   replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses);
   3551   gcc_checking_assert (insn_as_rtx == insn);
   3552 }
   3553 
/* Return true if X is referenced in BODY.  Matches either by label
   reference or by rtx_equal_p structural equality, and also looks
   inside constants referenced via the constant pool.  */

bool
rtx_referenced_p (const_rtx x, const_rtx body)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, body, ALL)
    if (const_rtx y = *iter)
      {
	/* Check if a label_ref Y refers to label X.  */
	if (GET_CODE (y) == LABEL_REF
	    && LABEL_P (x)
	    && label_ref_label (y) == x)
	  return true;

	if (rtx_equal_p (x, y))
	  return true;

	/* If Y is a reference to pool constant traverse the constant.  */
	if (GET_CODE (y) == SYMBOL_REF
	    && CONSTANT_POOL_ADDRESS_P (y))
	  iter.substitute (get_pool_constant (y));
      }
  return false;
}
   3579 
   3580 /* If INSN is a tablejump return true and store the label (before jump table) to
   3581    *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */
   3582 
   3583 bool
   3584 tablejump_p (const rtx_insn *insn, rtx_insn **labelp,
   3585 	     rtx_jump_table_data **tablep)
   3586 {
   3587   if (!JUMP_P (insn))
   3588     return false;
   3589 
   3590   rtx target = JUMP_LABEL (insn);
   3591   if (target == NULL_RTX || ANY_RETURN_P (target))
   3592     return false;
   3593 
   3594   rtx_insn *label = as_a<rtx_insn *> (target);
   3595   rtx_insn *table = next_insn (label);
   3596   if (table == NULL_RTX || !JUMP_TABLE_DATA_P (table))
   3597     return false;
   3598 
   3599   if (labelp)
   3600     *labelp = label;
   3601   if (tablep)
   3602     *tablep = as_a <rtx_jump_table_data *> (table);
   3603   return true;
   3604 }
   3605 
   3606 /* For INSN known to satisfy tablejump_p, determine if it actually is a
   3607    CASESI.  Return the insn pattern if so, NULL_RTX otherwise.  */
   3608 
   3609 rtx
   3610 tablejump_casesi_pattern (const rtx_insn *insn)
   3611 {
   3612   rtx tmp;
   3613 
   3614   if ((tmp = single_set (insn)) != NULL
   3615       && SET_DEST (tmp) == pc_rtx
   3616       && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
   3617       && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
   3618     return tmp;
   3619 
   3620   return NULL_RTX;
   3621 }
   3622 
   3623 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   3624    constant that is not in the constant pool and not in the condition
   3625    of an IF_THEN_ELSE.  */
   3626 
   3627 static int
   3628 computed_jump_p_1 (const_rtx x)
   3629 {
   3630   const enum rtx_code code = GET_CODE (x);
   3631   int i, j;
   3632   const char *fmt;
   3633 
   3634   switch (code)
   3635     {
   3636     case LABEL_REF:
   3637     case PC:
   3638       return 0;
   3639 
   3640     case CONST:
   3641     CASE_CONST_ANY:
   3642     case SYMBOL_REF:
   3643     case REG:
   3644       return 1;
   3645 
   3646     case MEM:
   3647       return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
   3648 		&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
   3649 
   3650     case IF_THEN_ELSE:
   3651       return (computed_jump_p_1 (XEXP (x, 1))
   3652 	      || computed_jump_p_1 (XEXP (x, 2)));
   3653 
   3654     default:
   3655       break;
   3656     }
   3657 
   3658   fmt = GET_RTX_FORMAT (code);
   3659   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   3660     {
   3661       if (fmt[i] == 'e'
   3662 	  && computed_jump_p_1 (XEXP (x, i)))
   3663 	return 1;
   3664 
   3665       else if (fmt[i] == 'E')
   3666 	for (j = 0; j < XVECLEN (x, i); j++)
   3667 	  if (computed_jump_p_1 (XVECEXP (x, i, j)))
   3668 	    return 1;
   3669     }
   3670 
   3671   return 0;
   3672 }
   3673 
/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

int
computed_jump_p (const rtx_insn *insn)
{
  int i;
  if (JUMP_P (insn))
    {
      rtx pat = PATTERN (insn);

      /* If we have a JUMP_LABEL set, we're not a computed jump.  */
      if (JUMP_LABEL (insn) != NULL)
	return 0;

      if (GET_CODE (pat) == PARALLEL)
	{
	  int len = XVECLEN (pat, 0);
	  int has_use_labelref = 0;

	  /* First pass: a (use (label_ref ...)) identifies a tablejump
	     or casesi, which is not a computed jump.  */
	  for (i = len - 1; i >= 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
		    == LABEL_REF))
	      {
	        has_use_labelref = 1;
	        break;
	      }

	  /* Second pass: look for a set of the PC from a computed
	     source (REG, MEM outside the constant pool, etc.).  */
	  if (! has_use_labelref)
	    for (i = len - 1; i >= 0; i--)
	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
		return 1;
	}
      else if (GET_CODE (pat) == SET
	       && SET_DEST (pat) == pc_rtx
	       && computed_jump_p_1 (SET_SRC (pat)))
	return 1;
    }
  return 0;
}
   3719 
   3720 
   3721 
   3723 /* MEM has a PRE/POST-INC/DEC/MODIFY address X.  Extract the operands of
   3724    the equivalent add insn and pass the result to FN, using DATA as the
   3725    final argument.  */
   3726 
   3727 static int
   3728 for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data)
   3729 {
   3730   rtx x = XEXP (mem, 0);
   3731   switch (GET_CODE (x))
   3732     {
   3733     case PRE_INC:
   3734     case POST_INC:
   3735       {
   3736 	poly_int64 size = GET_MODE_SIZE (GET_MODE (mem));
   3737 	rtx r1 = XEXP (x, 0);
   3738 	rtx c = gen_int_mode (size, GET_MODE (r1));
   3739 	return fn (mem, x, r1, r1, c, data);
   3740       }
   3741 
   3742     case PRE_DEC:
   3743     case POST_DEC:
   3744       {
   3745 	poly_int64 size = GET_MODE_SIZE (GET_MODE (mem));
   3746 	rtx r1 = XEXP (x, 0);
   3747 	rtx c = gen_int_mode (-size, GET_MODE (r1));
   3748 	return fn (mem, x, r1, r1, c, data);
   3749       }
   3750 
   3751     case PRE_MODIFY:
   3752     case POST_MODIFY:
   3753       {
   3754 	rtx r1 = XEXP (x, 0);
   3755 	rtx add = XEXP (x, 1);
   3756 	return fn (mem, x, r1, add, NULL, data);
   3757       }
   3758 
   3759     default:
   3760       gcc_unreachable ();
   3761     }
   3762 }
   3763 
   3764 /* Traverse *LOC looking for MEMs that have autoinc addresses.
   3765    For each such autoinc operation found, call FN, passing it
   3766    the innermost enclosing MEM, the operation itself, the RTX modified
   3767    by the operation, two RTXs (the second may be NULL) that, once
   3768    added, represent the value to be held by the modified RTX
   3769    afterwards, and DATA.  FN is to return 0 to continue the
   3770    traversal or any other value to have it returned to the caller of
   3771    for_each_inc_dec.  */
   3772 
   3773 int
   3774 for_each_inc_dec (rtx x,
   3775 		  for_each_inc_dec_fn fn,
   3776 		  void *data)
   3777 {
   3778   subrtx_var_iterator::array_type array;
   3779   FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
   3780     {
   3781       rtx mem = *iter;
   3782       if (mem
   3783 	  && MEM_P (mem)
   3784 	  && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
   3785 	{
   3786 	  int res = for_each_inc_dec_find_inc_dec (mem, fn, data);
   3787 	  if (res != 0)
   3788 	    return res;
   3789 	  iter.skip_subrtxes ();
   3790 	}
   3791     }
   3792   return 0;
   3793 }
   3794 
   3795 
   3796 /* Searches X for any reference to REGNO, returning the rtx of the
   3798    reference found if any.  Otherwise, returns NULL_RTX.  */
   3799 
   3800 rtx
   3801 regno_use_in (unsigned int regno, rtx x)
   3802 {
   3803   const char *fmt;
   3804   int i, j;
   3805   rtx tem;
   3806 
   3807   if (REG_P (x) && REGNO (x) == regno)
   3808     return x;
   3809 
   3810   fmt = GET_RTX_FORMAT (GET_CODE (x));
   3811   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
   3812     {
   3813       if (fmt[i] == 'e')
   3814 	{
   3815 	  if ((tem = regno_use_in (regno, XEXP (x, i))))
   3816 	    return tem;
   3817 	}
   3818       else if (fmt[i] == 'E')
   3819 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
   3820 	  if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
   3821 	    return tem;
   3822     }
   3823 
   3824   return NULL_RTX;
   3825 }
   3826 
   3827 /* Return a value indicating whether OP, an operand of a commutative
   3828    operation, is preferred as the first or second operand.  The more
   3829    positive the value, the stronger the preference for being the first
   3830    operand.  */
   3831 
   3832 int
   3833 commutative_operand_precedence (rtx op)
   3834 {
   3835   enum rtx_code code = GET_CODE (op);
   3836 
   3837   /* Constants always become the second operand.  Prefer "nice" constants.  */
   3838   if (code == CONST_INT)
   3839     return -10;
   3840   if (code == CONST_WIDE_INT)
   3841     return -9;
   3842   if (code == CONST_POLY_INT)
   3843     return -8;
   3844   if (code == CONST_DOUBLE)
   3845     return -8;
   3846   if (code == CONST_FIXED)
   3847     return -8;
   3848   op = avoid_constant_pool_reference (op);
   3849   code = GET_CODE (op);
   3850 
   3851   switch (GET_RTX_CLASS (code))
   3852     {
   3853     case RTX_CONST_OBJ:
   3854       if (code == CONST_INT)
   3855 	return -7;
   3856       if (code == CONST_WIDE_INT)
   3857 	return -6;
   3858       if (code == CONST_POLY_INT)
   3859 	return -5;
   3860       if (code == CONST_DOUBLE)
   3861 	return -5;
   3862       if (code == CONST_FIXED)
   3863 	return -5;
   3864       return -4;
   3865 
   3866     case RTX_EXTRA:
   3867       /* SUBREGs of objects should come second.  */
   3868       if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
   3869         return -3;
   3870       return 0;
   3871 
   3872     case RTX_OBJ:
   3873       /* Complex expressions should be the first, so decrease priority
   3874          of objects.  Prefer pointer objects over non pointer objects.  */
   3875       if ((REG_P (op) && REG_POINTER (op))
   3876 	  || (MEM_P (op) && MEM_POINTER (op)))
   3877 	return -1;
   3878       return -2;
   3879 
   3880     case RTX_COMM_ARITH:
   3881       /* Prefer operands that are themselves commutative to be first.
   3882          This helps to make things linear.  In particular,
   3883          (and (and (reg) (reg)) (not (reg))) is canonical.  */
   3884       return 4;
   3885 
   3886     case RTX_BIN_ARITH:
   3887       /* If only one operand is a binary expression, it will be the first
   3888          operand.  In particular,  (plus (minus (reg) (reg)) (neg (reg)))
   3889          is canonical, although it will usually be further simplified.  */
   3890       return 2;
   3891 
   3892     case RTX_UNARY:
   3893       /* Then prefer NEG and NOT.  */
   3894       if (code == NEG || code == NOT)
   3895         return 1;
   3896       /* FALLTHRU */
   3897 
   3898     default:
   3899       return 0;
   3900     }
   3901 }
   3902 
   3903 /* Return 1 iff it is necessary to swap operands of commutative operation
   3904    in order to canonicalize expression.  */
   3905 
   3906 bool
   3907 swap_commutative_operands_p (rtx x, rtx y)
   3908 {
   3909   return (commutative_operand_precedence (x)
   3910 	  < commutative_operand_precedence (y));
   3911 }
   3912 
   3913 /* Return 1 if X is an autoincrement side effect and the register is
   3914    not the stack pointer.  */
   3915 int
   3916 auto_inc_p (const_rtx x)
   3917 {
   3918   switch (GET_CODE (x))
   3919     {
   3920     case PRE_INC:
   3921     case POST_INC:
   3922     case PRE_DEC:
   3923     case POST_DEC:
   3924     case PRE_MODIFY:
   3925     case POST_MODIFY:
   3926       /* There are no REG_INC notes for SP.  */
   3927       if (XEXP (x, 0) != stack_pointer_rtx)
   3928 	return 1;
   3929     default:
   3930       break;
   3931     }
   3932   return 0;
   3933 }
   3934 
   3935 /* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
   3936 int
   3937 loc_mentioned_in_p (rtx *loc, const_rtx in)
   3938 {
   3939   enum rtx_code code;
   3940   const char *fmt;
   3941   int i, j;
   3942 
   3943   if (!in)
   3944     return 0;
   3945 
   3946   code = GET_CODE (in);
   3947   fmt = GET_RTX_FORMAT (code);
   3948   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   3949     {
   3950       if (fmt[i] == 'e')
   3951 	{
   3952 	  if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
   3953 	    return 1;
   3954 	}
   3955       else if (fmt[i] == 'E')
   3956 	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
   3957 	  if (loc == &XVECEXP (in, i, j)
   3958 	      || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
   3959 	    return 1;
   3960     }
   3961   return 0;
   3962 }
   3963 
/* Reinterpret a subreg as a bit extraction from an integer and return
   the position of the least significant bit of the extracted value.
   In other words, if the extraction were performed as a shift right
   and mask, return the number of bits to shift right.

   The outer value of the subreg has OUTER_BYTES bytes and starts at
   byte offset SUBREG_BYTE within an inner value of INNER_BYTES bytes.  */

poly_uint64
subreg_size_lsb (poly_uint64 outer_bytes,
		 poly_uint64 inner_bytes,
		 poly_uint64 subreg_byte)
{
  poly_uint64 subreg_end, trailing_bytes, byte_pos;

  /* A paradoxical subreg begins at bit position 0.  */
  gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
  if (maybe_gt (outer_bytes, inner_bytes))
    {
      gcc_checking_assert (known_eq (subreg_byte, 0U));
      return 0;
    }

  /* SUBREG_END is the first byte of the inner value past the subreg;
     TRAILING_BYTES is how many inner bytes follow it.  */
  subreg_end = subreg_byte + outer_bytes;
  trailing_bytes = inner_bytes - subreg_end;
  if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
    /* Fully big-endian: the lsb lives after the subreg in memory.  */
    byte_pos = trailing_bytes;
  else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
    /* Fully little-endian: the lsb position is the byte offset itself.  */
    byte_pos = subreg_byte;
  else
    {
      /* When bytes and words have opposite endianness, we must be able
	 to split offsets into words and bytes at compile time.  */
      poly_uint64 leading_word_part
	= force_align_down (subreg_byte, UNITS_PER_WORD);
      poly_uint64 trailing_word_part
	= force_align_down (trailing_bytes, UNITS_PER_WORD);
      /* If the subreg crosses a word boundary ensure that
	 it also begins and ends on a word boundary.  */
      gcc_assert (known_le (subreg_end - leading_word_part,
			    (unsigned int) UNITS_PER_WORD)
		  || (known_eq (leading_word_part, subreg_byte)
		      && known_eq (trailing_word_part, trailing_bytes)));
      if (WORDS_BIG_ENDIAN)
	byte_pos = trailing_word_part + (subreg_byte - leading_word_part);
      else
	byte_pos = leading_word_part + (trailing_bytes - trailing_word_part);
    }

  return byte_pos * BITS_PER_UNIT;
}
   4015 
   4016 /* Given a subreg X, return the bit offset where the subreg begins
   4017    (counting from the least significant bit of the reg).  */
   4018 
   4019 poly_uint64
   4020 subreg_lsb (const_rtx x)
   4021 {
   4022   return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
   4023 		       SUBREG_BYTE (x));
   4024 }
   4025 
/* Return the subreg byte offset for a subreg whose outer value has
   OUTER_BYTES bytes, whose inner value has INNER_BYTES bytes, and where
   there are LSB_SHIFT *bits* between the lsb of the outer value and the
   lsb of the inner value.  This is the inverse of the calculation
   performed by subreg_lsb_1 (which converts byte offsets to bit shifts).  */

poly_uint64
subreg_size_offset_from_lsb (poly_uint64 outer_bytes, poly_uint64 inner_bytes,
			     poly_uint64 lsb_shift)
{
  /* A paradoxical subreg begins at bit position 0.  */
  gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
  if (maybe_gt (outer_bytes, inner_bytes))
    {
      gcc_checking_assert (known_eq (lsb_shift, 0U));
      return 0;
    }

  /* LOWER_BYTES is the byte distance from the inner value's lsb to the
     subreg's lsb; UPPER_BYTES is what remains above the subreg.  */
  poly_uint64 lower_bytes = exact_div (lsb_shift, BITS_PER_UNIT);
  poly_uint64 upper_bytes = inner_bytes - (lower_bytes + outer_bytes);
  if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
    /* Fully big-endian: the byte offset counts from the most
       significant end.  */
    return upper_bytes;
  else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
    /* Fully little-endian: the byte offset equals the lsb distance.  */
    return lower_bytes;
  else
    {
      /* When bytes and words have opposite endianness, we must be able
	 to split offsets into words and bytes at compile time.  */
      poly_uint64 lower_word_part = force_align_down (lower_bytes,
						      UNITS_PER_WORD);
      poly_uint64 upper_word_part = force_align_down (upper_bytes,
						      UNITS_PER_WORD);
      if (WORDS_BIG_ENDIAN)
	return upper_word_part + (lower_bytes - lower_word_part);
      else
	return lower_word_part + (upper_bytes - upper_word_part);
    }
}
   4064 
/* Fill in information about a subreg of a hard register.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   info   - Pointer to structure to fill in.

   Rather than considering one particular inner register (and thus one
   particular "outer" register) in isolation, this function really uses
   XREGNO as a model for a sequence of isomorphic hard registers.  Thus the
   function does not check whether adding INFO->offset to XREGNO gives
   a valid hard register; even if INFO->offset + XREGNO is out of range,
   there might be another register of the same type that is in range.
   Likewise it doesn't check whether targetm.hard_regno_mode_ok accepts
   the new register, since that can depend on things like whether the final
   register number is even or odd.  Callers that want to check whether
   this particular subreg can be replaced by a simple (reg ...) should
   use simplify_subreg_regno.  */

void
subreg_get_info (unsigned int xregno, machine_mode xmode,
		 poly_uint64 offset, machine_mode ymode,
		 struct subreg_info *info)
{
  unsigned int nregs_xmode, nregs_ymode;

  /* Only hard registers are meaningful here.  */
  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  poly_uint64 xsize = GET_MODE_SIZE (xmode);
  poly_uint64 ysize = GET_MODE_SIZE (ymode);

  /* RKNOWN becomes true once info->representable_p has been decided.  */
  bool rknown = false;

  /* If the register representation of a non-scalar mode has holes in it,
     we expect the scalar units to be concatenated together, with the holes
     distributed evenly among the scalar units.  Each scalar unit must occupy
     at least one register.  */
  if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
    {
      /* As a consequence, we must be dealing with a constant number of
	 scalars, and thus a constant offset and number of units.  */
      HOST_WIDE_INT coffset = offset.to_constant ();
      HOST_WIDE_INT cysize = ysize.to_constant ();
      nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
      unsigned int nunits = GET_MODE_NUNITS (xmode).to_constant ();
      scalar_mode xmode_unit = GET_MODE_INNER (xmode);
      gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
      gcc_assert (nregs_xmode
		  == (nunits
		      * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
      gcc_assert (hard_regno_nregs (xregno, xmode)
		  == hard_regno_nregs (xregno, xmode_unit) * nunits);

      /* You can only ask for a SUBREG of a value with holes in the middle
	 if you don't cross the holes.  (Such a SUBREG should be done by
	 picking a different register class, or doing it in memory if
	 necessary.)  An example of a value with holes is XCmode on 32-bit
	 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
	 3 for each part, but in memory it's two 128-bit parts.
	 Padding is assumed to be at the end (not necessarily the 'high part')
	 of each unit.  */
      if ((coffset / GET_MODE_SIZE (xmode_unit) + 1 < nunits)
	  && (coffset / GET_MODE_SIZE (xmode_unit)
	      != ((coffset + cysize - 1) / GET_MODE_SIZE (xmode_unit))))
	{
	  /* The subreg straddles a hole, so it cannot be represented.  */
	  info->representable_p = false;
	  rknown = true;
	}
    }
  else
    nregs_xmode = hard_regno_nregs (xregno, xmode);

  nregs_ymode = hard_regno_nregs (xregno, ymode);

  /* Subreg sizes must be ordered, so that we can tell whether they are
     partial, paradoxical or complete.  */
  gcc_checking_assert (ordered_p (xsize, ysize));

  /* Paradoxical subregs are otherwise valid.  */
  if (!rknown && known_eq (offset, 0U) && maybe_gt (ysize, xsize))
    {
      info->representable_p = true;
      /* If this is a big endian paradoxical subreg, which uses more
	 actual hard registers than the original register, we must
	 return a negative offset so that we find the proper highpart
	 of the register.

	 We assume that the ordering of registers within a multi-register
	 value has a consistent endianness: if bytes and register words
	 have different endianness, the hard registers that make up a
	 multi-register value must be at least word-sized.  */
      if (REG_WORDS_BIG_ENDIAN)
	info->offset = (int) nregs_xmode - (int) nregs_ymode;
      else
	info->offset = 0;
      info->nregs = nregs_ymode;
      return;
    }

  /* If registers store different numbers of bits in the different
     modes, we cannot generally form this subreg.  */
  poly_uint64 regsize_xmode, regsize_ymode;
  if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
      && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
      && multiple_p (xsize, nregs_xmode, &regsize_xmode)
      && multiple_p (ysize, nregs_ymode, &regsize_ymode))
    {
      if (!rknown
	  && ((nregs_ymode > 1 && maybe_gt (regsize_xmode, regsize_ymode))
	      || (nregs_xmode > 1 && maybe_gt (regsize_ymode, regsize_xmode))))
	{
	  info->representable_p = false;
	  if (!can_div_away_from_zero_p (ysize, regsize_xmode, &info->nregs)
	      || !can_div_trunc_p (offset, regsize_xmode, &info->offset))
	    /* Checked by validate_subreg.  We must know at compile time
	       which inner registers are being accessed.  */
	    gcc_unreachable ();
	  return;
	}
      /* It's not valid to extract a subreg of mode YMODE at OFFSET that
	 would go outside of XMODE.  */
      if (!rknown && maybe_gt (ysize + offset, xsize))
	{
	  info->representable_p = false;
	  info->nregs = nregs_ymode;
	  if (!can_div_trunc_p (offset, regsize_xmode, &info->offset))
	    /* Checked by validate_subreg.  We must know at compile time
	       which inner registers are being accessed.  */
	    gcc_unreachable ();
	  return;
	}
      /* Quick exit for the simple and common case of extracting whole
	 subregisters from a multiregister value.  */
      /* ??? It would be better to integrate this into the code below,
	 if we can generalize the concept enough and figure out how
	 odd-sized modes can coexist with the other weird cases we support.  */
      HOST_WIDE_INT count;
      if (!rknown
	  && WORDS_BIG_ENDIAN == REG_WORDS_BIG_ENDIAN
	  && known_eq (regsize_xmode, regsize_ymode)
	  && constant_multiple_p (offset, regsize_ymode, &count))
	{
	  info->representable_p = true;
	  info->nregs = nregs_ymode;
	  info->offset = count;
	  gcc_assert (info->offset + info->nregs <= (int) nregs_xmode);
	  return;
	}
    }

  /* Lowpart subregs are otherwise valid.  */
  if (!rknown && known_eq (offset, subreg_lowpart_offset (ymode, xmode)))
    {
      info->representable_p = true;
      rknown = true;

      if (known_eq (offset, 0U) || nregs_xmode == nregs_ymode)
	{
	  info->offset = 0;
	  info->nregs = nregs_ymode;
	  return;
	}
    }

  /* Set NUM_BLOCKS to the number of independently-representable YMODE
     values there are in (reg:XMODE XREGNO).  We can view the register
     as consisting of this number of independent "blocks", where each
     block occupies NREGS_YMODE registers and contains exactly one
     representable YMODE value.  */
  gcc_assert ((nregs_xmode % nregs_ymode) == 0);
  unsigned int num_blocks = nregs_xmode / nregs_ymode;

  /* Calculate the number of bytes in each block.  This must always
     be exact, otherwise we don't know how to verify the constraint.
     These conditions may be relaxed but subreg_regno_offset would
     need to be redesigned.  */
  poly_uint64 bytes_per_block = exact_div (xsize, num_blocks);

  /* Get the number of the first block that contains the subreg and the byte
     offset of the subreg from the start of that block.  */
  unsigned int block_number;
  poly_uint64 subblock_offset;
  if (!can_div_trunc_p (offset, bytes_per_block, &block_number,
			&subblock_offset))
    /* Checked by validate_subreg.  We must know at compile time which
       inner registers are being accessed.  */
    gcc_unreachable ();

  if (!rknown)
    {
      /* Only the lowpart of each block is representable.  */
      info->representable_p
	= known_eq (subblock_offset,
		    subreg_size_lowpart_offset (ysize, bytes_per_block));
      rknown = true;
    }

  /* We assume that the ordering of registers within a multi-register
     value has a consistent endianness: if bytes and register words
     have different endianness, the hard registers that make up a
     multi-register value must be at least word-sized.  */
  if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN)
    /* The block number we calculated above followed memory endianness.
       Convert it to register endianness by counting back from the end.
       (Note that, because of the assumption above, each block must be
       at least word-sized.)  */
    info->offset = (num_blocks - block_number - 1) * nregs_ymode;
  else
    info->offset = block_number * nregs_ymode;
  info->nregs = nregs_ymode;
}
   4276 
   4277 /* This function returns the regno offset of a subreg expression.
   4278    xregno - A regno of an inner hard subreg_reg (or what will become one).
   4279    xmode  - The mode of xregno.
   4280    offset - The byte offset.
   4281    ymode  - The mode of a top level SUBREG (or what may become one).
   4282    RETURN - The regno offset which would be used.  */
   4283 unsigned int
   4284 subreg_regno_offset (unsigned int xregno, machine_mode xmode,
   4285 		     poly_uint64 offset, machine_mode ymode)
   4286 {
   4287   struct subreg_info info;
   4288   subreg_get_info (xregno, xmode, offset, ymode, &info);
   4289   return info.offset;
   4290 }
   4291 
   4292 /* This function returns true when the offset is representable via
   4293    subreg_offset in the given regno.
   4294    xregno - A regno of an inner hard subreg_reg (or what will become one).
   4295    xmode  - The mode of xregno.
   4296    offset - The byte offset.
   4297    ymode  - The mode of a top level SUBREG (or what may become one).
   4298    RETURN - Whether the offset is representable.  */
   4299 bool
   4300 subreg_offset_representable_p (unsigned int xregno, machine_mode xmode,
   4301 			       poly_uint64 offset, machine_mode ymode)
   4302 {
   4303   struct subreg_info info;
   4304   subreg_get_info (xregno, xmode, offset, ymode, &info);
   4305   return info.representable_p;
   4306 }
   4307 
   4308 /* Return the number of a YMODE register to which
   4309 
   4310        (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
   4311 
   4312    can be simplified.  Return -1 if the subreg can't be simplified.
   4313 
   4314    XREGNO is a hard register number.  */
   4315 
   4316 int
   4317 simplify_subreg_regno (unsigned int xregno, machine_mode xmode,
   4318 		       poly_uint64 offset, machine_mode ymode)
   4319 {
   4320   struct subreg_info info;
   4321   unsigned int yregno;
   4322 
   4323   /* Give the backend a chance to disallow the mode change.  */
   4324   if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
   4325       && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
   4326       && !REG_CAN_CHANGE_MODE_P (xregno, xmode, ymode))
   4327     return -1;
   4328 
   4329   /* We shouldn't simplify stack-related registers.  */
   4330   if ((!reload_completed || frame_pointer_needed)
   4331       && xregno == FRAME_POINTER_REGNUM)
   4332     return -1;
   4333 
   4334   if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
   4335       && xregno == ARG_POINTER_REGNUM)
   4336     return -1;
   4337 
   4338   if (xregno == STACK_POINTER_REGNUM
   4339       /* We should convert hard stack register in LRA if it is
   4340 	 possible.  */
   4341       && ! lra_in_progress)
   4342     return -1;
   4343 
   4344   /* Try to get the register offset.  */
   4345   subreg_get_info (xregno, xmode, offset, ymode, &info);
   4346   if (!info.representable_p)
   4347     return -1;
   4348 
   4349   /* Make sure that the offsetted register value is in range.  */
   4350   yregno = xregno + info.offset;
   4351   if (!HARD_REGISTER_NUM_P (yregno))
   4352     return -1;
   4353 
   4354   /* See whether (reg:YMODE YREGNO) is valid.
   4355 
   4356      ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
   4357      This is a kludge to work around how complex FP arguments are passed
   4358      on IA-64 and should be fixed.  See PR target/49226.  */
   4359   if (!targetm.hard_regno_mode_ok (yregno, ymode)
   4360       && targetm.hard_regno_mode_ok (xregno, xmode))
   4361     return -1;
   4362 
   4363   return (int) yregno;
   4364 }
   4365 
   4366 /* A wrapper around simplify_subreg_regno that uses subreg_lowpart_offset
   4367    (xmode, ymode) as the offset.  */
   4368 
   4369 int
   4370 lowpart_subreg_regno (unsigned int regno, machine_mode xmode,
   4371 		      machine_mode ymode)
   4372 {
   4373   poly_uint64 offset = subreg_lowpart_offset (xmode, ymode);
   4374   return simplify_subreg_regno (regno, xmode, offset, ymode);
   4375 }
   4376 
   4377 /* Return the final regno that a subreg expression refers to.  */
   4378 unsigned int
   4379 subreg_regno (const_rtx x)
   4380 {
   4381   unsigned int ret;
   4382   rtx subreg = SUBREG_REG (x);
   4383   int regno = REGNO (subreg);
   4384 
   4385   ret = regno + subreg_regno_offset (regno,
   4386 				     GET_MODE (subreg),
   4387 				     SUBREG_BYTE (x),
   4388 				     GET_MODE (x));
   4389   return ret;
   4390 
   4391 }
   4392 
   4393 /* Return the number of registers that a subreg expression refers
   4394    to.  */
   4395 unsigned int
   4396 subreg_nregs (const_rtx x)
   4397 {
   4398   return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
   4399 }
   4400 
   4401 /* Return the number of registers that a subreg REG with REGNO
   4402    expression refers to.  This is a copy of the rtlanal.cc:subreg_nregs
   4403    changed so that the regno can be passed in. */
   4404 
   4405 unsigned int
   4406 subreg_nregs_with_regno (unsigned int regno, const_rtx x)
   4407 {
   4408   struct subreg_info info;
   4409   rtx subreg = SUBREG_REG (x);
   4410 
   4411   subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
   4412 		   &info);
   4413   return info.nregs;
   4414 }
   4415 
/* Communication between find_first_parameter_load and its
   note_stores callback parms_set.  */
struct parms_set_data
{
  int nregs;		/* Number of parameter registers not yet seen set.  */
  HARD_REG_SET regs;	/* Parameter registers not yet seen set.  */
};
   4421 
   4422 /* Helper function for noticing stores to parameter registers.  */
   4423 static void
   4424 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
   4425 {
   4426   struct parms_set_data *const d = (struct parms_set_data *) data;
   4427   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
   4428       && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
   4429     {
   4430       CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
   4431       d->nregs--;
   4432     }
   4433 }
   4434 
/* Look backward for first parameter to be loaded.
   Note that loads of all parameters will not necessarily be
   found if CSE has eliminated some of them (e.g., an argument
   to the outer function is passed down as a parameter).
   Do not skip BOUNDARY.  */
rtx_insn *
find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary)
{
  struct parms_set_data parm;
  rtx p;
  rtx_insn *before, *first_set;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& REG_P (XEXP (XEXP (p, 0), 0))
	&& !STATIC_CHAIN_REG_P (XEXP (XEXP (p, 0), 0)))
      {
	gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);

	/* We only care about registers which can hold function
	   arguments.  */
	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
	  continue;

	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
	parm.nregs++;
      }
  before = call_insn;
  first_set = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
         another.  Stop in that case.  */
      if (CALL_P (before))
	break;

      /* Our caller needs either ensure that we will find all sets
         (in case code has not been optimized yet), or take care
         for possible labels in a way by setting boundary to preceding
         CODE_LABEL.  */
      if (LABEL_P (before))
	{
	  gcc_assert (before == boundary);
	  break;
	}

      if (INSN_P (before))
	{
	  int nregs_old = parm.nregs;
	  /* parms_set clears bits in parm.regs and decrements parm.nregs
	     for each parameter register this insn sets.  */
	  note_stores (before, parms_set, &parm);
	  /* If we found something that did not set a parameter reg,
	     we're done.  Do not keep going, as that might result
	     in hoisting an insn before the setting of a pseudo
	     that is used by the hoisted insn. */
	  if (nregs_old != parm.nregs)
	    first_set = before;
	  else
	    break;
	}
    }
  return first_set;
}
   4506 
   4507 /* Return true if we should avoid inserting code between INSN and preceding
   4508    call instruction.  */
   4509 
   4510 bool
   4511 keep_with_call_p (const rtx_insn *insn)
   4512 {
   4513   rtx set;
   4514 
   4515   if (INSN_P (insn) && (set = single_set (insn)) != NULL)
   4516     {
   4517       if (REG_P (SET_DEST (set))
   4518 	  && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
   4519 	  && fixed_regs[REGNO (SET_DEST (set))]
   4520 	  && general_operand (SET_SRC (set), VOIDmode))
   4521 	return true;
   4522       if (REG_P (SET_SRC (set))
   4523 	  && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
   4524 	  && REG_P (SET_DEST (set))
   4525 	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
   4526 	return true;
   4527       /* There may be a stack pop just after the call and before the store
   4528 	 of the return register.  Search for the actual store when deciding
   4529 	 if we can break or not.  */
   4530       if (SET_DEST (set) == stack_pointer_rtx)
   4531 	{
   4532 	  /* This CONST_CAST is okay because next_nonnote_insn just
   4533 	     returns its argument and we assign it to a const_rtx
   4534 	     variable.  */
   4535 	  const rtx_insn *i2
   4536 	    = next_nonnote_insn (const_cast<rtx_insn *> (insn));
   4537 	  if (i2 && keep_with_call_p (i2))
   4538 	    return true;
   4539 	}
   4540     }
   4541   return false;
   4542 }
   4543 
   4544 /* Return true if LABEL is a target of JUMP_INSN.  This applies only
   4545    to non-complex jumps.  That is, direct unconditional, conditional,
   4546    and tablejumps, but not computed jumps or returns.  It also does
   4547    not apply to the fallthru case of a conditional jump.  */
   4548 
   4549 bool
   4550 label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn)
   4551 {
   4552   rtx tmp = JUMP_LABEL (jump_insn);
   4553   rtx_jump_table_data *table;
   4554 
   4555   if (label == tmp)
   4556     return true;
   4557 
   4558   if (tablejump_p (jump_insn, NULL, &table))
   4559     {
   4560       rtvec vec = table->get_labels ();
   4561       int i, veclen = GET_NUM_ELEM (vec);
   4562 
   4563       for (i = 0; i < veclen; ++i)
   4564 	if (XEXP (RTVEC_ELT (vec, i), 0) == label)
   4565 	  return true;
   4566     }
   4567 
   4568   if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
   4569     return true;
   4570 
   4571   return false;
   4572 }
   4573 
   4574 
   4575 /* Return an estimate of the cost of computing rtx X.
   4577    One use is in cse, to decide which expression to keep in the hash table.
   4578    Another is in rtl generation, to pick the cheapest way to multiply.
   4579    Other uses like the latter are expected in the future.
   4580 
   4581    X appears as operand OPNO in an expression with code OUTER_CODE.
   4582    SPEED specifies whether costs optimized for speed or size should
   4583    be returned.  */
   4584 
int
rtx_cost (rtx x, machine_mode mode, enum rtx_code outer_code,
	  int opno, bool speed)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;
  int factor;
  unsigned mode_size;

  /* A null rtx has no cost.  */
  if (x == 0)
    return 0;

  if (GET_CODE (x) == SET)
    /* A SET doesn't have a mode, so let's look at the SET_DEST to get
       the mode for the factor.  */
    mode = GET_MODE (SET_DEST (x));
  else if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  mode_size = estimated_poly_value (GET_MODE_SIZE (mode));

  /* A size N times larger than UNITS_PER_WORD likely needs N times as
     many insns, taking N times as long.  */
  factor = mode_size > UNITS_PER_WORD ? mode_size / UNITS_PER_WORD : 1;

  /* Compute the default costs of certain things.
     Note that targetm.rtx_costs can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Multiplication has time-complexity O(N*N), where N is the
	 number of units (translated from digits) when using
	 schoolbook long multiplication.  */
      total = factor * factor * COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      /* Similarly, complexity for schoolbook long division.  */
      total = factor * factor * COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in combine.cc as a marker.  */
      total = 0;
      break;
    default:
      total = factor * COSTS_N_INSNS (1);
    }

  /* A few codes are handled specially; everything else gives the target
     hook a chance to supply the final answer (TOTAL computed above is
     only the default it may override).  */
  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      total = 0;
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (!targetm.modes_tieable_p (mode, GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2 + factor);
      break;

    case TRUNCATE:
      if (targetm.modes_tieable_p (mode, GET_MODE (XEXP (x, 0))))
	{
	  total = 0;
	  break;
	}
      /* FALLTHRU */
    default:
      if (targetm.rtx_costs (x, mode, outer_code, opno, &total, speed))
	return total;
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), mode, code, i, speed);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), mode, code, i, speed);

  return total;
}
   4678 
   4679 /* Fill in the structure C with information about both speed and size rtx
   4680    costs for X, which is operand OPNO in an expression with code OUTER.  */
   4681 
   4682 void
   4683 get_full_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno,
   4684 		   struct full_rtx_costs *c)
   4685 {
   4686   c->speed = rtx_cost (x, mode, outer, opno, true);
   4687   c->size = rtx_cost (x, mode, outer, opno, false);
   4688 }
   4689 
   4690 
   4691 /* Return cost of address expression X.
   4693    Expect that X is properly formed address reference.
   4694 
   4695    SPEED parameter specify whether costs optimized for speed or size should
   4696    be returned.  */
   4697 
   4698 int
   4699 address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed)
   4700 {
   4701   /* We may be asked for cost of various unusual addresses, such as operands
   4702      of push instruction.  It is not worthwhile to complicate writing
   4703      of the target hook by such cases.  */
   4704 
   4705   if (!memory_address_addr_space_p (mode, x, as))
   4706     return 1000;
   4707 
   4708   return targetm.address_cost (x, mode, as, speed);
   4709 }
   4710 
   4711 /* If the target doesn't override, compute the cost as with arithmetic.  */
   4712 
   4713 int
   4714 default_address_cost (rtx x, machine_mode, addr_space_t, bool speed)
   4715 {
   4716   return rtx_cost (x, Pmode, MEM, 0, speed);
   4717 }
   4718 
   4719 
   4721 unsigned HOST_WIDE_INT
   4722 nonzero_bits (const_rtx x, machine_mode mode)
   4723 {
   4724   if (mode == VOIDmode)
   4725     mode = GET_MODE (x);
   4726   scalar_int_mode int_mode;
   4727   if (!is_a <scalar_int_mode> (mode, &int_mode))
   4728     return GET_MODE_MASK (mode);
   4729   return cached_nonzero_bits (x, int_mode, NULL_RTX, VOIDmode, 0);
   4730 }
   4731 
   4732 unsigned int
   4733 num_sign_bit_copies (const_rtx x, machine_mode mode)
   4734 {
   4735   if (mode == VOIDmode)
   4736     mode = GET_MODE (x);
   4737   scalar_int_mode int_mode;
   4738   if (!is_a <scalar_int_mode> (mode, &int_mode))
   4739     return 1;
   4740   return cached_num_sign_bit_copies (x, int_mode, NULL_RTX, VOIDmode, 0);
   4741 }
   4742 
   4743 /* Return true if nonzero_bits1 might recurse into both operands
   4744    of X.  */
   4745 
   4746 static inline bool
   4747 nonzero_bits_binary_arith_p (const_rtx x)
   4748 {
   4749   if (!ARITHMETIC_P (x))
   4750     return false;
   4751   switch (GET_CODE (x))
   4752     {
   4753     case AND:
   4754     case XOR:
   4755     case IOR:
   4756     case UMIN:
   4757     case UMAX:
   4758     case SMIN:
   4759     case SMAX:
   4760     case PLUS:
   4761     case MINUS:
   4762     case MULT:
   4763     case DIV:
   4764     case UDIV:
   4765     case MOD:
   4766     case UMOD:
   4767       return true;
   4768     default:
   4769       return false;
   4770     }
   4771 }
   4772 
   4773 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
   4774    It avoids exponential behavior in nonzero_bits1 when X has
   4775    identical subexpressions on the first or the second level.  */
   4776 
   4777 static unsigned HOST_WIDE_INT
   4778 cached_nonzero_bits (const_rtx x, scalar_int_mode mode, const_rtx known_x,
   4779 		     machine_mode known_mode,
   4780 		     unsigned HOST_WIDE_INT known_ret)
   4781 {
   4782   if (x == known_x && mode == known_mode)
   4783     return known_ret;
   4784 
   4785   /* Try to find identical subexpressions.  If found call
   4786      nonzero_bits1 on X with the subexpressions as KNOWN_X and the
   4787      precomputed value for the subexpression as KNOWN_RET.  */
   4788 
   4789   if (nonzero_bits_binary_arith_p (x))
   4790     {
   4791       rtx x0 = XEXP (x, 0);
   4792       rtx x1 = XEXP (x, 1);
   4793 
   4794       /* Check the first level.  */
   4795       if (x0 == x1)
   4796 	return nonzero_bits1 (x, mode, x0, mode,
   4797 			      cached_nonzero_bits (x0, mode, known_x,
   4798 						   known_mode, known_ret));
   4799 
   4800       /* Check the second level.  */
   4801       if (nonzero_bits_binary_arith_p (x0)
   4802 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
   4803 	return nonzero_bits1 (x, mode, x1, mode,
   4804 			      cached_nonzero_bits (x1, mode, known_x,
   4805 						   known_mode, known_ret));
   4806 
   4807       if (nonzero_bits_binary_arith_p (x1)
   4808 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
   4809 	return nonzero_bits1 (x, mode, x0, mode,
   4810 			      cached_nonzero_bits (x0, mode, known_x,
   4811 						   known_mode, known_ret));
   4812     }
   4813 
   4814   return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
   4815 }
   4816 
   4817 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   4818    We don't let nonzero_bits recur into num_sign_bit_copies, because that
   4819    is less useful.  We can't allow both, because that results in exponential
   4820    run time recursion.  There is a nullstone testcase that triggered
   4821    this.  This macro avoids accidental uses of num_sign_bit_copies.  */
   4822 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
   4823 
   4824 /* Given an expression, X, compute which bits in X can be nonzero.
   4825    We don't care about bits outside of those defined in MODE.
   4826 
   4827    For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
   4828    an arithmetic operation, we can do better.  */
   4829 
static unsigned HOST_WIDE_INT
nonzero_bits1 (const_rtx x, scalar_int_mode mode, const_rtx known_x,
	       machine_mode known_mode,
	       unsigned HOST_WIDE_INT known_ret)
{
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
  unsigned HOST_WIDE_INT inner_nz;
  enum rtx_code code = GET_CODE (x);
  machine_mode inner_mode;
  unsigned int inner_width;
  scalar_int_mode xmode;

  unsigned int mode_width = GET_MODE_PRECISION (mode);

  /* A constant's nonzero bits are simply its value, except that on
     targets where short immediates are sign-extended a positive value
     with its mode's sign bit set also shows the extension bits.  */
  if (CONST_INT_P (x))
    {
      if (SHORT_IMMEDIATES_SIGN_EXTEND
	  && INTVAL (x) > 0
	  && mode_width < BITS_PER_WORD
	  && (UINTVAL (x) & (HOST_WIDE_INT_1U << (mode_width - 1))) != 0)
	return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width);

      return UINTVAL (x);
    }

  /* Only scalar integer rtxes are analyzed further.  */
  if (!is_a <scalar_int_mode> (GET_MODE (x), &xmode))
    return nonzero;
  unsigned int xmode_width = GET_MODE_PRECISION (xmode);

  /* If X is wider than MODE, use its mode instead.  */
  if (xmode_width > mode_width)
    {
      mode = xmode;
      nonzero = GET_MODE_MASK (mode);
      mode_width = xmode_width;
    }

  if (mode_width > HOST_BITS_PER_WIDE_INT)
    /* Our only callers in this case look for single bit values.  So
       just return the mode mask.  Those tests will then be false.  */
    return nonzero;

  /* If MODE is wider than X, but both are a single word for both the host
     and target machines, we can compute this from which bits of the object
     might be nonzero in its own mode, taking into account the fact that, on
     CISC machines, accessing an object in a wider mode generally causes the
     high-order bits to become undefined, so they are not known to be zero.
     We extend this reasoning to RISC machines for operations that might not
     operate on the full registers.  */
  if (mode_width > xmode_width
      && xmode_width <= BITS_PER_WORD
      && xmode_width <= HOST_BITS_PER_WIDE_INT
      && !(WORD_REGISTER_OPERATIONS && word_register_operation_p (x)))
    {
      nonzero &= cached_nonzero_bits (x, xmode,
				      known_x, known_mode, known_ret);
      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (xmode);
      return nonzero;
    }

  /* Please keep nonzero_bits_binary_arith_p above in sync with
     the code in the switch below.  */
  switch (code)
    {
    case REG:
#if defined(POINTERS_EXTEND_UNSIGNED)
      /* If pointers extend unsigned and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be zero.  */
      /* As we do not know which address space the pointer is referring to,
	 we can do this only if the target does not support different pointer
	 or address modes depending on the address space.  */
      if (target_default_pointer_address_modes_p ()
	  && POINTERS_EXTEND_UNSIGNED
	  && xmode == Pmode
	  && REG_POINTER (x)
	  && !targetm.have_ptr_extend ())
	nonzero &= GET_MODE_MASK (ptr_mode);
#endif

      /* Include declared information about alignment of pointers.  */
      /* ??? We don't properly preserve REG_POINTER changes across
	 pointer-to-integer casts, so we can't trust it except for
	 things that we know must be pointers.  See execute/960116-1.c.  */
      if ((x == stack_pointer_rtx
	   || x == frame_pointer_rtx
	   || x == arg_pointer_rtx)
	  && REGNO_POINTER_ALIGN (REGNO (x)))
	{
	  unsigned HOST_WIDE_INT alignment
	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;

#ifdef PUSH_ROUNDING
	  /* If PUSH_ROUNDING is defined, it is possible for the
	     stack to be momentarily aligned only to that amount,
	     so we pick the least alignment.  */
	  if (x == stack_pointer_rtx && targetm.calls.push_argument (0))
	    {
	      poly_uint64 rounded_1 = PUSH_ROUNDING (poly_int64 (1));
	      alignment = MIN (known_alignment (rounded_1), alignment);
	    }
#endif

	  nonzero &= ~(alignment - 1);
	}

      {
	unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
	rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, xmode, mode,
						  &nonzero_for_hook);

	if (new_rtx)
	  nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
						   known_mode, known_ret);

	return nonzero_for_hook;
      }

    case MEM:
      /* In many, if not most, RISC machines, reading a byte from memory
	 zeros the rest of the register.  Noticing that fact saves a lot
	 of extra zero-extends.  */
      if (load_extend_op (xmode) == ZERO_EXTEND)
	nonzero &= GET_MODE_MASK (xmode);
      break;

    case EQ:  case NE:
    case UNEQ:  case LTGT:
    case GT:  case GTU:  case UNGT:
    case LT:  case LTU:  case UNLT:
    case GE:  case GEU:  case UNGE:
    case LE:  case LEU:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If this produces an integer result, we know which bits are set.
	 Code here used to clear bits outside the mode of X, but that is
	 now done above.  */
      /* Mind that MODE is the mode the caller wants to look at this
	 operation in, and not the actual operation mode.  We can wind
	 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
	 that describes the results of a vector compare.  */
      if (GET_MODE_CLASS (xmode) == MODE_INT
	  && mode_width <= HOST_BITS_PER_WIDE_INT)
	nonzero = STORE_FLAG_VALUE;
      break;

    case NEG:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
	 and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), xmode) == xmode_width)
	nonzero = 1;
#endif

      if (xmode_width < mode_width)
	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (xmode));
      break;

    case ABS:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
	 and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), xmode) == xmode_width)
	nonzero = 1;
#endif
      break;

    case TRUNCATE:
      nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
				       known_x, known_mode, known_ret)
		  & GET_MODE_MASK (mode));
      break;

    case ZERO_EXTEND:
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
				      known_x, known_mode, known_ret);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
      break;

    case SIGN_EXTEND:
      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
	 Otherwise, show all the bits in the outer mode but not the inner
	 may be nonzero.  */
      inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
				      known_x, known_mode, known_ret);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
	{
	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
	  if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
	    inner_nz |= (GET_MODE_MASK (mode)
			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
	}

      nonzero &= inner_nz;
      break;

    case AND:
      /* AND can only keep bits that may be nonzero in both operands.  */
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
				       known_x, known_mode, known_ret)
      		 & cached_nonzero_bits (XEXP (x, 1), mode,
					known_x, known_mode, known_ret);
      break;

    case XOR:   case IOR:
    case UMIN:  case UMAX:  case SMIN:  case SMAX:
      {
	unsigned HOST_WIDE_INT nonzero0
	   = cached_nonzero_bits (XEXP (x, 0), mode,
				  known_x, known_mode, known_ret);

	/* Don't call nonzero_bits for the second time if it cannot change
	   anything.  */
	if ((nonzero & nonzero0) != nonzero)
	  nonzero &= nonzero0
      		     | cached_nonzero_bits (XEXP (x, 1), mode,
					    known_x, known_mode, known_ret);
      }
      break;

    case PLUS:  case MINUS:
    case MULT:
    case DIV:   case UDIV:
    case MOD:   case UMOD:
      /* We can apply the rules of arithmetic to compute the number of
	 high- and low-order zero bits of these operations.  We start by
	 computing the width (position of the highest-order nonzero bit)
	 and the number of low-order zero bits for each value.  */
      {
	unsigned HOST_WIDE_INT nz0
	  = cached_nonzero_bits (XEXP (x, 0), mode,
				 known_x, known_mode, known_ret);
	unsigned HOST_WIDE_INT nz1
	  = cached_nonzero_bits (XEXP (x, 1), mode,
				 known_x, known_mode, known_ret);
	int sign_index = xmode_width - 1;
	int width0 = floor_log2 (nz0) + 1;
	int width1 = floor_log2 (nz1) + 1;
	int low0 = ctz_or_zero (nz0);
	int low1 = ctz_or_zero (nz1);
	unsigned HOST_WIDE_INT op0_maybe_minusp
	  = nz0 & (HOST_WIDE_INT_1U << sign_index);
	unsigned HOST_WIDE_INT op1_maybe_minusp
	  = nz1 & (HOST_WIDE_INT_1U << sign_index);
	unsigned int result_width = mode_width;
	int result_low = 0;

	switch (code)
	  {
	  case PLUS:
	    result_width = MAX (width0, width1) + 1;
	    result_low = MIN (low0, low1);
	    break;
	  case MINUS:
	    result_low = MIN (low0, low1);
	    break;
	  case MULT:
	    result_width = width0 + width1;
	    result_low = low0 + low1;
	    break;
	  case DIV:
	    if (width1 == 0)
	      break;
	    if (!op0_maybe_minusp && !op1_maybe_minusp)
	      result_width = width0;
	    break;
	  case UDIV:
	    if (width1 == 0)
	      break;
	    result_width = width0;
	    break;
	  case MOD:
	    if (width1 == 0)
	      break;
	    if (!op0_maybe_minusp && !op1_maybe_minusp)
	      result_width = MIN (width0, width1);
	    result_low = MIN (low0, low1);
	    break;
	  case UMOD:
	    if (width1 == 0)
	      break;
	    result_width = MIN (width0, width1);
	    result_low = MIN (low0, low1);
	    break;
	  default:
	    gcc_unreachable ();
	  }

	/* Note that mode_width <= HOST_BITS_PER_WIDE_INT, see above.  */
	if (result_width < mode_width)
	  nonzero &= (HOST_WIDE_INT_1U << result_width) - 1;

	if (result_low > 0)
	  {
	    if (result_low < HOST_BITS_PER_WIDE_INT)
	      nonzero &= ~((HOST_WIDE_INT_1U << result_low) - 1);
	    else
	      nonzero = 0;
	  }
      }
      break;

    case ZERO_EXTRACT:
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	nonzero &= (HOST_WIDE_INT_1U << INTVAL (XEXP (x, 1))) - 1;
      break;

    case SUBREG:
      /* If this is a SUBREG formed for a promoted variable that has
	 been zero-extended, we know that at least the high-order bits
	 are zero, though others might be too.  */
      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
	nonzero = GET_MODE_MASK (xmode)
		  & cached_nonzero_bits (SUBREG_REG (x), xmode,
					 known_x, known_mode, known_ret);

      /* If the inner mode is a single word for both the host and target
	 machines, we can compute this from which bits of the inner
	 object might be nonzero.  */
      inner_mode = GET_MODE (SUBREG_REG (x));
      if (GET_MODE_PRECISION (inner_mode).is_constant (&inner_width)
	  && inner_width <= BITS_PER_WORD
	  && inner_width <= HOST_BITS_PER_WIDE_INT)
	{
	  nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
					  known_x, known_mode, known_ret);

          /* On a typical CISC machine, accessing an object in a wider mode
	     causes the high-order bits to become undefined.  So they are
	     not known to be zero.

	     On a typical RISC machine, we only have to worry about the way
	     loads are extended.  Otherwise, if we get a reload for the inner
	     part, it may be loaded from the stack, and then we may lose all
	     the zero bits that existed before the store to the stack.  */
	  rtx_code extend_op;
	  if ((!WORD_REGISTER_OPERATIONS
	       || ((extend_op = load_extend_op (inner_mode)) == SIGN_EXTEND
		   ? val_signbit_known_set_p (inner_mode, nonzero)
		   : extend_op != ZERO_EXTEND)
	       || !MEM_P (SUBREG_REG (x)))
	      && xmode_width > inner_width)
	    nonzero
	      |= (GET_MODE_MASK (GET_MODE (x)) & ~GET_MODE_MASK (inner_mode));
	}
      break;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* The nonzero bits are in two classes: any bits within MODE
	 that aren't in xmode are always significant.  The rest of the
	 nonzero bits are those that are significant in the operand of
	 the shift when shifted the appropriate number of bits.  This
	 shows that high-order bits are cleared by the right shift and
	 low-order bits by left shifts.  */
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (XEXP (x, 1)) < xmode_width)
	{
	  int count = INTVAL (XEXP (x, 1));
	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (xmode);
	  unsigned HOST_WIDE_INT op_nonzero
	    = cached_nonzero_bits (XEXP (x, 0), mode,
				   known_x, known_mode, known_ret);
	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
	  unsigned HOST_WIDE_INT outer = 0;

	  if (mode_width > xmode_width)
	    outer = (op_nonzero & nonzero & ~mode_mask);

	  switch (code)
	    {
	    case ASHIFT:
	      inner <<= count;
	      break;

	    case LSHIFTRT:
	      inner >>= count;
	      break;

	    case ASHIFTRT:
	      inner >>= count;

	      /* If the sign bit may have been nonzero before the shift, we
		 need to mark all the places it could have been copied to
		 by the shift as possibly nonzero.  */
	      if (inner & (HOST_WIDE_INT_1U << (xmode_width - 1 - count)))
		inner |= (((HOST_WIDE_INT_1U << count) - 1)
			  << (xmode_width - count));
	      break;

	    case ROTATE:
	      inner = (inner << (count % xmode_width)
		       | (inner >> (xmode_width - (count % xmode_width))))
		      & mode_mask;
	      break;

	    case ROTATERT:
	      inner = (inner >> (count % xmode_width)
		       | (inner << (xmode_width - (count % xmode_width))))
		      & mode_mask;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  nonzero &= (outer | inner);
	}
      break;

    case FFS:
    case POPCOUNT:
      /* This is at most the number of bits in the mode.  */
      nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
      break;

    case CLZ:
      /* If CLZ has a known value at zero, then the nonzero bits are
	 that value, plus the number of bits in the mode minus one.  */
      if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
	nonzero
	  |= (HOST_WIDE_INT_1U << (floor_log2 (mode_width))) - 1;
      else
	nonzero = -1;
      break;

    case CTZ:
      /* If CTZ has a known value at zero, then the nonzero bits are
	 that value, plus the number of bits in the mode minus one.  */
      if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
	nonzero
	  |= (HOST_WIDE_INT_1U << (floor_log2 (mode_width))) - 1;
      else
	nonzero = -1;
      break;

    case CLRSB:
      /* This is at most the number of bits in the mode minus 1.  */
      nonzero = (HOST_WIDE_INT_1U << (floor_log2 (mode_width))) - 1;
      break;

    case PARITY:
      /* Parity is always 0 or 1.  */
      nonzero = 1;
      break;

    case IF_THEN_ELSE:
      {
	unsigned HOST_WIDE_INT nonzero_true
	  = cached_nonzero_bits (XEXP (x, 1), mode,
				 known_x, known_mode, known_ret);

	/* Don't call nonzero_bits for the second time if it cannot change
	   anything.  */
	if ((nonzero & nonzero_true) != nonzero)
	  nonzero &= nonzero_true
      		     | cached_nonzero_bits (XEXP (x, 2), mode,
					    known_x, known_mode, known_ret);
      }
      break;

    default:
      break;
    }

  return nonzero;
}
   5300 
   5301 /* See the macro definition above.  */
   5302 #undef cached_num_sign_bit_copies
   5303 
   5304 
   5305 /* Return true if num_sign_bit_copies1 might recurse into both operands
   5307    of X.  */
   5308 
   5309 static inline bool
   5310 num_sign_bit_copies_binary_arith_p (const_rtx x)
   5311 {
   5312   if (!ARITHMETIC_P (x))
   5313     return false;
   5314   switch (GET_CODE (x))
   5315     {
   5316     case IOR:
   5317     case AND:
   5318     case XOR:
   5319     case SMIN:
   5320     case SMAX:
   5321     case UMIN:
   5322     case UMAX:
   5323     case PLUS:
   5324     case MINUS:
   5325     case MULT:
   5326       return true;
   5327     default:
   5328       return false;
   5329     }
   5330 }
   5331 
   5332 /* The function cached_num_sign_bit_copies is a wrapper around
   5333    num_sign_bit_copies1.  It avoids exponential behavior in
   5334    num_sign_bit_copies1 when X has identical subexpressions on the
   5335    first or the second level.  */
   5336 
   5337 static unsigned int
   5338 cached_num_sign_bit_copies (const_rtx x, scalar_int_mode mode,
   5339 			    const_rtx known_x, machine_mode known_mode,
   5340 			    unsigned int known_ret)
   5341 {
   5342   if (x == known_x && mode == known_mode)
   5343     return known_ret;
   5344 
   5345   /* Try to find identical subexpressions.  If found call
   5346      num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
   5347      the precomputed value for the subexpression as KNOWN_RET.  */
   5348 
   5349   if (num_sign_bit_copies_binary_arith_p (x))
   5350     {
   5351       rtx x0 = XEXP (x, 0);
   5352       rtx x1 = XEXP (x, 1);
   5353 
   5354       /* Check the first level.  */
   5355       if (x0 == x1)
   5356 	return
   5357 	  num_sign_bit_copies1 (x, mode, x0, mode,
   5358 				cached_num_sign_bit_copies (x0, mode, known_x,
   5359 							    known_mode,
   5360 							    known_ret));
   5361 
   5362       /* Check the second level.  */
   5363       if (num_sign_bit_copies_binary_arith_p (x0)
   5364 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
   5365 	return
   5366 	  num_sign_bit_copies1 (x, mode, x1, mode,
   5367 				cached_num_sign_bit_copies (x1, mode, known_x,
   5368 							    known_mode,
   5369 							    known_ret));
   5370 
   5371       if (num_sign_bit_copies_binary_arith_p (x1)
   5372 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
   5373 	return
   5374 	  num_sign_bit_copies1 (x, mode, x0, mode,
   5375 				cached_num_sign_bit_copies (x0, mode, known_x,
   5376 							    known_mode,
   5377 							    known_ret));
   5378     }
   5379 
   5380   return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
   5381 }
   5382 
/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE.  The returned
   value will always be between 1 and the number of bits in MODE.

   KNOWN_X, KNOWN_MODE and KNOWN_RET are forwarded unchanged to
   cached_num_sign_bit_copies on every recursive step, which uses them to
   short-circuit re-analysis of an already-analyzed subexpression.  */

static unsigned int
num_sign_bit_copies1 (const_rtx x, scalar_int_mode mode, const_rtx known_x,
		      machine_mode known_mode,
		      unsigned int known_ret)
{
  enum rtx_code code = GET_CODE (x);
  unsigned int bitwidth = GET_MODE_PRECISION (mode);
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;

  if (CONST_INT_P (x))
    {
      /* If the constant is negative, take its 1's complement and remask.
	 Then see how many zero bits we have.  */
      nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
	nonzero = (~nonzero) & GET_MODE_MASK (mode);

      /* floor_log2 (0) is -1, so an all-zero (or all-one) constant yields
	 BITWIDTH copies.  */
      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
    }

  /* Only scalar integer modes are analyzed; for anything else the safe
     answer is the minimum, 1.  */
  scalar_int_mode xmode, inner_mode;
  if (!is_a <scalar_int_mode> (GET_MODE (x), &xmode))
    return 1;

  unsigned int xmode_width = GET_MODE_PRECISION (xmode);

  /* For a smaller mode, just ignore the high bits.  */
  if (bitwidth < xmode_width)
    {
      num0 = cached_num_sign_bit_copies (x, xmode,
					 known_x, known_mode, known_ret);
      return MAX (1, num0 - (int) (xmode_width - bitwidth));
    }

  if (bitwidth > xmode_width)
    {
      /* If this machine does not do all register operations on the entire
	 register and MODE is wider than the mode of X, we can say nothing
	 at all about the high-order bits.  We extend this reasoning to RISC
	 machines for operations that might not operate on full registers.  */
      if (!(WORD_REGISTER_OPERATIONS && word_register_operation_p (x)))
	return 1;

      /* Likewise on machines that do, if the mode of the object is smaller
	 than a word and loads of that size don't sign extend, we can say
	 nothing about the high order bits.  */
      if (xmode_width < BITS_PER_WORD
	  && load_extend_op (xmode) != SIGN_EXTEND)
	return 1;
    }

  /* Please keep num_sign_bit_copies_binary_arith_p above in sync with
     the code in the switch below.  */
  switch (code)
    {
    case REG:

#if defined(POINTERS_EXTEND_UNSIGNED)
      /* If pointers extend signed and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be sign bit copies.  */
      /* As we do not know which address space the pointer is referring to,
	 we can do this only if the target does not support different pointer
	 or address modes depending on the address space.  */
      if (target_default_pointer_address_modes_p ()
	  && ! POINTERS_EXTEND_UNSIGNED && xmode == Pmode
	  && mode == Pmode && REG_POINTER (x)
	  && !targetm.have_ptr_extend ())
	return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
#endif

      {
	/* Let the backend's rtl hook refine the answer, possibly
	   substituting an equivalent rtx to analyze instead.  */
	unsigned int copies_for_hook = 1, copies = 1;
	rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, xmode, mode,
							 &copies_for_hook);

	if (new_rtx)
	  copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
					       known_mode, known_ret);

	if (copies > 1 || copies_for_hook > 1)
	  return MAX (copies, copies_for_hook);

	/* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
      }
      break;

    case MEM:
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (load_extend_op (xmode) == SIGN_EXTEND)
	return MAX (1, ((int) bitwidth - (int) xmode_width + 1));
      break;

    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
	 and we are looking at it in a wider mode, we know that at least the
	 high-order bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x))
	{
	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
					     known_x, known_mode, known_ret);
	  return MAX ((int) bitwidth - (int) xmode_width + 1, num0);
	}

      if (is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (x)), &inner_mode))
	{
	  /* For a smaller object, just ignore the high bits.  */
	  if (bitwidth <= GET_MODE_PRECISION (inner_mode))
	    {
	      num0 = cached_num_sign_bit_copies (SUBREG_REG (x), inner_mode,
						 known_x, known_mode,
						 known_ret);
	      return MAX (1, num0 - (int) (GET_MODE_PRECISION (inner_mode)
					   - bitwidth));
	    }

	  /* For paradoxical SUBREGs on machines where all register operations
	     affect the entire register, just look inside.  Note that we are
	     passing MODE to the recursive call, so the number of sign bit
	     copies will remain relative to that mode, not the inner mode.

	     This works only if loads sign extend.  Otherwise, if we get a
	     reload for the inner part, it may be loaded from the stack, and
	     then we lose all sign bit copies that existed before the store
	     to the stack.  */
	  if (WORD_REGISTER_OPERATIONS
	      && load_extend_op (inner_mode) == SIGN_EXTEND
	      && paradoxical_subreg_p (x)
	      && MEM_P (SUBREG_REG (x)))
	    return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
					       known_x, known_mode, known_ret);
	}
      break;

    case SIGN_EXTRACT:
      /* A sign extraction of WIDTH bits leaves BITWIDTH - WIDTH + 1 sign
	 bit copies, known only when the width is a constant.  */
      if (CONST_INT_P (XEXP (x, 1)))
	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      /* Extension adds one copy per bit of widening on top of the copies
	 already present in the narrower operand.  */
      if (is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
	return (bitwidth - GET_MODE_PRECISION (inner_mode)
		+ cached_num_sign_bit_copies (XEXP (x, 0), inner_mode,
					      known_x, known_mode, known_ret));
      break;

    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), inner_mode,
					 known_x, known_mode, known_ret);
      return MAX (1, (num0 - (int) (GET_MODE_PRECISION (inner_mode)
				    - bitwidth)));

    case NOT:
      /* Complementing flips every bit, so sign-bit copies are preserved.  */
      return cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);

    case ROTATE:       case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
	 of sign bit copies, we can just subtract that amount from the
	 number.  */
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
	{
	  num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					     known_x, known_mode, known_ret);
	  /* A right rotate by N is treated as a left rotate by
	     BITWIDTH - N.  */
	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
	}
      break;

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
	 is known to be positive, the number of sign bit copies is the
	 same as that of the input.  Finally, if the input has just one bit
	 that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
	return bitwidth;

      /* Only subtract a copy when the operand might be negative, i.e. its
	 sign bit might be set.  */
      if (num0 > 1
	  && ((HOST_WIDE_INT_1U << (bitwidth - 1)) & nonzero))
	num0--;

      return num0;

    case IOR:   case AND:   case XOR:
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
	 MIN and MAX operations always return one of the operands.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);

      /* If num1 is clearing some of the top bits then regardless of
	 the other term, we are guaranteed to have at least that many
	 high-order zero bits.  */
      if (code == AND
	  && num1 > 1
	  && bitwidth <= HOST_BITS_PER_WIDE_INT
	  && CONST_INT_P (XEXP (x, 1))
	  && (UINTVAL (XEXP (x, 1))
	      & (HOST_WIDE_INT_1U << (bitwidth - 1))) == 0)
	return num1;

      /* Similarly for IOR when setting high-order bits.  */
      if (code == IOR
	  && num1 > 1
	  && bitwidth <= HOST_BITS_PER_WIDE_INT
	  && CONST_INT_P (XEXP (x, 1))
	  && (UINTVAL (XEXP (x, 1))
	      & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
	return num1;

      return MIN (num0, num1);

    case PLUS:  case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
	 if we are subtracting 1 from a positive number, there will not
	 be such a carry.  Furthermore, if the positive number is known to
	 be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
	{
	  nonzero = nonzero_bits (XEXP (x, 0), mode);
	  if (((HOST_WIDE_INT_1U << (bitwidth - 1)) & nonzero) == 0)
	    return (nonzero == 1 || nonzero == 0 ? bitwidth
		    : bitwidth - floor_log2 (nonzero) - 1);
	}

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);
      result = MAX (1, MIN (num0, num1) - 1);

      return result;

    case MULT:
      /* The number of bits of the product is the sum of the number of
	 bits of both terms.  However, unless one of the terms if known
	 to be positive, we must allow for an additional bit since negating
	 a negative number can remove one sign bit copy.  */

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (((nonzero_bits (XEXP (x, 0), mode)
		    & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
		  && ((nonzero_bits (XEXP (x, 1), mode)
		       & (HOST_WIDE_INT_1U << (bitwidth - 1)))
		      != 0))))
	result--;

      return MAX (1, result);

    case UDIV:
      /* The result must be <= the first operand.  If the first operand
	 has the high bit set, we know nothing about the number of sign
	 bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
		& (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
	return 1;
      else
	return cached_num_sign_bit_copies (XEXP (x, 0), mode,
					   known_x, known_mode, known_ret);

    case UMOD:
      /* The result must be <= the second operand.  If the second operand
	 has (or just might have) the high bit set, we know nothing about
	 the number of sign bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return 1;
      else if ((nonzero_bits (XEXP (x, 1), mode)
		& (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
	return 1;
      else
	return cached_num_sign_bit_copies (XEXP (x, 1), mode,
					   known_x, known_mode, known_ret);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
	 the case where the divisor is negative, in which case we have
	 to add 1.  */
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					   known_x, known_mode, known_ret);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0))
	result--;

      return result;

    case MOD:
      /* The magnitude of the result is bounded by the second operand,
	 with the same negative-divisor adjustment as DIV.  */
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					   known_x, known_mode, known_ret);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0))
	result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
	 sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) > 0
	  && INTVAL (XEXP (x, 1)) < xmode_width)
	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (!CONST_INT_P (XEXP (x, 1))
	  || INTVAL (XEXP (x, 1)) < 0
	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth
	  || INTVAL (XEXP (x, 1)) >= xmode_width)
	return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      /* Either arm may be selected, so only the copies common to both
	 arms are guaranteed.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
					 known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* If the constant is negative, take its 1's complement and remask.
	 Then see how many zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
	nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */

  bitwidth = GET_MODE_PRECISION (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))
	 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}
   5771 
   5772 /* Calculate the rtx_cost of a single instruction pattern.  A return value of
   5773    zero indicates an instruction pattern without a known cost.  */
   5774 
   5775 int
   5776 pattern_cost (rtx pat, bool speed)
   5777 {
   5778   int i, cost;
   5779   rtx set;
   5780 
   5781   /* Extract the single set rtx from the instruction pattern.  We
   5782      can't use single_set since we only have the pattern.  We also
   5783      consider PARALLELs of a normal set and a single comparison.  In
   5784      that case we use the cost of the non-comparison SET operation,
   5785      which is most-likely to be the real cost of this operation.  */
   5786   if (GET_CODE (pat) == SET)
   5787     set = pat;
   5788   else if (GET_CODE (pat) == PARALLEL)
   5789     {
   5790       set = NULL_RTX;
   5791       rtx comparison = NULL_RTX;
   5792 
   5793       for (i = 0; i < XVECLEN (pat, 0); i++)
   5794 	{
   5795 	  rtx x = XVECEXP (pat, 0, i);
   5796 	  if (GET_CODE (x) == SET)
   5797 	    {
   5798 	      if (GET_CODE (SET_SRC (x)) == COMPARE)
   5799 		{
   5800 		  if (comparison)
   5801 		    return 0;
   5802 		  comparison = x;
   5803 		}
   5804 	      else
   5805 		{
   5806 		  if (set)
   5807 		    return 0;
   5808 		  set = x;
   5809 		}
   5810 	    }
   5811 	}
   5812 
   5813       if (!set && comparison)
   5814 	set = comparison;
   5815 
   5816       if (!set)
   5817 	return 0;
   5818     }
   5819   else
   5820     return 0;
   5821 
   5822   cost = set_src_cost (SET_SRC (set), GET_MODE (SET_DEST (set)), speed);
   5823   return cost > 0 ? cost : COSTS_N_INSNS (1);
   5824 }
   5825 
   5826 /* Calculate the cost of a single instruction.  A return value of zero
   5827    indicates an instruction pattern without a known cost.  */
   5828 
   5829 int
   5830 insn_cost (rtx_insn *insn, bool speed)
   5831 {
   5832   if (targetm.insn_cost)
   5833     return targetm.insn_cost (insn, speed);
   5834 
   5835   return pattern_cost (PATTERN (insn), speed);
   5836 }
   5837 
   5838 /* Returns estimate on cost of computing SEQ.  */
   5839 
   5840 unsigned
   5841 seq_cost (const rtx_insn *seq, bool speed)
   5842 {
   5843   unsigned cost = 0;
   5844   rtx set;
   5845 
   5846   for (; seq; seq = NEXT_INSN (seq))
   5847     {
   5848       set = single_set (seq);
   5849       if (set)
   5850         cost += set_rtx_cost (set, speed);
   5851       else if (NONDEBUG_INSN_P (seq))
   5852 	{
   5853 	  int this_cost = insn_cost (CONST_CAST_RTX_INSN (seq), speed);
   5854 	  if (this_cost > 0)
   5855 	    cost += this_cost;
   5856 	  else
   5857 	    cost++;
   5858 	}
   5859     }
   5860 
   5861   return cost;
   5862 }
   5863 
/* Given an insn INSN and condition COND, return the condition in a
   canonical form to simplify testing by callers.  Specifically:

   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
   (2) Both operands will be machine operands.
   (3) If an operand is a constant, it will be the second operand.
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.

   If the condition cannot be understood, or is an inequality floating-point
   comparison which needs to be reversed, 0 will be returned.

   If REVERSE is nonzero, then reverse the condition prior to canonizing it.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.

   If WANT_REG is nonzero, we wish the condition to be relative to that
   register, if possible.  Therefore, do not canonicalize the condition
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
   to be a compare to a CC mode register.

   If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
   and at INSN.  */

rtx
canonicalize_condition (rtx_insn *insn, rtx cond, int reverse,
			rtx_insn **earliest,
			rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
{
  enum rtx_code code;
  rtx_insn *prev = insn;	/* Cursor for the backward walk below.  */
  const_rtx set;
  rtx tem;
  rtx op0, op1;
  /* Nonzero when the comparison found so far has the opposite sense of
     CODE and must be reversed before being used.  */
  int reverse_code = 0;
  machine_mode mode;
  /* The walk never leaves INSN's basic block.  */
  basic_block bb = BLOCK_FOR_INSN (insn);

  code = GET_CODE (cond);
  mode = GET_MODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (reverse)
    code = reversed_comparison_code (cond, insn);
  if (code == UNKNOWN)
    return 0;

  if (earliest)
    *earliest = insn;

  /* If we are comparing a register with zero, see if the register is set
     in the previous insn to a COMPARE or a comparison operation.  Perform
     the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
     in cse.cc  */

  while ((GET_RTX_CLASS (code) == RTX_COMPARE
	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
	 && op1 == CONST0_RTX (GET_MODE (op0))
	 && op0 != want_reg)
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;

      /* If this is a COMPARE, pick up the two things being compared.  */
      if (GET_CODE (op0) == COMPARE)
	{
	  op1 = XEXP (op0, 1);
	  op0 = XEXP (op0, 0);
	  continue;
	}
      else if (!REG_P (op0))
	break;

      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
	 stop if it isn't a single set or if it has a REG_INC note because
	 we don't want to bother dealing with it.  */

      prev = prev_nonnote_nondebug_insn (prev);

      if (prev == 0
	  || !NONJUMP_INSN_P (prev)
	  || FIND_REG_INC_NOTE (prev, NULL_RTX)
	  /* In cfglayout mode, there do not have to be labels at the
	     beginning of a block, or jumps at the end, so the previous
	     conditions would not stop us when we reach bb boundary.  */
	  || BLOCK_FOR_INSN (prev) != bb)
	break;

      set = set_of (op0, prev);

      /* Give up on anything other than a plain SET of OP0 itself
	 (e.g. a CLOBBER, or a SET of something overlapping OP0).  */
      if (set
	  && (GET_CODE (set) != SET
	      || !rtx_equal_p (SET_DEST (set), op0)))
	break;

      /* If this is setting OP0, get what it sets it to if it looks
	 relevant.  */
      if (set)
	{
	  machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* ??? We may not combine comparisons done in a CCmode with
	     comparisons not done in a CCmode.  This is to aid targets
	     like Alpha that have an IEEE compliant EQ instruction, and
	     a non-IEEE compliant BEQ instruction.  The use of CCmode is
	     actually artificial, simply to prevent the combination, but
	     should not affect other platforms.

	     However, we must allow VOIDmode comparisons to match either
	     CCmode or non-CCmode comparison, because some ports have
	     modeless comparisons inside branch patterns.

	     ??? This mode check should perhaps look more like the mode check
	     in simplify_comparison in combine.  */
	  if (((GET_MODE_CLASS (mode) == MODE_CC)
	       != (GET_MODE_CLASS (inner_mode) == MODE_CC))
	      && mode != VOIDmode
	      && inner_mode != VOIDmode)
	    break;
	  /* The source keeps the same sense as CODE in these cases.  */
	  if (GET_CODE (SET_SRC (set)) == COMPARE
	      || (((code == NE
		    || (code == LT
			&& val_signbit_known_set_p (inner_mode,
						    STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == LT
			&& SCALAR_FLOAT_MODE_P (inner_mode)
			&& (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
			    REAL_VALUE_NEGATIVE (fsfv)))
#endif
		    ))
		  && COMPARISON_P (SET_SRC (set))))
	    x = SET_SRC (set);
	  /* In these cases the source has the opposite sense; remember
	     to reverse it below.  */
	  else if (((code == EQ
		     || (code == GE
			 && val_signbit_known_set_p (inner_mode,
						     STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		     || (code == GE
			 && SCALAR_FLOAT_MODE_P (inner_mode)
			 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
			     REAL_VALUE_NEGATIVE (fsfv)))
#endif
		     ))
		   && COMPARISON_P (SET_SRC (set)))
	    {
	      reverse_code = 1;
	      x = SET_SRC (set);
	    }
	  else if ((code == EQ || code == NE)
		   && GET_CODE (SET_SRC (set)) == XOR)
	    /* Handle sequences like:

	       (set op0 (xor X Y))
	       ...(eq|ne op0 (const_int 0))...

	       in which case:

	       (eq op0 (const_int 0)) reduces to (eq X Y)
	       (ne op0 (const_int 0)) reduces to (ne X Y)

	       This is the form used by MIPS16, for example.  */
	    x = SET_SRC (set);
	  else
	    break;
	}

      else if (reg_set_p (op0, prev))
	/* If this sets OP0, but not directly, we have to give up.  */
	break;

      if (x)
	{
	  /* If the caller is expecting the condition to be valid at INSN,
	     make sure X doesn't change before INSN.  */
	  if (valid_at_insn_p)
	    if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
	      break;
	  if (COMPARISON_P (x))
	    code = GET_CODE (x);
	  if (reverse_code)
	    {
	      code = reversed_comparison_code (x, prev);
	      if (code == UNKNOWN)
		return 0;
	      reverse_code = 0;
	    }

	  /* Continue the walk from the operands of the new comparison.  */
	  op0 = XEXP (x, 0), op1 = XEXP (x, 1);
	  if (earliest)
	    *earliest = prev;
	}
    }

  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with integers involving equality
     if we can do computations in the relevant mode and we do not
     overflow.  */

  scalar_int_mode op0_mode;
  if (CONST_INT_P (op1)
      && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
      && GET_MODE_PRECISION (op0_mode) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
	= (unsigned HOST_WIDE_INT) GET_MODE_MASK (op0_mode);

      switch (code)
	{
	case LE:
	  /* LE const becomes LT const+1, unless const is the signed
	     maximum, where +1 would overflow.  */
	  if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
	    code = LT, op1 = gen_int_mode (const_val + 1, op0_mode);
	  break;

	/* When cross-compiling, const_val might be sign-extended from
	   BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
	case GE:
	  /* GE const becomes GT const-1, unless const is the signed
	     minimum, where -1 would overflow.  */
	  if ((const_val & max_val)
	      != (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (op0_mode) - 1)))
	    code = GT, op1 = gen_int_mode (const_val - 1, op0_mode);
	  break;

	case LEU:
	  /* LEU const becomes LTU const+1, unless const is the unsigned
	     maximum.  */
	  if (uconst_val < max_val)
	    code = LTU, op1 = gen_int_mode (uconst_val + 1, op0_mode);
	  break;

	case GEU:
	  /* GEU const becomes GTU const-1, unless const is zero.  */
	  if (uconst_val != 0)
	    code = GTU, op1 = gen_int_mode (uconst_val - 1, op0_mode);
	  break;

	default:
	  break;
	}
    }

  /* We promised to return a comparison.  */
  rtx ret = gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
  if (COMPARISON_P (ret))
    return ret;
  return 0;
}
   6125 
   6126 /* Given a jump insn JUMP, return the condition that will cause it to branch
   6127    to its JUMP_LABEL.  If the condition cannot be understood, or is an
   6128    inequality floating-point comparison which needs to be reversed, 0 will
   6129    be returned.
   6130 
   6131    If EARLIEST is nonzero, it is a pointer to a place where the earliest
   6132    insn used in locating the condition was found.  If a replacement test
   6133    of the condition is desired, it should be placed in front of that
   6134    insn and we will be sure that the inputs are still valid.  If EARLIEST
   6135    is null, the returned condition will be valid at INSN.
   6136 
   6137    If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   6138    compare CC mode register.
   6139 
   6140    VALID_AT_INSN_P is the same as for canonicalize_condition.  */
   6141 
   6142 rtx
   6143 get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode,
   6144 	       int valid_at_insn_p)
   6145 {
   6146   rtx cond;
   6147   int reverse;
   6148   rtx set;
   6149 
   6150   /* If this is not a standard conditional jump, we can't parse it.  */
   6151   if (!JUMP_P (jump)
   6152       || ! any_condjump_p (jump))
   6153     return 0;
   6154   set = pc_set (jump);
   6155 
   6156   cond = XEXP (SET_SRC (set), 0);
   6157 
   6158   /* If this branches to JUMP_LABEL when the condition is false, reverse
   6159      the condition.  */
   6160   reverse
   6161     = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
   6162       && label_ref_label (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump);
   6163 
   6164   return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
   6165 				 allow_cc_mode, valid_at_insn_p);
   6166 }
   6167 
/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
   TARGET_MODE_REP_EXTENDED.

   Note that we assume that the property of
   TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
   narrower than mode B.  I.e., if A is a mode narrower than B then in
   order to be able to operate on it in mode B, mode A needs to
   satisfy the requirements set by the representation of mode B.  */

static void
init_num_sign_bit_copies_in_rep (void)
{
  opt_scalar_int_mode in_mode_iter;
  scalar_int_mode mode;

  /* For every pair (IN_MODE, MODE) with MODE narrower than IN_MODE,
     accumulate how many bits above MODE must be sign-bit copies when a
     MODE value is represented in IN_MODE.  */
  FOR_EACH_MODE_IN_CLASS (in_mode_iter, MODE_INT)
    FOR_EACH_MODE_UNTIL (mode, in_mode_iter.require ())
      {
	scalar_int_mode in_mode = in_mode_iter.require ();
	scalar_int_mode i;

	/* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
	   extends to the next widest mode.  */
	gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
		    || GET_MODE_WIDER_MODE (mode).require () == in_mode);

	/* We are in in_mode.  Count how many bits outside of mode
	   have to be copies of the sign-bit.  */
	FOR_EACH_MODE (i, mode, in_mode)
	  {
	    /* This must always exist (for the last iteration it will be
	       IN_MODE).  */
	    scalar_int_mode wider = GET_MODE_WIDER_MODE (i).require ();

	    if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
		/* We can only check sign-bit copies starting from the
		   top-bit.  In order to be able to check the bits we
		   have already seen we pretend that subsequent bits
		   have to be sign-bit copies too.  */
		|| num_sign_bit_copies_in_rep [in_mode][mode])
	      num_sign_bit_copies_in_rep [in_mode][mode]
		+= GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
	  }
      }
}
   6213 
   6214 /* Suppose that truncation from the machine mode of X to MODE is not a
   6215    no-op.  See if there is anything special about X so that we can
   6216    assume it already contains a truncated value of MODE.  */
   6217 
   6218 bool
   6219 truncated_to_mode (machine_mode mode, const_rtx x)
   6220 {
   6221   /* This register has already been used in MODE without explicit
   6222      truncation.  */
   6223   if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
   6224     return true;
   6225 
   6226   /* See if we already satisfy the requirements of MODE.  If yes we
   6227      can just switch to MODE.  */
   6228   if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
   6229       && (num_sign_bit_copies (x, GET_MODE (x))
   6230 	  >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
   6231     return true;
   6232 
   6233   return false;
   6234 }
   6235 
   6236 /* Return true if RTX code CODE has a single sequence of zero or more
   6238    "e" operands and no rtvec operands.  Initialize its rtx_all_subrtx_bounds
   6239    entry in that case.  */
   6240 
   6241 static bool
   6242 setup_reg_subrtx_bounds (unsigned int code)
   6243 {
   6244   const char *format = GET_RTX_FORMAT ((enum rtx_code) code);
   6245   unsigned int i = 0;
   6246   for (; format[i] != 'e'; ++i)
   6247     {
   6248       if (!format[i])
   6249 	/* No subrtxes.  Leave start and count as 0.  */
   6250 	return true;
   6251       if (format[i] == 'E' || format[i] == 'V')
   6252 	return false;
   6253     }
   6254 
   6255   /* Record the sequence of 'e's.  */
   6256   rtx_all_subrtx_bounds[code].start = i;
   6257   do
   6258     ++i;
   6259   while (format[i] == 'e');
   6260   rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start;
   6261   /* rtl-iter.h relies on this.  */
   6262   gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3);
   6263 
   6264   for (; format[i]; ++i)
   6265     if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e')
   6266       return false;
   6267 
   6268   return true;
   6269 }
   6270 
   6271 /* Initialize rtx_all_subrtx_bounds.  */
   6272 void
   6273 init_rtlanal (void)
   6274 {
   6275   int i;
   6276   for (i = 0; i < NUM_RTX_CODE; i++)
   6277     {
   6278       if (!setup_reg_subrtx_bounds (i))
   6279 	rtx_all_subrtx_bounds[i].count = UCHAR_MAX;
   6280       if (GET_RTX_CLASS (i) != RTX_CONST_OBJ)
   6281 	rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i];
   6282     }
   6283 
   6284   init_num_sign_bit_copies_in_rep ();
   6285 }
   6286 
   6287 /* Check whether this is a constant pool constant.  */
   6289 bool
   6290 constant_pool_constant_p (rtx x)
   6291 {
   6292   x = avoid_constant_pool_reference (x);
   6293   return CONST_DOUBLE_P (x);
   6294 }
   6295 
   6296 /* If M is a bitmask that selects a field of low-order bits within an item but
   6298    not the entire word, return the length of the field.  Return -1 otherwise.
   6299    M is used in machine mode MODE.  */
   6300 
   6301 int
   6302 low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m)
   6303 {
   6304   if (mode != VOIDmode)
   6305     {
   6306       if (!HWI_COMPUTABLE_MODE_P (mode))
   6307 	return -1;
   6308       m &= GET_MODE_MASK (mode);
   6309     }
   6310 
   6311   return exact_log2 (m + 1);
   6312 }
   6313 
   6314 /* Return the mode of MEM's address.  */
   6315 
   6316 scalar_int_mode
   6317 get_address_mode (rtx mem)
   6318 {
   6319   machine_mode mode;
   6320 
   6321   gcc_assert (MEM_P (mem));
   6322   mode = GET_MODE (XEXP (mem, 0));
   6323   if (mode != VOIDmode)
   6324     return as_a <scalar_int_mode> (mode);
   6325   return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
   6326 }
   6327 
/* Split up a CONST_DOUBLE or integer constant rtx
   into two rtx's for single words,
   storing in *FIRST the word that comes first in memory in the target
   and in *SECOND the other.

   TODO: This function needs to be rewritten to work on any size
   integer.  */

void
split_double (rtx value, rtx *first, rtx *second)
{
  if (CONST_INT_P (value))
    {
      if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
	{
	  /* In this case the CONST_INT holds both target words.
	     Extract the bits from it into two word-sized pieces.
	     Sign extend each half to HOST_WIDE_INT.  */
	  unsigned HOST_WIDE_INT low, high;
	  unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
	  unsigned bits_per_word = BITS_PER_WORD;

	  /* Set sign_bit to the most significant bit of a word.  */
	  sign_bit = 1;
	  sign_bit <<= bits_per_word - 1;

	  /* Set mask so that all bits of the word are set.  We could
	     have used 1 << BITS_PER_WORD instead of basing the
	     calculation on sign_bit.  However, on machines where
	     HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
	     compiler warning, even though the code would never be
	     executed.  */
	  mask = sign_bit << 1;
	  mask--;

	  /* Set sign_extend as any remaining bits.  */
	  sign_extend = ~mask;

	  /* Pick the lower word and sign-extend it.  */
	  low = INTVAL (value);
	  low &= mask;
	  if (low & sign_bit)
	    low |= sign_extend;

	  /* Pick the higher word, shifted to the least significant
	     bits, and sign-extend it.  The shift is done in two steps
	     because shifting by BITS_PER_WORD in one go would be
	     undefined when BITS_PER_WORD == HOST_BITS_PER_WIDE_INT.  */
	  high = INTVAL (value);
	  high >>= bits_per_word - 1;
	  high >>= 1;
	  high &= mask;
	  if (high & sign_bit)
	    high |= sign_extend;

	  /* Store the words in the target machine order.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      *first = GEN_INT (high);
	      *second = GEN_INT (low);
	    }
	  else
	    {
	      *first = GEN_INT (low);
	      *second = GEN_INT (high);
	    }
	}
      else
	{
	  /* The rule for using CONST_INT for a wider mode
	     is that we regard the value as signed.
	     So sign-extend it.  */
	  rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
	  if (WORDS_BIG_ENDIAN)
	    {
	      *first = high;
	      *second = value;
	    }
	  else
	    {
	      *first = value;
	      *second = high;
	    }
	}
    }
  else if (GET_CODE (value) == CONST_WIDE_INT)
    {
      /* All of this is scary code and needs to be converted to
	 properly work with any size integer.  */
      gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2);
      if (WORDS_BIG_ENDIAN)
	{
	  *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
	  *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
	}
      else
	{
	  *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
	  *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
	}
    }
  else if (!CONST_DOUBLE_P (value))
    {
      /* Not a known two-word constant; treat VALUE itself as the
	 low-order word and zero as the high-order word.  */
      if (WORDS_BIG_ENDIAN)
	{
	  *first = const0_rtx;
	  *second = value;
	}
      else
	{
	  *first = value;
	  *second = const0_rtx;
	}
    }
  else if (GET_MODE (value) == VOIDmode
	   /* This is the old way we did CONST_DOUBLE integers.  */
	   || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
    {
      /* In an integer, the words are defined as most and least significant.
	 So order them by the target's convention.  */
      if (WORDS_BIG_ENDIAN)
	{
	  *first = GEN_INT (CONST_DOUBLE_HIGH (value));
	  *second = GEN_INT (CONST_DOUBLE_LOW (value));
	}
      else
	{
	  *first = GEN_INT (CONST_DOUBLE_LOW (value));
	  *second = GEN_INT (CONST_DOUBLE_HIGH (value));
	}
    }
  else
    {
      /* Floating-point CONST_DOUBLE: split via the target's external
	 representation.  */
      long l[2];

      /* Note, this converts the REAL_VALUE_TYPE to the target's
	 format, splits up the floating point double and outputs
	 exactly 32 bits of it into each of l[0] and l[1] --
	 not necessarily BITS_PER_WORD bits.  */
      REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (value), l);

      /* If 32 bits is an entire word for the target, but not for the host,
	 then sign-extend on the host so that the number will look the same
	 way on the host that it would on the target.  See for instance
	 simplify_unary_operation.  The #if is needed to avoid compiler
	 warnings.  */

#if HOST_BITS_PER_LONG > 32
      if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
	{
	  if (l[0] & ((long) 1 << 31))
	    l[0] |= ((unsigned long) (-1) << 32);
	  if (l[1] & ((long) 1 << 31))
	    l[1] |= ((unsigned long) (-1) << 32);
	}
#endif

      *first = GEN_INT (l[0]);
      *second = GEN_INT (l[1]);
    }
}
   6488 
   6489 /* Return true if X is a sign_extract or zero_extract from the least
   6490    significant bit.  */
   6491 
   6492 static bool
   6493 lsb_bitfield_op_p (rtx x)
   6494 {
   6495   if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
   6496     {
   6497       machine_mode mode = GET_MODE (XEXP (x, 0));
   6498       HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
   6499       HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
   6500       poly_int64 remaining_bits = GET_MODE_PRECISION (mode) - len;
   6501 
   6502       return known_eq (pos, BITS_BIG_ENDIAN ? remaining_bits : 0);
   6503     }
   6504   return false;
   6505 }
   6506 
   6507 /* Strip outer address "mutations" from LOC and return a pointer to the
   6508    inner value.  If OUTER_CODE is nonnull, store the code of the innermost
   6509    stripped expression there.
   6510 
   6511    "Mutations" either convert between modes or apply some kind of
   6512    extension, truncation or alignment.  */
   6513 
   6514 rtx *
   6515 strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
   6516 {
   6517   for (;;)
   6518     {
   6519       enum rtx_code code = GET_CODE (*loc);
   6520       if (GET_RTX_CLASS (code) == RTX_UNARY)
   6521 	/* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
   6522 	   used to convert between pointer sizes.  */
   6523 	loc = &XEXP (*loc, 0);
   6524       else if (lsb_bitfield_op_p (*loc))
   6525 	/* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively
   6526 	   acts as a combined truncation and extension.  */
   6527 	loc = &XEXP (*loc, 0);
   6528       else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
   6529 	/* (and ... (const_int -X)) is used to align to X bytes.  */
   6530 	loc = &XEXP (*loc, 0);
   6531       else if (code == SUBREG
   6532                && !OBJECT_P (SUBREG_REG (*loc))
   6533                && subreg_lowpart_p (*loc))
   6534 	/* (subreg (operator ...) ...) inside and is used for mode
   6535 	   conversion too.  */
   6536 	loc = &SUBREG_REG (*loc);
   6537       else
   6538 	return loc;
   6539       if (outer_code)
   6540 	*outer_code = code;
   6541     }
   6542 }
   6543 
   6544 /* Return true if CODE applies some kind of scale.  The scaled value is
   6545    is the first operand and the scale is the second.  */
   6546 
   6547 static bool
   6548 binary_scale_code_p (enum rtx_code code)
   6549 {
   6550   return (code == MULT
   6551           || code == ASHIFT
   6552           /* Needed by ARM targets.  */
   6553           || code == ASHIFTRT
   6554           || code == LSHIFTRT
   6555           || code == ROTATE
   6556           || code == ROTATERT);
   6557 }
   6558 
   6559 /* If *INNER can be interpreted as a base, return a pointer to the inner term
   6560    (see address_info).  Return null otherwise.  */
   6561 
   6562 static rtx *
   6563 get_base_term (rtx *inner)
   6564 {
   6565   if (GET_CODE (*inner) == LO_SUM)
   6566     inner = strip_address_mutations (&XEXP (*inner, 0));
   6567   if (REG_P (*inner)
   6568       || MEM_P (*inner)
   6569       || GET_CODE (*inner) == SUBREG
   6570       || GET_CODE (*inner) == SCRATCH)
   6571     return inner;
   6572   return 0;
   6573 }
   6574 
   6575 /* If *INNER can be interpreted as an index, return a pointer to the inner term
   6576    (see address_info).  Return null otherwise.  */
   6577 
   6578 static rtx *
   6579 get_index_term (rtx *inner)
   6580 {
   6581   /* At present, only constant scales are allowed.  */
   6582   if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1)))
   6583     inner = strip_address_mutations (&XEXP (*inner, 0));
   6584   if (REG_P (*inner)
   6585       || MEM_P (*inner)
   6586       || GET_CODE (*inner) == SUBREG
   6587       || GET_CODE (*inner) == SCRATCH)
   6588     return inner;
   6589   return 0;
   6590 }
   6591 
   6592 /* Set the segment part of address INFO to LOC, given that INNER is the
   6593    unmutated value.  */
   6594 
   6595 static void
   6596 set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
   6597 {
   6598   gcc_assert (!info->segment);
   6599   info->segment = loc;
   6600   info->segment_term = inner;
   6601 }
   6602 
   6603 /* Set the base part of address INFO to LOC, given that INNER is the
   6604    unmutated value.  */
   6605 
   6606 static void
   6607 set_address_base (struct address_info *info, rtx *loc, rtx *inner)
   6608 {
   6609   gcc_assert (!info->base);
   6610   info->base = loc;
   6611   info->base_term = inner;
   6612 }
   6613 
   6614 /* Set the index part of address INFO to LOC, given that INNER is the
   6615    unmutated value.  */
   6616 
   6617 static void
   6618 set_address_index (struct address_info *info, rtx *loc, rtx *inner)
   6619 {
   6620   gcc_assert (!info->index);
   6621   info->index = loc;
   6622   info->index_term = inner;
   6623 }
   6624 
   6625 /* Set the displacement part of address INFO to LOC, given that INNER
   6626    is the constant term.  */
   6627 
   6628 static void
   6629 set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
   6630 {
   6631   gcc_assert (!info->disp);
   6632   info->disp = loc;
   6633   info->disp_term = inner;
   6634 }
   6635 
   6636 /* INFO->INNER describes a {PRE,POST}_{INC,DEC} address.  Set up the
   6637    rest of INFO accordingly.  */
   6638 
   6639 static void
   6640 decompose_incdec_address (struct address_info *info)
   6641 {
   6642   info->autoinc_p = true;
   6643 
   6644   rtx *base = &XEXP (*info->inner, 0);
   6645   set_address_base (info, base, base);
   6646   gcc_checking_assert (info->base == info->base_term);
   6647 
   6648   /* These addresses are only valid when the size of the addressed
   6649      value is known.  */
   6650   gcc_checking_assert (info->mode != VOIDmode);
   6651 }
   6652 
   6653 /* INFO->INNER describes a {PRE,POST}_MODIFY address.  Set up the rest
   6654    of INFO accordingly.  */
   6655 
   6656 static void
   6657 decompose_automod_address (struct address_info *info)
   6658 {
   6659   info->autoinc_p = true;
   6660 
   6661   rtx *base = &XEXP (*info->inner, 0);
   6662   set_address_base (info, base, base);
   6663   gcc_checking_assert (info->base == info->base_term);
   6664 
   6665   rtx plus = XEXP (*info->inner, 1);
   6666   gcc_assert (GET_CODE (plus) == PLUS);
   6667 
   6668   info->base_term2 = &XEXP (plus, 0);
   6669   gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));
   6670 
   6671   rtx *step = &XEXP (plus, 1);
   6672   rtx *inner_step = strip_address_mutations (step);
   6673   if (CONSTANT_P (*inner_step))
   6674     set_address_disp (info, step, inner_step);
   6675   else
   6676     set_address_index (info, step, inner_step);
   6677 }
   6678 
   6679 /* Treat *LOC as a tree of PLUS operands and store pointers to the summed
   6680    values in [PTR, END).  Return a pointer to the end of the used array.  */
   6681 
   6682 static rtx **
   6683 extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
   6684 {
   6685   rtx x = *loc;
   6686   if (GET_CODE (x) == PLUS)
   6687     {
   6688       ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
   6689       ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
   6690     }
   6691   else
   6692     {
   6693       gcc_assert (ptr != end);
   6694       *ptr++ = loc;
   6695     }
   6696   return ptr;
   6697 }
   6698 
   6699 /* Evaluate the likelihood of X being a base or index value, returning
   6700    positive if it is likely to be a base, negative if it is likely to be
   6701    an index, and 0 if we can't tell.  Make the magnitude of the return
   6702    value reflect the amount of confidence we have in the answer.
   6703 
   6704    MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1.  */
   6705 
   6706 static int
   6707 baseness (rtx x, machine_mode mode, addr_space_t as,
   6708 	  enum rtx_code outer_code, enum rtx_code index_code)
   6709 {
   6710   /* Believe *_POINTER unless the address shape requires otherwise.  */
   6711   if (REG_P (x) && REG_POINTER (x))
   6712     return 2;
   6713   if (MEM_P (x) && MEM_POINTER (x))
   6714     return 2;
   6715 
   6716   if (REG_P (x) && HARD_REGISTER_P (x))
   6717     {
   6718       /* X is a hard register.  If it only fits one of the base
   6719 	 or index classes, choose that interpretation.  */
   6720       int regno = REGNO (x);
   6721       bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
   6722       bool index_p = REGNO_OK_FOR_INDEX_P (regno);
   6723       if (base_p != index_p)
   6724 	return base_p ? 1 : -1;
   6725     }
   6726   return 0;
   6727 }
   6728 
/* INFO->INNER describes a normal, non-automodified address.
   Fill in the rest of INFO accordingly.  */

static void
decompose_normal_address (struct address_info *info)
{
  /* Treat the address as the sum of up to four values.  */
  rtx *ops[4];
  size_t n_ops = extract_plus_operands (info->inner, ops,
					ops + ARRAY_SIZE (ops)) - ops;

  /* If there is more than one component, any base component is in a PLUS.  */
  if (n_ops > 1)
    info->base_outer_code = PLUS;

  /* Try to classify each sum operand now.  Leave those that could be
     either a base or an index in OPS.  */
  rtx *inner_ops[4];
  size_t out = 0;
  for (size_t in = 0; in < n_ops; ++in)
    {
      rtx *loc = ops[in];
      rtx *inner = strip_address_mutations (loc);
      if (CONSTANT_P (*inner))
	/* A constant term is the displacement.  */
	set_address_disp (info, loc, inner);
      else if (GET_CODE (*inner) == UNSPEC)
	/* Targets wrap segment overrides in an UNSPEC.  */
	set_address_segment (info, loc, inner);
      else
	{
	  /* The only other possibilities are a base or an index.  */
	  rtx *base_term = get_base_term (inner);
	  rtx *index_term = get_index_term (inner);
	  gcc_assert (base_term || index_term);
	  if (!base_term)
	    set_address_index (info, loc, index_term);
	  else if (!index_term)
	    set_address_base (info, loc, base_term);
	  else
	    {
	      /* Ambiguous: could be either.  Defer the decision by
		 compacting the term back into OPS/INNER_OPS.  */
	      gcc_assert (base_term == index_term);
	      ops[out] = loc;
	      inner_ops[out] = base_term;
	      ++out;
	    }
	}
    }

  /* Classify the remaining OPS members as bases and indexes.  */
  if (out == 1)
    {
      /* If we haven't seen a base or an index yet, assume that this is
	 the base.  If we were confident that another term was the base
	 or index, treat the remaining operand as the other kind.  */
      if (!info->base)
	set_address_base (info, ops[0], inner_ops[0]);
      else
	set_address_index (info, ops[0], inner_ops[0]);
    }
  else if (out == 2)
    {
      /* Compare the two candidates' baseness scores; in the event of a
	 tie, assume the base comes first.  */
      if (baseness (*inner_ops[0], info->mode, info->as, PLUS,
		    GET_CODE (*ops[1]))
	  >= baseness (*inner_ops[1], info->mode, info->as, PLUS,
		       GET_CODE (*ops[0])))
	{
	  set_address_base (info, ops[0], inner_ops[0]);
	  set_address_index (info, ops[1], inner_ops[1]);
	}
      else
	{
	  set_address_base (info, ops[1], inner_ops[1]);
	  set_address_index (info, ops[0], inner_ops[0]);
	}
    }
  else
    gcc_assert (out == 0);
}
   6807 
/* Describe address *LOC in *INFO.  MODE is the mode of the addressed value,
   or VOIDmode if not known.  AS is the address space associated with LOC.
   OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise.  */

void
decompose_address (struct address_info *info, rtx *loc, machine_mode mode,
		   addr_space_t as, enum rtx_code outer_code)
{
  memset (info, 0, sizeof (*info));
  info->mode = mode;
  info->as = as;
  info->addr_outer_code = outer_code;
  info->outer = loc;
  /* Strip extensions/truncations/alignment; OUTER_CODE is updated to
     the innermost stripped code, which then provides the context for
     classifying base registers.  */
  info->inner = strip_address_mutations (loc, &outer_code);
  info->base_outer_code = outer_code;
  /* Dispatch on the shape of the inner address.  */
  switch (GET_CODE (*info->inner))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      decompose_incdec_address (info);
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      decompose_automod_address (info);
      break;

    default:
      decompose_normal_address (info);
      break;
    }
}
   6842 
/* Describe address operand LOC in INFO.  The mode of the addressed
   value is unknown (VOIDmode), the generic address space is used, and
   the operand appears outside a MEM (outer code ADDRESS).  */

void
decompose_lea_address (struct address_info *info, rtx *loc)
{
  decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
}
   6850 
   6851 /* Describe the address of MEM X in INFO.  */
   6852 
   6853 void
   6854 decompose_mem_address (struct address_info *info, rtx x)
   6855 {
   6856   gcc_assert (MEM_P (x));
   6857   decompose_address (info, &XEXP (x, 0), GET_MODE (x),
   6858 		     MEM_ADDR_SPACE (x), MEM);
   6859 }
   6860 
/* Update INFO after a change to the address it describes.  Re-runs the
   decomposition on the stored outer location, reusing the originally
   recorded mode, address space and outer code.  */

void
update_address (struct address_info *info)
{
  decompose_address (info, info->outer, info->mode, info->as,
		     info->addr_outer_code);
}
   6869 
   6870 /* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
   6871    more complicated than that.  */
   6872 
   6873 HOST_WIDE_INT
   6874 get_index_scale (const struct address_info *info)
   6875 {
   6876   rtx index = *info->index;
   6877   if (GET_CODE (index) == MULT
   6878       && CONST_INT_P (XEXP (index, 1))
   6879       && info->index_term == &XEXP (index, 0))
   6880     return INTVAL (XEXP (index, 1));
   6881 
   6882   if (GET_CODE (index) == ASHIFT
   6883       && CONST_INT_P (XEXP (index, 1))
   6884       && info->index_term == &XEXP (index, 0))
   6885     return HOST_WIDE_INT_1 << INTVAL (XEXP (index, 1));
   6886 
   6887   if (info->index == info->index_term)
   6888     return 1;
   6889 
   6890   return 0;
   6891 }
   6892 
   6893 /* Return the "index code" of INFO, in the form required by
   6894    ok_for_base_p_1.  */
   6895 
   6896 enum rtx_code
   6897 get_index_code (const struct address_info *info)
   6898 {
   6899   if (info->index)
   6900     return GET_CODE (*info->index);
   6901 
   6902   if (info->disp)
   6903     return GET_CODE (*info->disp);
   6904 
   6905   return SCRATCH;
   6906 }
   6907 
   6908 /* Return true if RTL X contains a SYMBOL_REF.  */
   6909 
   6910 bool
   6911 contains_symbol_ref_p (const_rtx x)
   6912 {
   6913   subrtx_iterator::array_type array;
   6914   FOR_EACH_SUBRTX (iter, array, x, ALL)
   6915     if (SYMBOL_REF_P (*iter))
   6916       return true;
   6917 
   6918   return false;
   6919 }
   6920 
   6921 /* Return true if RTL X contains a SYMBOL_REF or LABEL_REF.  */
   6922 
   6923 bool
   6924 contains_symbolic_reference_p (const_rtx x)
   6925 {
   6926   subrtx_iterator::array_type array;
   6927   FOR_EACH_SUBRTX (iter, array, x, ALL)
   6928     if (SYMBOL_REF_P (*iter) || GET_CODE (*iter) == LABEL_REF)
   6929       return true;
   6930 
   6931   return false;
   6932 }
   6933 
   6934 /* Return true if RTL X contains a constant pool address.  */
   6935 
   6936 bool
   6937 contains_constant_pool_address_p (const_rtx x)
   6938 {
   6939   subrtx_iterator::array_type array;
   6940   FOR_EACH_SUBRTX (iter, array, x, ALL)
   6941     if (SYMBOL_REF_P (*iter) && CONSTANT_POOL_ADDRESS_P (*iter))
   6942       return true;
   6943 
   6944   return false;
   6945 }
   6946 
   6947 
   6948 /* Return true if X contains a thread-local symbol.  */
   6949 
   6950 bool
   6951 tls_referenced_p (const_rtx x)
   6952 {
   6953   if (!targetm.have_tls)
   6954     return false;
   6955 
   6956   subrtx_iterator::array_type array;
   6957   FOR_EACH_SUBRTX (iter, array, x, ALL)
   6958     if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0)
   6959       return true;
   6960   return false;
   6961 }
   6962 
   6963 /* Process recursively X of INSN and add REG_INC notes if necessary.  */
   6964 void
   6965 add_auto_inc_notes (rtx_insn *insn, rtx x)
   6966 {
   6967   enum rtx_code code = GET_CODE (x);
   6968   const char *fmt;
   6969   int i, j;
   6970 
   6971   if (code == MEM && auto_inc_p (XEXP (x, 0)))
   6972     {
   6973       add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
   6974       return;
   6975     }
   6976 
   6977   /* Scan all X sub-expressions.  */
   6978   fmt = GET_RTX_FORMAT (code);
   6979   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   6980     {
   6981       if (fmt[i] == 'e')
   6982 	add_auto_inc_notes (insn, XEXP (x, i));
   6983       else if (fmt[i] == 'E')
   6984 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
   6985 	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
   6986     }
   6987 }
   6988 
   6989 /* Return true if X is register asm.  */
   6990 
   6991 bool
   6992 register_asm_p (const_rtx x)
   6993 {
   6994   return (REG_P (x)
   6995 	  && REG_EXPR (x) != NULL_TREE
   6996 	  && HAS_DECL_ASSEMBLER_NAME_P (REG_EXPR (x))
   6997 	  && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (x))
   6998 	  && DECL_REGISTER (REG_EXPR (x)));
   6999 }
   7000 
   7001 /* Return true if, for all OP of mode OP_MODE:
   7002 
   7003      (vec_select:RESULT_MODE OP SEL)
   7004 
   7005    is equivalent to the highpart RESULT_MODE of OP.  */
   7006 
   7007 bool
   7008 vec_series_highpart_p (machine_mode result_mode, machine_mode op_mode, rtx sel)
   7009 {
   7010   int nunits;
   7011   if (GET_MODE_NUNITS (op_mode).is_constant (&nunits)
   7012       && targetm.can_change_mode_class (op_mode, result_mode, ALL_REGS))
   7013     {
   7014       int offset = BYTES_BIG_ENDIAN ? 0 : nunits - XVECLEN (sel, 0);
   7015       return rtvec_series_p (XVEC (sel, 0), offset);
   7016     }
   7017   return false;
   7018 }
   7019 
   7020 /* Return true if, for all OP of mode OP_MODE:
   7021 
   7022      (vec_select:RESULT_MODE OP SEL)
   7023 
   7024    is equivalent to the lowpart RESULT_MODE of OP.  */
   7025 
   7026 bool
   7027 vec_series_lowpart_p (machine_mode result_mode, machine_mode op_mode, rtx sel)
   7028 {
   7029   int nunits;
   7030   if (GET_MODE_NUNITS (op_mode).is_constant (&nunits)
   7031       && targetm.can_change_mode_class (op_mode, result_mode, ALL_REGS))
   7032     {
   7033       int offset = BYTES_BIG_ENDIAN ? nunits - XVECLEN (sel, 0) : 0;
   7034       return rtvec_series_p (XVEC (sel, 0), offset);
   7035     }
   7036   return false;
   7037 }
   7038 
   7039 /* Return true if X contains a paradoxical subreg.  */
   7040 
   7041 bool
   7042 contains_paradoxical_subreg_p (rtx x)
   7043 {
   7044   subrtx_var_iterator::array_type array;
   7045   FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
   7046     {
   7047       x = *iter;
   7048       if (SUBREG_P (x) && paradoxical_subreg_p (x))
   7049 	return true;
   7050     }
   7051   return false;
   7052 }
   7053