/* emit-rtl.cc (GCC source; code-browser navigation header removed).  */
      1 /* Emit RTL for the GCC expander.
      2    Copyright (C) 1987-2022 Free Software Foundation, Inc.
      3 
      4 This file is part of GCC.
      5 
      6 GCC is free software; you can redistribute it and/or modify it under
      7 the terms of the GNU General Public License as published by the Free
      8 Software Foundation; either version 3, or (at your option) any later
      9 version.
     10 
     11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
     13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     14 for more details.
     15 
     16 You should have received a copy of the GNU General Public License
     17 along with GCC; see the file COPYING3.  If not see
     18 <http://www.gnu.org/licenses/>.  */
     19 
     20 
     21 /* Middle-to-low level generation of rtx code and insns.
     22 
     23    This file contains support functions for creating rtl expressions
     24    and manipulating them in the doubly-linked chain of insns.
     25 
     26    The patterns of the insns are created by machine-dependent
     27    routines in insn-emit.cc, which is generated automatically from
     28    the machine description.  These routines make the individual rtx's
     29    of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
     30    which are automatically generated from rtl.def; what is machine
     31    dependent is the kind of rtx's they make and what arguments they
     32    use.  */
     33 
     34 #include "config.h"
     35 #include "system.h"
     36 #include "coretypes.h"
     37 #include "memmodel.h"
     38 #include "backend.h"
     39 #include "target.h"
     40 #include "rtl.h"
     41 #include "tree.h"
     42 #include "df.h"
     43 #include "tm_p.h"
     44 #include "stringpool.h"
     45 #include "insn-config.h"
     46 #include "regs.h"
     47 #include "emit-rtl.h"
     48 #include "recog.h"
     49 #include "diagnostic-core.h"
     50 #include "alias.h"
     51 #include "fold-const.h"
     52 #include "varasm.h"
     53 #include "cfgrtl.h"
     54 #include "tree-eh.h"
     55 #include "explow.h"
     56 #include "expr.h"
     57 #include "builtins.h"
     58 #include "rtl-iter.h"
     59 #include "stor-layout.h"
     60 #include "opts.h"
     61 #include "predict.h"
     62 #include "rtx-vector-builder.h"
     63 #include "gimple.h"
     64 #include "gimple-ssa.h"
     65 #include "gimplify.h"
     66 
     67 struct target_rtl default_target_rtl;
     68 #if SWITCHABLE_TARGET
     69 struct target_rtl *this_target_rtl = &default_target_rtl;
     70 #endif
     71 
     72 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
     73 
     74 /* Commonly used modes.  */
     75 
     76 scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
     77 scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
     78 scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
     79 
     80 /* Datastructures maintained for currently processed function in RTL form.  */
     81 
     82 struct rtl_data x_rtl;
     83 
     84 /* Indexed by pseudo register number, gives the rtx for that pseudo.
     85    Allocated in parallel with regno_pointer_align.
     86    FIXME: We could put it into emit_status struct, but gengtype is not able to deal
     87    with length attribute nested in top level structures.  */
     88 
     89 rtx * regno_reg_rtx;
     90 
     91 /* This is *not* reset after each function.  It gives each CODE_LABEL
     92    in the entire compilation a unique label number.  */
     93 
     94 static GTY(()) int label_num = 1;
     95 
     96 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
     97    the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
     98    record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
     99    is set only for MODE_INT and MODE_VECTOR_INT modes.  */
    100 
    101 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
    102 
    103 rtx const_true_rtx;
    104 
    105 REAL_VALUE_TYPE dconst0;
    106 REAL_VALUE_TYPE dconst1;
    107 REAL_VALUE_TYPE dconst2;
    108 REAL_VALUE_TYPE dconstm1;
    109 REAL_VALUE_TYPE dconsthalf;
    110 
    111 /* Record fixed-point constant 0 and 1.  */
    112 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
    113 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
    114 
    115 /* We make one copy of (const_int C) where C is in
    116    [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
    117    to save space during the compilation and simplify comparisons of
    118    integers.  */
    119 
    120 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
    121 
    122 /* Standard pieces of rtx, to be substituted directly into things.  */
    123 rtx pc_rtx;
    124 rtx ret_rtx;
    125 rtx simple_return_rtx;
    126 
    127 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
    128    this pointer should normally never be dereferenced), but is required to be
    129    distinct from NULL_RTX.  Currently used by peephole2 pass.  */
    130 rtx_insn *invalid_insn_rtx;
    131 
    132 /* A hash table storing CONST_INTs whose absolute value is greater
    133    than MAX_SAVED_CONST_INT.  */
    134 
    135 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
    136 {
    137   typedef HOST_WIDE_INT compare_type;
    138 
    139   static hashval_t hash (rtx i);
    140   static bool equal (rtx i, HOST_WIDE_INT h);
    141 };
    142 
    143 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
    144 
    145 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
    146 {
    147   static hashval_t hash (rtx x);
    148   static bool equal (rtx x, rtx y);
    149 };
    150 
    151 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
    152 
    153 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
    154 {
    155   typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
    156 
    157   static hashval_t hash (rtx x);
    158   static bool equal (rtx x, const compare_type &y);
    159 };
    160 
    161 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
    162 
    163 /* A hash table storing register attribute structures.  */
    164 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
    165 {
    166   static hashval_t hash (reg_attrs *x);
    167   static bool equal (reg_attrs *a, reg_attrs *b);
    168 };
    169 
    170 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
    171 
    172 /* A hash table storing all CONST_DOUBLEs.  */
    173 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
    174 {
    175   static hashval_t hash (rtx x);
    176   static bool equal (rtx x, rtx y);
    177 };
    178 
    179 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
    180 
    181 /* A hash table storing all CONST_FIXEDs.  */
    182 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
    183 {
    184   static hashval_t hash (rtx x);
    185   static bool equal (rtx x, rtx y);
    186 };
    187 
    188 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
    189 
    190 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
    191 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
    192 #define first_label_num (crtl->emit.x_first_label_num)
    193 
    194 static void set_used_decls (tree);
    195 static void mark_label_nuses (rtx);
    196 #if TARGET_SUPPORTS_WIDE_INT
    197 static rtx lookup_const_wide_int (rtx);
    198 #endif
    199 static rtx lookup_const_double (rtx);
    200 static rtx lookup_const_fixed (rtx);
    201 static rtx gen_const_vector (machine_mode, int);
    202 static void copy_rtx_if_shared_1 (rtx *orig);
    203 
    204 /* Probability of the conditional branch currently proceeded by try_split.  */
    205 profile_probability split_branch_probability;
    206 
    207 /* Returns a hash code for X (which is a really a CONST_INT).  */
    209 
    210 hashval_t
    211 const_int_hasher::hash (rtx x)
    212 {
    213   return (hashval_t) INTVAL (x);
    214 }
    215 
    216 /* Returns nonzero if the value represented by X (which is really a
    217    CONST_INT) is the same as that given by Y (which is really a
    218    HOST_WIDE_INT *).  */
    219 
    220 bool
    221 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
    222 {
    223   return (INTVAL (x) == y);
    224 }
    225 
    226 #if TARGET_SUPPORTS_WIDE_INT
    227 /* Returns a hash code for X (which is a really a CONST_WIDE_INT).  */
    228 
    229 hashval_t
    230 const_wide_int_hasher::hash (rtx x)
    231 {
    232   int i;
    233   unsigned HOST_WIDE_INT hash = 0;
    234   const_rtx xr = x;
    235 
    236   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    237     hash += CONST_WIDE_INT_ELT (xr, i);
    238 
    239   return (hashval_t) hash;
    240 }
    241 
    242 /* Returns nonzero if the value represented by X (which is really a
    243    CONST_WIDE_INT) is the same as that given by Y (which is really a
    244    CONST_WIDE_INT).  */
    245 
    246 bool
    247 const_wide_int_hasher::equal (rtx x, rtx y)
    248 {
    249   int i;
    250   const_rtx xr = x;
    251   const_rtx yr = y;
    252   if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    253     return false;
    254 
    255   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    256     if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
    257       return false;
    258 
    259   return true;
    260 }
    261 #endif
    262 
    263 /* Returns a hash code for CONST_POLY_INT X.  */
    264 
    265 hashval_t
    266 const_poly_int_hasher::hash (rtx x)
    267 {
    268   inchash::hash h;
    269   h.add_int (GET_MODE (x));
    270   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    271     h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
    272   return h.end ();
    273 }
    274 
    275 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */
    276 
    277 bool
    278 const_poly_int_hasher::equal (rtx x, const compare_type &y)
    279 {
    280   if (GET_MODE (x) != y.first)
    281     return false;
    282   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    283     if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
    284       return false;
    285   return true;
    286 }
    287 
    288 /* Returns a hash code for X (which is really a CONST_DOUBLE).  */
    289 hashval_t
    290 const_double_hasher::hash (rtx x)
    291 {
    292   const_rtx const value = x;
    293   hashval_t h;
    294 
    295   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    296     h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
    297   else
    298     {
    299       h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
    300       /* MODE is used in the comparison, so it should be in the hash.  */
    301       h ^= GET_MODE (value);
    302     }
    303   return h;
    304 }
    305 
    306 /* Returns nonzero if the value represented by X (really a ...)
    307    is the same as that represented by Y (really a ...) */
    308 bool
    309 const_double_hasher::equal (rtx x, rtx y)
    310 {
    311   const_rtx const a = x, b = y;
    312 
    313   if (GET_MODE (a) != GET_MODE (b))
    314     return 0;
    315   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    316     return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
    317 	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
    318   else
    319     return real_identical (CONST_DOUBLE_REAL_VALUE (a),
    320 			   CONST_DOUBLE_REAL_VALUE (b));
    321 }
    322 
    323 /* Returns a hash code for X (which is really a CONST_FIXED).  */
    324 
    325 hashval_t
    326 const_fixed_hasher::hash (rtx x)
    327 {
    328   const_rtx const value = x;
    329   hashval_t h;
    330 
    331   h = fixed_hash (CONST_FIXED_VALUE (value));
    332   /* MODE is used in the comparison, so it should be in the hash.  */
    333   h ^= GET_MODE (value);
    334   return h;
    335 }
    336 
    337 /* Returns nonzero if the value represented by X is the same as that
    338    represented by Y.  */
    339 
    340 bool
    341 const_fixed_hasher::equal (rtx x, rtx y)
    342 {
    343   const_rtx const a = x, b = y;
    344 
    345   if (GET_MODE (a) != GET_MODE (b))
    346     return 0;
    347   return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
    348 }
    349 
    350 /* Return true if the given memory attributes are equal.  */
    351 
    352 bool
    353 mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
    354 {
    355   if (p == q)
    356     return true;
    357   if (!p || !q)
    358     return false;
    359   return (p->alias == q->alias
    360 	  && p->offset_known_p == q->offset_known_p
    361 	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
    362 	  && p->size_known_p == q->size_known_p
    363 	  && (!p->size_known_p || known_eq (p->size, q->size))
    364 	  && p->align == q->align
    365 	  && p->addrspace == q->addrspace
    366 	  && (p->expr == q->expr
    367 	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
    368 		  && operand_equal_p (p->expr, q->expr, 0))));
    369 }
    370 
    371 /* Set MEM's memory attributes so that they are the same as ATTRS.  */
    372 
    373 static void
    374 set_mem_attrs (rtx mem, mem_attrs *attrs)
    375 {
    376   /* If everything is the default, we can just clear the attributes.  */
    377   if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    378     {
    379       MEM_ATTRS (mem) = 0;
    380       return;
    381     }
    382 
    383   if (!MEM_ATTRS (mem)
    384       || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    385     {
    386       MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
    387       memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    388     }
    389 }
    390 
    391 /* Returns a hash code for X (which is a really a reg_attrs *).  */
    392 
    393 hashval_t
    394 reg_attr_hasher::hash (reg_attrs *x)
    395 {
    396   const reg_attrs *const p = x;
    397 
    398   inchash::hash h;
    399   h.add_ptr (p->decl);
    400   h.add_poly_hwi (p->offset);
    401   return h.end ();
    402 }
    403 
    404 /* Returns nonzero if the value represented by X  is the same as that given by
    405    Y.  */
    406 
    407 bool
    408 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
    409 {
    410   const reg_attrs *const p = x;
    411   const reg_attrs *const q = y;
    412 
    413   return (p->decl == q->decl && known_eq (p->offset, q->offset));
    414 }
    415 /* Allocate a new reg_attrs structure and insert it into the hash table if
    416    one identical to it is not already in the table.  We are doing this for
    417    MEM of mode MODE.  */
    418 
    419 static reg_attrs *
    420 get_reg_attrs (tree decl, poly_int64 offset)
    421 {
    422   reg_attrs attrs;
    423 
    424   /* If everything is the default, we can just return zero.  */
    425   if (decl == 0 && known_eq (offset, 0))
    426     return 0;
    427 
    428   attrs.decl = decl;
    429   attrs.offset = offset;
    430 
    431   reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
    432   if (*slot == 0)
    433     {
    434       *slot = ggc_alloc<reg_attrs> ();
    435       memcpy (*slot, &attrs, sizeof (reg_attrs));
    436     }
    437 
    438   return *slot;
    439 }
    440 
    441 
    442 #if !HAVE_blockage
    443 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
    444    and to block register equivalences to be seen across this insn.  */
    445 
    446 rtx
    447 gen_blockage (void)
    448 {
    449   rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
    450   MEM_VOLATILE_P (x) = true;
    451   return x;
    452 }
    453 #endif
    454 
    455 
    456 /* Set the mode and register number of X to MODE and REGNO.  */
    457 
    458 void
    459 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
    460 {
    461   unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
    462 			? hard_regno_nregs (regno, mode)
    463 			: 1);
    464   PUT_MODE_RAW (x, mode);
    465   set_regno_raw (x, regno, nregs);
    466 }
    467 
    468 /* Initialize a fresh REG rtx with mode MODE and register REGNO.  */
    469 
    470 rtx
    471 init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
    472 {
    473   set_mode_and_regno (x, mode, regno);
    474   REG_ATTRS (x) = NULL;
    475   ORIGINAL_REGNO (x) = regno;
    476   return x;
    477 }
    478 
    479 /* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
    480    don't attempt to share with the various global pieces of rtl (such as
    481    frame_pointer_rtx).  */
    482 
    483 rtx
    484 gen_raw_REG (machine_mode mode, unsigned int regno)
    485 {
    486   rtx x = rtx_alloc (REG MEM_STAT_INFO);
    487   init_raw_REG (x, mode, regno);
    488   return x;
    489 }
    490 
    491 /* There are some RTL codes that require special attention; the generation
    492    functions do the raw handling.  If you add to this list, modify
    493    special_rtx in gengenrtl.cc as well.  */
    494 
    495 rtx_expr_list *
    496 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
    497 {
    498   return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
    499 						 expr_list));
    500 }
    501 
    502 rtx_insn_list *
    503 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
    504 {
    505   return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
    506 						 insn_list));
    507 }
    508 
    509 rtx_insn *
    510 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
    511 	      basic_block bb, rtx pattern, int location, int code,
    512 	      rtx reg_notes)
    513 {
    514   return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
    515 						 prev_insn, next_insn,
    516 						 bb, pattern, location, code,
    517 						 reg_notes));
    518 }
    519 
/* Return the (shared) CONST_INT rtx for ARG.  MODE is ignored;
   CONST_INTs are always VOIDmode so that identical values compare
   pointer-equal regardless of context.  */

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  /* Small values come from the pre-allocated const_int_rtx cache.  */
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  /* Targets with an unusual STORE_FLAG_VALUE share const_true_rtx
     (once it has been initialized) for that value.  */
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
    539 
    540 rtx
    541 gen_int_mode (poly_int64 c, machine_mode mode)
    542 {
    543   c = trunc_int_for_mode (c, mode);
    544   if (c.is_constant ())
    545     return GEN_INT (c.coeffs[0]);
    546   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
    547   return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
    548 }
    549 
    550 /* CONST_DOUBLEs might be created from pairs of integers, or from
    551    REAL_VALUE_TYPEs.  Also, their length is known only at run time,
    552    so we cannot use gen_rtx_raw_CONST_DOUBLE.  */
    553 
    554 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
    555    hash table.  If so, return its counterpart; otherwise add it
    556    to the hash table and return it.  */
    557 static rtx
    558 lookup_const_double (rtx real)
    559 {
    560   rtx *slot = const_double_htab->find_slot (real, INSERT);
    561   if (*slot == 0)
    562     *slot = real;
    563 
    564   return *slot;
    565 }
    566 
    567 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
    568    VALUE in mode MODE.  */
    569 rtx
    570 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
    571 {
    572   rtx real = rtx_alloc (CONST_DOUBLE);
    573   PUT_MODE (real, mode);
    574 
    575   real->u.rv = value;
    576 
    577   return lookup_const_double (real);
    578 }
    579 
    580 /* Determine whether FIXED, a CONST_FIXED, already exists in the
    581    hash table.  If so, return its counterpart; otherwise add it
    582    to the hash table and return it.  */
    583 
    584 static rtx
    585 lookup_const_fixed (rtx fixed)
    586 {
    587   rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
    588   if (*slot == 0)
    589     *slot = fixed;
    590 
    591   return *slot;
    592 }
    593 
    594 /* Return a CONST_FIXED rtx for a fixed-point value specified by
    595    VALUE in mode MODE.  */
    596 
    597 rtx
    598 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
    599 {
    600   rtx fixed = rtx_alloc (CONST_FIXED);
    601   PUT_MODE (fixed, mode);
    602 
    603   fixed->u.fv = value;
    604 
    605   return lookup_const_fixed (fixed);
    606 }
    607 
    608 #if TARGET_SUPPORTS_WIDE_INT == 0
    609 /* Constructs double_int from rtx CST.  */
    610 
    611 double_int
    612 rtx_to_double_int (const_rtx cst)
    613 {
    614   double_int r;
    615 
    616   if (CONST_INT_P (cst))
    617       r = double_int::from_shwi (INTVAL (cst));
    618   else if (CONST_DOUBLE_AS_INT_P (cst))
    619     {
    620       r.low = CONST_DOUBLE_LOW (cst);
    621       r.high = CONST_DOUBLE_HIGH (cst);
    622     }
    623   else
    624     gcc_unreachable ();
    625 
    626   return r;
    627 }
    628 #endif
    629 
    630 #if TARGET_SUPPORTS_WIDE_INT
    631 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
    632    If so, return its counterpart; otherwise add it to the hash table and
    633    return it.  */
    634 
    635 static rtx
    636 lookup_const_wide_int (rtx wint)
    637 {
    638   rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
    639   if (*slot == 0)
    640     *slot = wint;
    641 
    642   return *slot;
    643 }
    644 #endif
    645 
/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

static rtx
immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  /* Single-word values fit in a CONST_INT.  */
  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    /* Drop element words beyond what the mode's precision requires.  */
    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    /* Share one rtx per distinct value.  */
    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
    691 
    692 #if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only from copies of the sign bit, and sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  /* Clear any remaining XWINT slots beyond the two value words.  */
  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  /* Share one rtx per distinct value via the hash table.  */
  return lookup_const_double (value);
}
    737 #endif
    738 
/* Return an rtx representation of C in mode MODE.  Compile-time
   constants go through immed_wide_int_const_1; genuinely polynomial
   values are hash-consed as CONST_POLY_INTs.  */

rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  /* The coefficient storage trails the fixed-size rtx header.  */
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  /* Record the new rtx in the slot found above.  */
  *slot = x;
  return x;
}
    785 
/* Return a REG rtx for register REGNO in mode MODE, reusing the
   pre-allocated global rtxes for the well-known pointer registers
   where that is safe, and otherwise allocating a fresh unshared REG.  */

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      /* Only share the PIC register when it is a fixed register, so
	 the shared rtx can never be reallocated.  */
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.   Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
    852 
    853 rtx
    854 gen_rtx_MEM (machine_mode mode, rtx addr)
    855 {
    856   rtx rt = gen_rtx_raw_MEM (mode, addr);
    857 
    858   /* This field is not cleared by the mere allocation of the rtx, so
    859      we clear it here.  */
    860   MEM_ATTRS (rt) = 0;
    861 
    862   return rt;
    863 }
    864 
    865 /* Generate a memory referring to non-trapping constant memory.  */
    866 
    867 rtx
    868 gen_const_mem (machine_mode mode, rtx addr)
    869 {
    870   rtx mem = gen_rtx_MEM (mode, addr);
    871   MEM_READONLY_P (mem) = 1;
    872   MEM_NOTRAP_P (mem) = 1;
    873   return mem;
    874 }
    875 
    876 /* Generate a MEM referring to fixed portions of the frame, e.g., register
    877    save areas.  */
    878 
    879 rtx
    880 gen_frame_mem (machine_mode mode, rtx addr)
    881 {
    882   rtx mem = gen_rtx_MEM (mode, addr);
    883   MEM_NOTRAP_P (mem) = 1;
    884   set_mem_alias_set (mem, get_frame_alias_set ());
    885   return mem;
    886 }
    887 
    888 /* Generate a MEM referring to a temporary use of the stack, not part
    889     of the fixed stack frame.  For example, something which is pushed
    890     by a target splitter.  */
    891 rtx
    892 gen_tmp_stack_mem (machine_mode mode, rtx addr)
    893 {
    894   rtx mem = gen_rtx_MEM (mode, addr);
    895   MEM_NOTRAP_P (mem) = 1;
    896   if (!cfun->calls_alloca)
    897     set_mem_alias_set (mem, get_frame_alias_set ());
    898   return mem;
    899 }
    900 
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  REG, if nonnull,
   is the object being wrapped; it is only consulted when it is a hard
   register, where stricter per-target rules apply.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, poly_uint64 offset)
{
  poly_uint64 isize = GET_MODE_SIZE (imode);
  poly_uint64 osize = GET_MODE_SIZE (omode);

  /* The sizes must be ordered, so that we know whether the subreg
     is partial, paradoxical or complete.  */
  if (!ordered_p (isize, osize))
    return false;

  /* All subregs must be aligned.  */
  if (!multiple_p (offset, osize))
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (maybe_ge (offset, isize))
    return false;

  /* REGSIZE is the number of bytes of IMODE that fit in one hard
     register on this target.  */
  poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (known_ge (osize, regsize) && known_ge (isize, osize))
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0) or (subreg:V4SF (reg:V2SF) 0).  This
     surely isn't the cleanest way to represent this.  It's questionable
     if this ought to be represented at all -- why can't this all be hidden
     in post-reload splitters that make arbitrarily mode changes to the
     registers themselves.  */
  else if (VECTOR_MODE_P (omode)
	   && GET_MODE_INNER (omode) == GET_MODE_INNER (imode))
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (known_eq (isize, osize)
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register
		that must be used in different modes within one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (maybe_gt (osize, isize))
    return known_eq (offset, 0U);

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      /* Component extraction from a complex or vector hard register is
	 always permitted; otherwise the target can veto the mode change.  */
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
	return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* The outer size must be ordered wrt the register size, otherwise
     we wouldn't know at compile time how many registers the outer
     mode occupies.  */
  if (!ordered_p (osize, regsize))
    return false;

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (maybe_lt (osize, regsize)
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      /* It is invalid for the target to pick a register size for a mode
	 that isn't ordered wrt to the size of that mode.  */
      poly_uint64 block_size = ordered_min (isize, regsize);
      unsigned int start_reg;
      poly_uint64 offset_within_reg;
      if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
	  || (BYTES_BIG_ENDIAN
	      ? maybe_ne (offset_within_reg, block_size - osize)
	      : maybe_ne (offset_within_reg, 0U)))
	return false;
    }
  return true;
}
   1019 
   1020 rtx
   1021 gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
   1022 {
   1023   gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
   1024   return gen_rtx_raw_SUBREG (mode, reg, offset);
   1025 }
   1026 
   1027 /* Generate a SUBREG representing the least-significant part of REG if MODE
   1028    is smaller than mode of REG, otherwise paradoxical SUBREG.  */
   1029 
   1030 rtx
   1031 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
   1032 {
   1033   machine_mode inmode;
   1034 
   1035   inmode = GET_MODE (reg);
   1036   if (inmode == VOIDmode)
   1037     inmode = mode;
   1038   return gen_rtx_SUBREG (mode, reg,
   1039 			 subreg_lowpart_offset (mode, inmode));
   1040 }
   1041 
   1042 rtx
   1043 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
   1044 		      enum var_init_status status)
   1045 {
   1046   rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
   1047   PAT_VAR_LOCATION_STATUS (x) = status;
   1048   return x;
   1049 }
   1050 
   1051 
   1053 /* Create an rtvec and stores within it the RTXen passed in the arguments.  */
   1054 
   1055 rtvec
   1056 gen_rtvec (int n, ...)
   1057 {
   1058   int i;
   1059   rtvec rt_val;
   1060   va_list p;
   1061 
   1062   va_start (p, n);
   1063 
   1064   /* Don't allocate an empty rtvec...  */
   1065   if (n == 0)
   1066     {
   1067       va_end (p);
   1068       return NULL_RTVEC;
   1069     }
   1070 
   1071   rt_val = rtvec_alloc (n);
   1072 
   1073   for (i = 0; i < n; i++)
   1074     rt_val->elem[i] = va_arg (p, rtx);
   1075 
   1076   va_end (p);
   1077   return rt_val;
   1078 }
   1079 
   1080 rtvec
   1081 gen_rtvec_v (int n, rtx *argp)
   1082 {
   1083   int i;
   1084   rtvec rt_val;
   1085 
   1086   /* Don't allocate an empty rtvec...  */
   1087   if (n == 0)
   1088     return NULL_RTVEC;
   1089 
   1090   rt_val = rtvec_alloc (n);
   1091 
   1092   for (i = 0; i < n; i++)
   1093     rt_val->elem[i] = *argp++;
   1094 
   1095   return rt_val;
   1096 }
   1097 
   1098 rtvec
   1099 gen_rtvec_v (int n, rtx_insn **argp)
   1100 {
   1101   int i;
   1102   rtvec rt_val;
   1103 
   1104   /* Don't allocate an empty rtvec...  */
   1105   if (n == 0)
   1106     return NULL_RTVEC;
   1107 
   1108   rt_val = rtvec_alloc (n);
   1109 
   1110   for (i = 0; i < n; i++)
   1111     rt_val->elem[i] = *argp++;
   1112 
   1113   return rt_val;
   1114 }
   1115 
   1116 
   1117 /* Return the number of bytes between the start of an OUTER_MODE
   1119    in-memory value and the start of an INNER_MODE in-memory value,
   1120    given that the former is a lowpart of the latter.  It may be a
   1121    paradoxical lowpart, in which case the offset will be negative
   1122    on big-endian targets.  */
   1123 
   1124 poly_int64
   1125 byte_lowpart_offset (machine_mode outer_mode,
   1126 		     machine_mode inner_mode)
   1127 {
   1128   if (paradoxical_subreg_p (outer_mode, inner_mode))
   1129     return -subreg_lowpart_offset (inner_mode, outer_mode);
   1130   else
   1131     return subreg_lowpart_offset (outer_mode, inner_mode);
   1132 }
   1133 
   1134 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
   1135    from address X.  For paradoxical big-endian subregs this is a
   1136    negative value, otherwise it's the same as OFFSET.  */
   1137 
   1138 poly_int64
   1139 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
   1140 		      poly_uint64 offset)
   1141 {
   1142   if (paradoxical_subreg_p (outer_mode, inner_mode))
   1143     {
   1144       gcc_assert (known_eq (offset, 0U));
   1145       return -subreg_lowpart_offset (inner_mode, outer_mode);
   1146     }
   1147   return offset;
   1148 }
   1149 
   1150 /* As above, but return the offset that existing subreg X would have
   1151    if SUBREG_REG (X) were stored in memory.  The only significant thing
   1152    about the current SUBREG_REG is its mode.  */
   1153 
   1154 poly_int64
   1155 subreg_memory_offset (const_rtx x)
   1156 {
   1157   return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
   1158 			       SUBREG_BYTE (x));
   1159 }
   1160 
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  /* New pseudos may only be created while that is still allowed
     (i.e. before register allocation is complete).  */
  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      /* Note the recursion: each part is itself a fresh pseudo.  */
      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Grow the per-function side tables if reg_rtx_no would overrun
     them.  */
  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  /* Record the new pseudo in regno_reg_rtx and advance the counter,
     so reg_rtx_no is always the next unused pseudo number.  */
  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
   1212 
   1213 /* Make sure m_regno_pointer_align, and regno_reg_rtx are large
   1214    enough to have elements in the range 0 <= idx <= reg_rtx_no.  */
   1215 
   1216 void
   1217 emit_status::ensure_regno_capacity ()
   1218 {
   1219   int old_size = regno_pointer_align_length;
   1220 
   1221   if (reg_rtx_no < old_size)
   1222     return;
   1223 
   1224   int new_size = old_size * 2;
   1225   while (reg_rtx_no >= new_size)
   1226     new_size *= 2;
   1227 
   1228   char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
   1229   memset (tmp + old_size, 0, new_size - old_size);
   1230   regno_pointer_align = (unsigned char *) tmp;
   1231 
   1232   rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
   1233   memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
   1234   regno_reg_rtx = new1;
   1235 
   1236   crtl->emit.regno_pointer_align_length = new_size;
   1237 }
   1238 
   1239 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */
   1240 
   1241 bool
   1242 reg_is_parm_p (rtx reg)
   1243 {
   1244   tree decl;
   1245 
   1246   gcc_assert (REG_P (reg));
   1247   decl = REG_EXPR (reg);
   1248   return (decl && TREE_CODE (decl) == PARM_DECL);
   1249 }
   1250 
   1251 /* Update NEW with the same attributes as REG, but with OFFSET added
   1252    to the REG_OFFSET.  */
   1253 
   1254 static void
   1255 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
   1256 {
   1257   REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
   1258 				       REG_OFFSET (reg) + offset);
   1259 }
   1260 
   1261 /* Generate a register with same attributes as REG, but with OFFSET
   1262    added to the REG_OFFSET.  */
   1263 
   1264 rtx
   1265 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
   1266 		    poly_int64 offset)
   1267 {
   1268   /* Use gen_raw_REG rather than gen_rtx_REG, because otherwise we'd
   1269      overwrite REG_ATTRS (and in the callers often ORIGINAL_REGNO too)
   1270      of the shared REG rtxes like stack_pointer_rtx etc.  This should
   1271      happen only for SUBREGs from DEBUG_INSNs, RA should ensure
   1272      multi-word registers don't overlap the special registers like
   1273      stack pointer.  */
   1274   rtx new_rtx = gen_raw_REG (mode, regno);
   1275 
   1276   update_reg_offset (new_rtx, reg, offset);
   1277   return new_rtx;
   1278 }
   1279 
   1280 /* Generate a new pseudo-register with the same attributes as REG, but
   1281    with OFFSET added to the REG_OFFSET.  */
   1282 
   1283 rtx
   1284 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
   1285 {
   1286   rtx new_rtx = gen_reg_rtx (mode);
   1287 
   1288   update_reg_offset (new_rtx, reg, offset);
   1289   return new_rtx;
   1290 }
   1291 
   1292 /* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   1293    new register is a (possibly paradoxical) lowpart of the old one.  */
   1294 
   1295 void
   1296 adjust_reg_mode (rtx reg, machine_mode mode)
   1297 {
   1298   update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
   1299   PUT_MODE (reg, mode);
   1300 }
   1301 
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  poly_int64 offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  Strip extensions and lowpart subregs down to the
     underlying value first.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      /* An extension whose signedness disagrees with how the target
	 extends pointers means the result cannot be treated as a
	 pointer (unless the target has a ptr_extend pattern).  */
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  /* REG is a lowpart of X, so compute its byte offset within X.  */
  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
   1354 
   1355 /* Generate a REG rtx for a new pseudo register, copying the mode
   1356    and attributes from X.  */
   1357 
   1358 rtx
   1359 gen_reg_rtx_and_attrs (rtx x)
   1360 {
   1361   rtx reg = gen_reg_rtx (GET_MODE (x));
   1362   set_reg_attrs_from_value (reg, x);
   1363   return reg;
   1364 }
   1365 
   1366 /* Set the register attributes for registers contained in PARM_RTX.
   1367    Use needed values from memory attributes of MEM.  */
   1368 
   1369 void
   1370 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
   1371 {
   1372   if (REG_P (parm_rtx))
   1373     set_reg_attrs_from_value (parm_rtx, mem);
   1374   else if (GET_CODE (parm_rtx) == PARALLEL)
   1375     {
   1376       /* Check for a NULL entry in the first slot, used to indicate that the
   1377 	 parameter goes both on the stack and in registers.  */
   1378       int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
   1379       for (; i < XVECLEN (parm_rtx, 0); i++)
   1380 	{
   1381 	  rtx x = XVECEXP (parm_rtx, 0, i);
   1382 	  if (REG_P (XEXP (x, 0)))
   1383 	    REG_ATTRS (XEXP (x, 0))
   1384 	      = get_reg_attrs (MEM_EXPR (mem),
   1385 			       INTVAL (XEXP (x, 1)));
   1386 	}
   1387     }
   1388 }
   1389 
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  X may be a REG, a lowpart SUBREG of a REG, a CONCAT or a
   PARALLEL; a null T is ignored.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  /* Look through a lowpart SUBREG to the underlying register; the
     attributes are set on the SUBREG_REG.  */
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  /* A CONCAT holds the real part in operand 0 and the remaining part
     in operand 1; the latter's offset is the size of one unit of the
     first operand's mode.  */
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      /* Each remaining entry pairs a register with its offset in T.  */
      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
   1437 
   1438 /* Assign the RTX X to declaration T.  */
   1439 
   1440 void
   1441 set_decl_rtl (tree t, rtx x)
   1442 {
   1443   DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
   1444   if (x)
   1445     set_reg_attrs_for_decl_rtl (t, x);
   1446 }
   1447 
   1448 /* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   1449    if the ABI requires the parameter to be passed by reference.  */
   1450 
   1451 void
   1452 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
   1453 {
   1454   DECL_INCOMING_RTL (t) = x;
   1455   if (x && !by_reference_p)
   1456     set_reg_attrs_for_decl_rtl (t, x);
   1457 }
   1458 
   1459 /* Identify REG (which may be a CONCAT) as a user register.  */
   1460 
   1461 void
   1462 mark_user_reg (rtx reg)
   1463 {
   1464   if (GET_CODE (reg) == CONCAT)
   1465     {
   1466       REG_USERVAR_P (XEXP (reg, 0)) = 1;
   1467       REG_USERVAR_P (XEXP (reg, 1)) = 1;
   1468     }
   1469   else
   1470     {
   1471       gcc_assert (REG_P (reg));
   1472       REG_USERVAR_P (reg) = 1;
   1473     }
   1474 }
   1475 
   1476 /* Identify REG as a probable pointer register and show its alignment
   1477    as ALIGN, if nonzero.  */
   1478 
   1479 void
   1480 mark_reg_pointer (rtx reg, int align)
   1481 {
   1482   if (! REG_POINTER (reg))
   1483     {
   1484       REG_POINTER (reg) = 1;
   1485 
   1486       if (align)
   1487 	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
   1488     }
   1489   else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
   1490     /* We can no-longer be sure just how aligned this pointer is.  */
   1491     REGNO_POINTER_ALIGN (REGNO (reg)) = align;
   1492 }
   1493 
/* Return 1 plus largest pseudo reg number used in the current function.
   (reg_rtx_no is advanced by gen_reg_rtx each time a pseudo is created,
   so it is always the next unused register number.)  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}
   1501 
/* Return 1 + the largest label number used so far in the current
   function, i.e. the next label number that would be assigned.  */

int
max_label_num (void)
{
  return label_num;
}
   1509 
/* Return first label number used in this function (if any were used).
   See maybe_set_first_label_num for how this may be lowered for labels
   created during nested-function expansion.  */

int
get_first_label_num (void)
{
  return first_label_num;
}
   1517 
   1518 /* If the rtx for label was created during the expansion of a nested
   1519    function, then first_label_num won't include this label number.
   1520    Fix this now so that array indices work later.  */
   1521 
   1522 void
   1523 maybe_set_first_label_num (rtx_code_label *x)
   1524 {
   1525   if (CODE_LABEL_NUMBER (x) < first_label_num)
   1526     first_label_num = CODE_LABEL_NUMBER (x);
   1527 }
   1528 
   1529 /* For use by the RTL function loader, when mingling with normal
   1530    functions.
   1531    Ensure that label_num is greater than the label num of X, to avoid
   1532    duplicate labels in the generated assembler.  */
   1533 
   1534 void
   1535 maybe_set_max_label_num (rtx_code_label *x)
   1536 {
   1537   if (CODE_LABEL_NUMBER (x) >= label_num)
   1538     label_num = CODE_LABEL_NUMBER (x) + 1;
   1539 }
   1540 
   1541 
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.cc and combine.cc.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && known_le (msize * BITS_PER_UNIT,
		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
    innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
  else if (innermode == VOIDmode)
    innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  /* Same mode: X already is its own lowpart.  */
  if (innermode == mode)
    return x;

  /* The size of the outer and inner modes must be ordered.  */
  poly_uint64 xsize = GET_MODE_SIZE (innermode);
  if (!ordered_p (msize, xsize))
    return 0;

  if (SCALAR_FLOAT_MODE_P (mode))
    {
      /* Don't allow paradoxical FLOAT_MODE subregs.  */
      if (maybe_gt (msize, xsize))
	return 0;
    }
  else
    {
      /* MODE must occupy no more of the underlying registers than X.  */
      poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
      unsigned int mregs, xregs;
      if (!can_div_away_from_zero_p (msize, regsize, &mregs)
	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
	  || mregs > xregs)
	return 0;
    }

  scalar_int_mode int_mode, int_innermode, from_mode;
  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (from_mode == int_mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
	return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    }
  /* Registers, subregs, concats and constants can be handled by
     lowpart_subreg directly.  */
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
	   || CONST_POLY_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
   1627 
/* Return an rtx for the high-order part of X in mode MODE, i.e. the
   part at subreg_highpart_offset within X's mode.  */

rtx
gen_highpart (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));

  /* gen_lowpart_common handles a lot of special cases due to needing to handle
     paradoxical subregs; it only calls simplify_gen_subreg when certain that
     it will produce something meaningful.  The only case we need to handle
     specially here is MEM.  */
  if (MEM_P (x))
    {
      poly_int64 offset = subreg_highpart_offset (mode, GET_MODE (x));
      return adjust_address (x, mode, offset);
    }

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  /* Since we handle MEM directly above, we should never get a MEM back
     from simplify_gen_subreg.  */
  gcc_assert (result && !MEM_P (result));

  return result;
}
   1658 
   1659 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
   1660    be VOIDmode constant.  */
   1661 rtx
   1662 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
   1663 {
   1664   if (GET_MODE (exp) != VOIDmode)
   1665     {
   1666       gcc_assert (GET_MODE (exp) == innermode);
   1667       return gen_highpart (outermode, exp);
   1668     }
   1669   return simplify_gen_subreg (outermode, exp, innermode,
   1670 			      subreg_highpart_offset (outermode, innermode));
   1671 }
   1672 
   1673 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   1674    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
   1675 
   1676 poly_uint64
   1677 subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
   1678 {
   1679   gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
   1680   if (maybe_gt (outer_bytes, inner_bytes))
   1681     /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
   1682     return 0;
   1683 
   1684   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
   1685     return inner_bytes - outer_bytes;
   1686   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
   1687     return 0;
   1688   else
   1689     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
   1690 }
   1691 
   1692 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   1693    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
   1694 
   1695 poly_uint64
   1696 subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
   1697 {
   1698   gcc_assert (known_ge (inner_bytes, outer_bytes));
   1699 
   1700   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
   1701     return 0;
   1702   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
   1703     return inner_bytes - outer_bytes;
   1704   else
   1705     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
   1706 					(inner_bytes - outer_bytes)
   1707 					* BITS_PER_UNIT);
   1708 }
   1709 
   1710 /* Return 1 iff X, assumed to be a SUBREG,
   1711    refers to the least significant part of its containing reg.
   1712    If X is not a SUBREG, always return 1 (it is its own low part!).  */
   1713 
   1714 int
   1715 subreg_lowpart_p (const_rtx x)
   1716 {
   1717   if (GET_CODE (x) != SUBREG)
   1718     return 1;
   1719   else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
   1720     return 0;
   1721 
   1722   return known_eq (subreg_lowpart_offset (GET_MODE (x),
   1723 					  GET_MODE (SUBREG_REG (x))),
   1724 		   SUBREG_BYTE (x));
   1725 }
   1726 
   1727 /* Return subword OFFSET of operand OP.
   1729    The word number, OFFSET, is interpreted as the word number starting
   1730    at the low-order address.  OFFSET 0 is the low-order word if not
   1731    WORDS_BIG_ENDIAN, otherwise it is the high-order word.
   1732 
   1733    If we cannot extract the required word, we return zero.  Otherwise,
   1734    an rtx corresponding to the requested word will be returned.
   1735 
   1736    VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   1737    reload has completed, a valid address will always be returned.  After
   1738    reload, if a valid address cannot be returned, we return zero.
   1739 
   1740    If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   1741    it is the responsibility of the caller.
   1742 
   1743    MODE is the mode of OP in case it is a CONST_INT.
   1744 
   1745    ??? This is still rather broken for some cases.  The problem for the
   1746    moment is that all callers of this thing provide no 'goal mode' to
   1747    tell us to work with.  This exists because all callers were written
   1748    in a word based SUBREG world.
   1749    Now use of this function can be deprecated by simplify_subreg in most
   1750    cases.
   1751  */
   1752 
rtx
operand_subword (rtx op, poly_uint64 offset, int validate_address,
		 machine_mode mode)
{
  /* Default MODE to the mode of OP; OP must then have a mode itself.  */
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  /* After reload only a strictly valid address may be returned;
	     if this one is not, fail by returning zero.  */
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	/* Before reload, force the address into valid form.  */
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
   1794 
   1795 /* Similar to `operand_subword', but never return 0.  If we can't
   1796    extract the required subword, put OP into a register and try again.
   1797    The second attempt must succeed.  We always validate the address in
   1798    this case.
   1799 
   1800    MODE is the mode of OP, in case it is CONST_INT.  */
   1801 
   1802 rtx
   1803 operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
   1804 {
   1805   rtx result = operand_subword (op, offset, 1, mode);
   1806 
   1807   if (result)
   1808     return result;
   1809 
   1810   if (mode != BLKmode && mode != VOIDmode)
   1811     {
   1812       /* If this is a register which cannot be accessed by words, copy it
   1813 	 to a pseudo register.  */
   1814       if (REG_P (op))
   1815 	op = copy_to_reg (op);
   1816       else
   1817 	op = force_reg (mode, op);
   1818     }
   1819 
   1820   result = operand_subword (op, offset, 1, mode);
   1821   gcc_assert (result);
   1822 
   1823   return result;
   1824 }
   1825 
/* Default-construct memory attributes describing an unknown reference:
   no underlying expression, alias set 0, unknown alignment, generic
   address space, and unknown offset and size.  */

mem_attrs::mem_attrs ()
  : expr (NULL_TREE),
    offset (0),
    size (0),
    alias (0),
    align (0),
    addrspace (ADDR_SPACE_GENERIC),
    offset_known_p (false),
    size_known_p (false)
{}
   1837 
   1838 /* Returns 1 if both MEM_EXPR can be considered equal
   1839    and 0 otherwise.  */
   1840 
   1841 int
   1842 mem_expr_equal_p (const_tree expr1, const_tree expr2)
   1843 {
   1844   if (expr1 == expr2)
   1845     return 1;
   1846 
   1847   if (! expr1 || ! expr2)
   1848     return 0;
   1849 
   1850   if (TREE_CODE (expr1) != TREE_CODE (expr2))
   1851     return 0;
   1852 
   1853   return operand_equal_p (expr1, expr2, 0);
   1854 }
   1855 
   1856 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   1857    bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   1858    -1 if not known.  */
   1859 
int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  poly_uint64 offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
	          MAX (align, get_object_alignment (MEM_EXPR (mem))))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  /* Without an expression and a known offset we cannot say anything.  */
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      /* Walk inward through nested COMPONENT_REFs, accumulating the
	 byte offset of each field, until we reach a base whose
	 alignment we can check directly.  */
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  poly_uint64 suboffset;
	  /* Give up on variable or unrepresentable field offsets.  */
	  if (!byte_offset
	      || !poly_int_tree_p (byte_offset, &suboffset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += suboffset;
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      /* <variable>.field: take the alignment from the
		 containing record type.  */
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  HOST_WIDE_INT misalign;
  if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
    return -1;
  return misalign;
}
   1941 
   1942 /* Given REF (a MEM) and T, either the type of X or the expression
   1943    corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   1944    if we are making a new object of this type.  BITPOS is nonzero if
   1945    there is an offset outstanding on T that will be applied later.  */
   1946 
void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 poly_int64 bitpos)
{
  poly_int64 apply_bitpos = 0;
  tree type;
  class mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object or if
     this is an INDIRECT_REF.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF)
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Address-space information is on the base object.  */
	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  new_size = DECL_SIZE_UNIT (t);
	}

      /* ???  If we end up with a constant or a descriptor do not
	 record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t)
	       || TREE_CODE (t) == CONSTRUCTOR)
	;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
	}

      /* Else record it.  */
      else
	{
	  gcc_assert (handled_component_p (t)
		      || TREE_CODE (t) == MEM_REF
		      || TREE_CODE (t) == TARGET_MEM_REF);
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	}

      /* If this is a reference based on a partitioned decl replace the
	 base with a MEM_REF of the pointer representative we created
	 during stack slot partitioning.  */
      if (attrs.expr
	  && VAR_P (base)
	  && ! is_global_var (base)
	  && cfun->gimple_df->decls_to_pointers != NULL)
	{
	  tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
	  if (namep)
	    {
	      attrs.expr = unshare_expr (attrs.expr);
	      tree *orig_base = &attrs.expr;
	      while (handled_component_p (*orig_base))
		orig_base = &TREE_OPERAND (*orig_base, 0);
	      tree aptrt = reference_alias_ptr_type (*orig_base);
	      *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
				   build_int_cst (aptrt, 0));
	    }
	}

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      /* The object alignment only applies if BITPOS and the recorded
	 bit position agree modulo the alignment.  */
      unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
      if (diff_align != 0)
	obj_align = MIN (obj_align, diff_align);
      attrs.align = MAX (attrs.align, obj_align);
    }

  poly_uint64 const_size;
  if (poly_int_tree_p (new_size, &const_size))
    {
      attrs.size_known_p = true;
      attrs.size = const_size;
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (maybe_ne (apply_bitpos, 0))
    {
      gcc_assert (attrs.offset_known_p);
      poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
      attrs.offset -= bytepos;
      if (attrs.size_known_p)
	attrs.size += bytepos;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}
   2169 
/* Like set_mem_attributes_minus_bitpos, but with no outstanding bit
   position offset on T.  */

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
   2175 
   2176 /* Set the alias set of MEM to SET.  */
   2177 
   2178 void
   2179 set_mem_alias_set (rtx mem, alias_set_type set)
   2180 {
   2181   /* If the new and old alias sets don't conflict, something is wrong.  */
   2182   gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
   2183   mem_attrs attrs (*get_mem_attrs (mem));
   2184   attrs.alias = set;
   2185   set_mem_attrs (mem, &attrs);
   2186 }
   2187 
   2188 /* Set the address space of MEM to ADDRSPACE (target-defined).  */
   2189 
   2190 void
   2191 set_mem_addr_space (rtx mem, addr_space_t addrspace)
   2192 {
   2193   mem_attrs attrs (*get_mem_attrs (mem));
   2194   attrs.addrspace = addrspace;
   2195   set_mem_attrs (mem, &attrs);
   2196 }
   2197 
   2198 /* Set the alignment of MEM to ALIGN bits.  */
   2199 
   2200 void
   2201 set_mem_align (rtx mem, unsigned int align)
   2202 {
   2203   mem_attrs attrs (*get_mem_attrs (mem));
   2204   attrs.align = align;
   2205   set_mem_attrs (mem, &attrs);
   2206 }
   2207 
   2208 /* Set the expr for MEM to EXPR.  */
   2209 
   2210 void
   2211 set_mem_expr (rtx mem, tree expr)
   2212 {
   2213   mem_attrs attrs (*get_mem_attrs (mem));
   2214   attrs.expr = expr;
   2215   set_mem_attrs (mem, &attrs);
   2216 }
   2217 
   2218 /* Set the offset of MEM to OFFSET.  */
   2219 
   2220 void
   2221 set_mem_offset (rtx mem, poly_int64 offset)
   2222 {
   2223   mem_attrs attrs (*get_mem_attrs (mem));
   2224   attrs.offset_known_p = true;
   2225   attrs.offset = offset;
   2226   set_mem_attrs (mem, &attrs);
   2227 }
   2228 
   2229 /* Clear the offset of MEM.  */
   2230 
   2231 void
   2232 clear_mem_offset (rtx mem)
   2233 {
   2234   mem_attrs attrs (*get_mem_attrs (mem));
   2235   attrs.offset_known_p = false;
   2236   set_mem_attrs (mem, &attrs);
   2237 }
   2238 
   2239 /* Set the size of MEM to SIZE.  */
   2240 
   2241 void
   2242 set_mem_size (rtx mem, poly_int64 size)
   2243 {
   2244   mem_attrs attrs (*get_mem_attrs (mem));
   2245   attrs.size_known_p = true;
   2246   attrs.size = size;
   2247   set_mem_attrs (mem, &attrs);
   2248 }
   2249 
   2250 /* Clear the size of MEM.  */
   2251 
   2252 void
   2253 clear_mem_size (rtx mem)
   2254 {
   2255   mem_attrs attrs (*get_mem_attrs (mem));
   2256   attrs.size_known_p = false;
   2257   set_mem_attrs (mem, &attrs);
   2258 }
   2259 
   2260 /* Return a memory reference like MEMREF, but with its mode changed to MODE
   2262    and its address changed to ADDR.  (VOIDmode means don't change the mode.
   2263    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   2264    returned memory location is required to be valid.  INPLACE is true if any
   2265    changes can be made directly to MEMREF or false if MEMREF must be treated
   2266    as immutable.
   2267 
   2268    The memory attributes are not changed.  */
   2269 
static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
		  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  /* VOIDmode / null ADDR mean "keep the existing mode / address".  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  /* Nothing changed (and the address is valid if required): reuse
     MEMREF itself.  */
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  /* Validation may have rewritten ADDR into something equal to the
     original; if so there is still nothing to change.  */
  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
   2310 
   2311 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   2312    way we are changing MEMREF, so we only preserve the alias set.  */
   2313 
rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  class mem_attrs *defattrs;

  /* Since we don't know how MEMREF changed, drop the expression and
     offset and fall back to the defaults for the new mode; only the
     alias set survives.  */
  mem_attrs attrs (*get_mem_attrs (memref));
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
	return new_rtx;

      /* The attributes differ: make a fresh MEM so that MEMREF itself
	 is left untouched.  */
      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
   2342 
   2343 /* Return a memory reference like MEMREF, but with its mode changed
   2344    to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   2345    nonzero, the memory address is forced to be valid.
   2346    If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   2347    and the caller is responsible for adjusting MEMREF base register.
   2348    If ADJUST_OBJECT is zero, the underlying object associated with the
   2349    memory reference is left unchanged and the caller is responsible for
   2350    dealing with it.  Otherwise, if the new memory reference is outside
   2351    the underlying object, even partially, then the object is dropped.
   2352    SIZE, if nonzero, is the size of an access in cases where MODE
   2353    has no inherent size.  */
   2354 
rtx
adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
		  int validate, int adjust_address, int adjust_object,
		  poly_int64 size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  scalar_int_mode address_mode;
  class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  scalar_int_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref)
      && known_eq (offset, 0)
      && (known_eq (size, 0)
	  || (attrs.size_known_p && known_eq (attrs.size, size)))
      && (!validate || memory_address_addr_space_p (mode, addr,
						    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  offset = trunc_int_for_mode (offset, address_mode);

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode
	  && GET_CODE (addr) == LO_SUM
	  && known_in_range_p (offset,
			       0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
				   / BITS_PER_UNIT)))
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (address_mode,
					      XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
	 the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
	       && GET_CODE (addr) == ZERO_EXTEND
	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
	       && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
	addr = gen_rtx_ZERO_EXTEND (address_mode,
				    plus_constant (pointer_mode,
						   XEXP (addr, 0), offset));
#endif
      else
	addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && maybe_ne (offset, 0))
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && maybe_lt (attrs.offset, 0))
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (maybe_ne (offset, 0))
    {
      max_align = known_alignment (offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (maybe_ne (size, 0))
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && maybe_gt (offset + size, attrs.size))
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
	 so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
   2486 
   2487 /* Return a memory reference like MEMREF, but with its mode changed
   2488    to MODE and its address changed to ADDR, which is assumed to be
   2489    MEMREF offset by OFFSET bytes.  If VALIDATE is
   2490    nonzero, the memory address is forced to be valid.  */
   2491 
   2492 rtx
   2493 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
   2494 			     poly_int64 offset, int validate)
   2495 {
   2496   memref = change_address_1 (memref, VOIDmode, addr, validate, false);
   2497   return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
   2498 }
   2499 
   2500 /* Return a memory reference like MEMREF, but whose address is changed by
   2501    adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   2502    known to be in OFFSET (possibly 1).  */
   2503 
rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  class mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
				     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      /* Force the PIC base computation into a register and retry.  */
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  /* POW2 is the largest power of two known to divide OFFSET, so the
     result cannot be more aligned than POW2 bytes.  */
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
   2548 
   2549 /* Return a memory reference like MEMREF, but with its address changed to
   2550    ADDR.  The caller is asserting that the actual piece of memory pointed
   2551    to is the same, just the form of the address is being changed, such as
   2552    by putting something into a register.  INPLACE is true if any changes
   2553    can be made directly to MEMREF or false if MEMREF must be treated as
   2554    immutable.  */
   2555 
rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  The address is validated
     (VALIDATE is 1), so the returned MEM always has a usable address.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}
   2564 
   2565 /* Likewise, but the reference is not required to be valid.  */
   2566 
   2567 rtx
   2568 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
   2569 {
   2570   return change_address_1 (memref, VOIDmode, addr, 0, inplace);
   2571 }
   2572 
   2573 /* Return a memory reference like MEMREF, but with its mode widened to
   2574    MODE and offset by OFFSET.  This would be used by targets that e.g.
   2575    cannot issue QImode memory operations and have to use SImode memory
   2576    operations plus masking logic.  */
   2577 
rtx
widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
{
  /* Last four arguments: validate the address, adjust the MEM_EXPR
     offset, don't treat as a sub-object, no explicit size.  */
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  poly_uint64 size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  mem_attrs attrs (*get_mem_attrs (new_rtx));

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  /* Walk outward through the MEM_EXPR until we find an object known to
     be large enough to contain the widened access, or give up.  */
  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (attrs.expr, 1);
	  /* NB: this shadows the function parameter; it is the byte
	     offset of FIELD within its containing record.  */
	  tree offset = component_ref_field_offset (attrs.expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (poly_int_tree_p (DECL_SIZE_UNIT (field))
	      && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
	      && known_ge (attrs.offset, 0))
	    break;

	  poly_uint64 suboffset;
	  if (!poly_int_tree_p (offset, &suboffset))
	    {
	      /* Variable field offset: we cannot track the position.  */
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Step out to the containing object, folding the field's byte
	     position into the tracked offset.  */
	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
	  attrs.offset += suboffset;
	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			   / BITS_PER_UNIT);
	}
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
	       && DECL_SIZE_UNIT (attrs.expr)
	       && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
	       && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
			   size)
	       && known_ge (attrs.offset, 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  attrs.expr = NULL_TREE;
	  break;
	}
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
   2655 
   2656 /* A fake decl that is used as the MEM_EXPR of spill slots.  */
   2658 static GTY(()) tree spill_slot_decl;
   2659 
/* Return the shared spill-slot decl, building it on first use when
   FORCE_BUILD_P; otherwise return NULL_TREE if not built yet.  */

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;

  /* Return the cached decl, or nothing if we may not build it.  */
  if (d || !force_build_p)
    return d;

  /* Build an artificial, debug-invisible VAR_DECL named "%sfp" to stand
     for all spill slots.  */
  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  /* Give the decl a BLKmode MEM based on the frame pointer, with a
     fresh alias set so spill slots do not alias other memory.  */
  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}
   2686 
   2687 /* Given MEM, a result from assign_stack_local, fill in the memory
   2688    attributes as appropriate for a register allocator spill slot.
   2689    These slots are not aliasable by other memory.  We arrange for
   2690    them all to use a single MEM_EXPR, so that the aliasing code can
   2691    work properly in the case of shared spill slots.  */
   2692 
void
set_mem_attrs_for_spill (rtx mem)
{
  rtx addr;

  mem_attrs attrs (*get_mem_attrs (mem));
  /* All spill slots share the single spill decl and its alias set, so
     they do not alias any other memory.  */
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
	(mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  /* Record only the constant term of ADDR in attrs.offset; the base
     returned by strip_offset is deliberately ignored.  */
  strip_offset (addr, &attrs.offset);

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
   2713 
   2714 /* Return a newly created CODE_LABEL rtx with a unique label number.  */
   2716 
rtx_code_label *
gen_label_rtx (void)
{
  /* Consume the next number from the global label counter; the new
     label has no chain links, name, or associated insn yet.  */
  return as_a <rtx_code_label *> (
	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
				NULL, label_num++, NULL));
}
   2724 
   2725 /* For procedure integration.  */
   2727 
   2728 /* Install new pointers to the first and last insns in the chain.
   2729    Also, set cur_insn_uid to one higher than the last in use.
   2730    Used for an inline-procedure after copying the insn chain.  */
   2731 
void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      /* UIDs below param_min_nondebug_insn_uid are reserved for debug
	 insns; track the maxima of the two UID ranges separately.  */
      cur_insn_uid = param_min_nondebug_insn_uid - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (INSN_UID (insn) < param_min_nondebug_insn_uid)
	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
	else
	  {
	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
	    /* Debug insns can also appear above the reserved range;
	       count them so the debug counter stays conservative.  */
	    if (DEBUG_INSN_P (insn))
	      debug_count++;
	  }

      if (debug_count)
	cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
      else
	cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  /* One higher than the largest UID in use.  */
  cur_insn_uid++;
}
   2769 
   2770 /* Go through all the RTL insn bodies and copy any invalid shared
   2772    structure.  This routine should only be called once.  */
   2773 
static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  unsigned int i;
  rtx temp;
  /* Replace each slot entry in place with its unshared form.  */
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
}
   2792 
   2793 /* Go through all the RTL insn bodies and copy any invalid shared
   2794    structure, again.  This is a fairly expensive thing to do so it
   2795    should be done sparingly.  */
   2796 
void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  /* First clear the used flags throughout the insn chain so sharing
     detection below starts from a clean slate.  */
  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	if (CALL_P (p))
	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  rtx temp;
  unsigned int i;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    reset_used_flags (temp);

  unshare_all_rtl_1 (insn);
}
   2826 
   2827 unsigned int
   2828 unshare_all_rtl (void)
   2829 {
   2830   unshare_all_rtl_1 (get_insns ());
   2831 
   2832   for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
   2833     {
   2834       if (DECL_RTL_SET_P (decl))
   2835 	SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
   2836       DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
   2837     }
   2838 
   2839   return 0;
   2840 }
   2841 
   2842 
   2843 /* Check that ORIG is not marked when it should not be and mark ORIG as in use,
   2844    Recursively does the same for subexpressions.  */
   2845 
static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers, but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
	return;
      break;

    case CONST:
      if (shared_const_p (orig))
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
  if (flag_checking && RTX_FLAG (x, used))
    {
      /* Dump the offending insn and rtx before dying so the report is
	 actionable.  */
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
  gcc_assert (!RTX_FLAG (x, used));

  /* Mark X seen; a second visit anywhere in the chain is a sharing
     violation.  */
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside single
		     instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
			  == ASM_OPERANDS))
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
  return;
}
   2952 
   2953 /* Reset used-flags for INSN.  */
   2954 
static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  /* An insn's sharable state lives in its pattern, its notes, and (for
     calls) the function-usage list; clear all three.  */
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}
   2964 
   2965 /* Go through all the RTL insn bodies and clear all the USED bits.  */
   2966 
   2967 static void
   2968 reset_all_used_flags (void)
   2969 {
   2970   rtx_insn *p;
   2971 
   2972   for (p = get_insns (); p; p = NEXT_INSN (p))
   2973     if (INSN_P (p))
   2974       {
   2975 	rtx pat = PATTERN (p);
   2976 	if (GET_CODE (pat) != SEQUENCE)
   2977 	  reset_insn_used_flags (p);
   2978 	else
   2979 	  {
   2980 	    gcc_assert (REG_NOTES (p) == NULL);
   2981 	    for (int i = 0; i < XVECLEN (pat, 0); i++)
   2982 	      {
   2983 		rtx insn = XVECEXP (pat, 0, i);
   2984 		if (INSN_P (insn))
   2985 		  reset_insn_used_flags (insn);
   2986 	      }
   2987 	  }
   2988       }
   2989 }
   2990 
   2991 /* Verify sharing in INSN.  */
   2992 
static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  /* Check the same three rtx groups that reset_insn_used_flags
     clears: pattern, notes, and call-usage list.  */
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}
   3002 
   3003 /* Go through all the RTL insn bodies and check that there is no unexpected
   3004    sharing in between the subexpressions.  */
   3005 
   3006 DEBUG_FUNCTION void
   3007 verify_rtl_sharing (void)
   3008 {
   3009   rtx_insn *p;
   3010 
   3011   timevar_push (TV_VERIFY_RTL_SHARING);
   3012 
   3013   reset_all_used_flags ();
   3014 
   3015   for (p = get_insns (); p; p = NEXT_INSN (p))
   3016     if (INSN_P (p))
   3017       {
   3018 	rtx pat = PATTERN (p);
   3019 	if (GET_CODE (pat) != SEQUENCE)
   3020 	  verify_insn_sharing (p);
   3021 	else
   3022 	  for (int i = 0; i < XVECLEN (pat, 0); i++)
   3023 	      {
   3024 		rtx insn = XVECEXP (pat, 0, i);
   3025 		if (INSN_P (insn))
   3026 		  verify_insn_sharing (insn);
   3027 	      }
   3028       }
   3029 
   3030   reset_all_used_flags ();
   3031 
   3032   timevar_pop (TV_VERIFY_RTL_SHARING);
   3033 }
   3034 
   3035 /* Go through all the RTL insn bodies and copy any invalid shared structure.
   3036    Assumes the mark bits are cleared at entry.  */
   3037 
void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* Unshare pattern, notes, and (for calls) the usage list,
	   storing the possibly-copied rtx back into the insn.  */
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
	if (CALL_P (insn))
	  CALL_INSN_FUNCTION_USAGE (insn)
	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}
   3051 
   3052 /* Go through all virtual stack slots of a function and mark them as
   3053    shared.  We never replace the DECL_RTLs themselves with a copy,
   3054    but expressions mentioned into a DECL_RTL cannot be shared with
   3055    expressions in the instruction stream.
   3056 
   3057    Note that reload may convert pseudo registers into memories in-place.
   3058    Pseudo registers are always shared, but MEMs never are.  Thus if we
   3059    reset the used flags on MEMs in the instruction stream, we must set
   3060    them again on MEMs that appear in DECL_RTLs.  */
   3061 
   3062 static void
   3063 set_used_decls (tree blk)
   3064 {
   3065   tree t;
   3066 
   3067   /* Mark decls.  */
   3068   for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
   3069     if (DECL_RTL_SET_P (t))
   3070       set_used_flags (DECL_RTL (t));
   3071 
   3072   /* Now process sub-blocks.  */
   3073   for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
   3074     set_used_decls (t);
   3075 }
   3076 
   3077 /* Mark ORIG as in use, and return a copy of it if it was already in use.
   3078    Recursively does the same for subexpressions.  Uses
   3079    copy_rtx_if_shared_1 to reduce stack space.  */
   3080 
rtx
copy_rtx_if_shared (rtx orig)
{
  /* The worker takes rtx * so it can rewrite the reference in place;
     return whatever it left there (possibly a fresh copy).  */
  copy_rtx_if_shared_1 (&orig);
  return orig;
}
   3087 
   3088 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   3089    use.  Recursively does the same for subexpressions.  */
   3090 
static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers, but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
	return;
      break;

    case CONST:
      if (shared_const_p (x))
	return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  /* LAST_PTR delays processing of the most recently seen subexpression
     so that the final one can be handled by the goto below instead of a
     recursive call, bounding stack growth on long chains.  */
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
	  last_ptr = &XEXP (x, i);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
		 is nonzero.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
	      for (j = 0; j < len; j++)
                {
		  if (last_ptr)
		    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
	    }
	  break;
	}
    }
  *orig1 = x;
  if (last_ptr)
    {
      /* Handle the deferred final subexpression iteratively.  */
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
   3215 
   3216 /* Set the USED bit in X and its non-shareable subparts to FLAG.  */
   3217 
static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* Iterate on the last 'e' operand instead of recursing, to
	     bound stack usage on deep rtx chains.  */
          if (i == length-1)
            {
              x = XEXP (x, i);
	      goto repeat;
            }
	  mark_used_flags (XEXP (x, i), flag);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_used_flags (XVECEXP (x, i, j), flag);
	  break;
	}
    }
}
   3288 
   3289 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   3290    to look for shared sub-parts.  */
   3291 
void
reset_used_flags (rtx x)
{
  /* Clear the used bits throughout X.  */
  mark_used_flags (x, 0);
}
   3297 
   3298 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   3299    to look for shared sub-parts.  */
   3300 
void
set_used_flags (rtx x)
{
  /* Set the used bits throughout X.  */
  mark_used_flags (x, 1);
}
   3306 
   3307 /* Copy X if necessary so that it won't be altered by changes in OTHER.
   3309    Return X or the rtx for the pseudo reg the value of X was copied into.
   3310    OTHER must be valid as a SET_DEST.  */
   3311 
rtx
make_safe_from (rtx x, rtx other)
{
  /* Strip wrappers so we see the register or memory actually written.  */
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
	other = SUBREG_REG (other);
	break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
	other = XEXP (other, 0);
	break;
      default:
	goto done;
      }
 done:
  /* Copy X into a fresh pseudo when the store to OTHER could change X:
     OTHER is a MEM and X is neither constant, REG, nor SUBREG (so X may
     reference that memory), or OTHER is a hard register or a register
     mentioned within X.  */
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
	      || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
   3344 
   3345 /* Emission of insns (adding them to the doubly-linked list).  */
   3347 
   3348 /* Return the last insn emitted, even if it is in a sequence now pushed.  */
   3349 
   3350 rtx_insn *
   3351 get_last_insn_anywhere (void)
   3352 {
   3353   struct sequence_stack *seq;
   3354   for (seq = get_current_sequence (); seq; seq = seq->next)
   3355     if (seq->last != 0)
   3356       return seq->last;
   3357   return 0;
   3358 }
   3359 
   3360 /* Return the first nonnote insn emitted in current sequence or current
   3361    function.  This routine looks inside SEQUENCEs.  */
   3362 
rtx_insn *
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
	/* Skip the leading run of notes; next_insn already unwraps any
	   SEQUENCE it steps into.  */
	for (insn = next_insn (insn);
	     insn && NOTE_P (insn);
	     insn = next_insn (insn))
	  continue;
      else
	{
	  /* A non-note first insn is returned directly, unwrapping a
	     SEQUENCE to its first element.  */
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
	}
    }

  return insn;
}
   3385 
   3386 /* Return the last nonnote insn emitted in current sequence or current
   3387    function.  This routine looks inside SEQUENCEs.  */
   3388 
rtx_insn *
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
	/* Skip the trailing run of notes; previous_insn already unwraps
	   any SEQUENCE it steps into.  */
	for (insn = previous_insn (insn);
	     insn && NOTE_P (insn);
	     insn = previous_insn (insn))
	  continue;
      else
	{
	  /* A non-note last insn is returned directly, unwrapping a
	     SEQUENCE to its last element.  */
	  if (NONJUMP_INSN_P (insn))
	    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	      insn = seq->insn (seq->len () - 1);
	}
    }

  return insn;
}
   3411 
   3412 /* Return the number of actual (non-debug) insns emitted in this
   3413    function.  */
   3414 
   3415 int
   3416 get_max_insn_count (void)
   3417 {
   3418   int n = cur_insn_uid;
   3419 
   3420   /* The table size must be stable across -g, to avoid codegen
   3421      differences due to debug insns, and not be affected by
   3422      -fmin-insn-uid, to avoid excessive table size and to simplify
   3423      debugging of -fcompare-debug failures.  */
   3424   if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
   3425     n -= cur_debug_insn_uid;
   3426   else
   3427     n -= param_min_nondebug_insn_uid;
   3428 
   3429   return n;
   3430 }
   3431 
   3432 
   3433 /* Return the next insn.  If it is a SEQUENCE, return the first insn
   3435    of the sequence.  */
   3436 
   3437 rtx_insn *
   3438 next_insn (rtx_insn *insn)
   3439 {
   3440   if (insn)
   3441     {
   3442       insn = NEXT_INSN (insn);
   3443       if (insn && NONJUMP_INSN_P (insn)
   3444 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
   3445 	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
   3446     }
   3447 
   3448   return insn;
   3449 }
   3450 
   3451 /* Return the previous insn.  If it is a SEQUENCE, return the last insn
   3452    of the sequence.  */
   3453 
   3454 rtx_insn *
   3455 previous_insn (rtx_insn *insn)
   3456 {
   3457   if (insn)
   3458     {
   3459       insn = PREV_INSN (insn);
   3460       if (insn && NONJUMP_INSN_P (insn))
   3461 	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
   3462 	  insn = seq->insn (seq->len () - 1);
   3463     }
   3464 
   3465   return insn;
   3466 }
   3467 
   3468 /* Return the next insn after INSN that is not a NOTE.  This routine does not
   3469    look inside SEQUENCEs.  */
   3470 
   3471 rtx_insn *
   3472 next_nonnote_insn (rtx_insn *insn)
   3473 {
   3474   while (insn)
   3475     {
   3476       insn = NEXT_INSN (insn);
   3477       if (insn == 0 || !NOTE_P (insn))
   3478 	break;
   3479     }
   3480 
   3481   return insn;
   3482 }
   3483 
   3484 /* Return the next insn after INSN that is not a DEBUG_INSN.  This
   3485    routine does not look inside SEQUENCEs.  */
   3486 
   3487 rtx_insn *
   3488 next_nondebug_insn (rtx_insn *insn)
   3489 {
   3490   while (insn)
   3491     {
   3492       insn = NEXT_INSN (insn);
   3493       if (insn == 0 || !DEBUG_INSN_P (insn))
   3494 	break;
   3495     }
   3496 
   3497   return insn;
   3498 }
   3499 
   3500 /* Return the previous insn before INSN that is not a NOTE.  This routine does
   3501    not look inside SEQUENCEs.  */
   3502 
   3503 rtx_insn *
   3504 prev_nonnote_insn (rtx_insn *insn)
   3505 {
   3506   while (insn)
   3507     {
   3508       insn = PREV_INSN (insn);
   3509       if (insn == 0 || !NOTE_P (insn))
   3510 	break;
   3511     }
   3512 
   3513   return insn;
   3514 }
   3515 
   3516 /* Return the previous insn before INSN that is not a DEBUG_INSN.
   3517    This routine does not look inside SEQUENCEs.  */
   3518 
   3519 rtx_insn *
   3520 prev_nondebug_insn (rtx_insn *insn)
   3521 {
   3522   while (insn)
   3523     {
   3524       insn = PREV_INSN (insn);
   3525       if (insn == 0 || !DEBUG_INSN_P (insn))
   3526 	break;
   3527     }
   3528 
   3529   return insn;
   3530 }
   3531 
   3532 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   3533    This routine does not look inside SEQUENCEs.  */
   3534 
   3535 rtx_insn *
   3536 next_nonnote_nondebug_insn (rtx_insn *insn)
   3537 {
   3538   while (insn)
   3539     {
   3540       insn = NEXT_INSN (insn);
   3541       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
   3542 	break;
   3543     }
   3544 
   3545   return insn;
   3546 }
   3547 
   3548 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
   3549    but stop the search before we enter another basic block.  This
   3550    routine does not look inside SEQUENCEs.  */
   3551 
   3552 rtx_insn *
   3553 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
   3554 {
   3555   while (insn)
   3556     {
   3557       insn = NEXT_INSN (insn);
   3558       if (insn == 0)
   3559 	break;
   3560       if (DEBUG_INSN_P (insn))
   3561 	continue;
   3562       if (!NOTE_P (insn))
   3563 	break;
   3564       if (NOTE_INSN_BASIC_BLOCK_P (insn))
   3565 	return NULL;
   3566     }
   3567 
   3568   return insn;
   3569 }
   3570 
   3571 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   3572    This routine does not look inside SEQUENCEs.  */
   3573 
   3574 rtx_insn *
   3575 prev_nonnote_nondebug_insn (rtx_insn *insn)
   3576 {
   3577   while (insn)
   3578     {
   3579       insn = PREV_INSN (insn);
   3580       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
   3581 	break;
   3582     }
   3583 
   3584   return insn;
   3585 }
   3586 
   3587 /* Return the previous insn before INSN that is not a NOTE nor
   3588    DEBUG_INSN, but stop the search before we enter another basic
   3589    block.  This routine does not look inside SEQUENCEs.  */
   3590 
   3591 rtx_insn *
   3592 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
   3593 {
   3594   while (insn)
   3595     {
   3596       insn = PREV_INSN (insn);
   3597       if (insn == 0)
   3598 	break;
   3599       if (DEBUG_INSN_P (insn))
   3600 	continue;
   3601       if (!NOTE_P (insn))
   3602 	break;
   3603       if (NOTE_INSN_BASIC_BLOCK_P (insn))
   3604 	return NULL;
   3605     }
   3606 
   3607   return insn;
   3608 }
   3609 
   3610 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
   3611    or 0, if there is none.  This routine does not look inside
   3612    SEQUENCEs.  */
   3613 
   3614 rtx_insn *
   3615 next_real_insn (rtx_insn *insn)
   3616 {
   3617   while (insn)
   3618     {
   3619       insn = NEXT_INSN (insn);
   3620       if (insn == 0 || INSN_P (insn))
   3621 	break;
   3622     }
   3623 
   3624   return insn;
   3625 }
   3626 
   3627 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
   3628    or 0, if there is none.  This routine does not look inside
   3629    SEQUENCEs.  */
   3630 
   3631 rtx_insn *
   3632 prev_real_insn (rtx_insn *insn)
   3633 {
   3634   while (insn)
   3635     {
   3636       insn = PREV_INSN (insn);
   3637       if (insn == 0 || INSN_P (insn))
   3638 	break;
   3639     }
   3640 
   3641   return insn;
   3642 }
   3643 
   3644 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   3645    or 0, if there is none.  This routine does not look inside
   3646    SEQUENCEs.  */
   3647 
   3648 rtx_insn *
   3649 next_real_nondebug_insn (rtx uncast_insn)
   3650 {
   3651   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
   3652 
   3653   while (insn)
   3654     {
   3655       insn = NEXT_INSN (insn);
   3656       if (insn == 0 || NONDEBUG_INSN_P (insn))
   3657 	break;
   3658     }
   3659 
   3660   return insn;
   3661 }
   3662 
   3663 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   3664    or 0, if there is none.  This routine does not look inside
   3665    SEQUENCEs.  */
   3666 
   3667 rtx_insn *
   3668 prev_real_nondebug_insn (rtx_insn *insn)
   3669 {
   3670   while (insn)
   3671     {
   3672       insn = PREV_INSN (insn);
   3673       if (insn == 0 || NONDEBUG_INSN_P (insn))
   3674 	break;
   3675     }
   3676 
   3677   return insn;
   3678 }
   3679 
   3680 /* Return the last CALL_INSN in the current list, or 0 if there is none.
   3681    This routine does not look inside SEQUENCEs.  */
   3682 
   3683 rtx_call_insn *
   3684 last_call_insn (void)
   3685 {
   3686   rtx_insn *insn;
   3687 
   3688   for (insn = get_last_insn ();
   3689        insn && !CALL_P (insn);
   3690        insn = PREV_INSN (insn))
   3691     ;
   3692 
   3693   return safe_as_a <rtx_call_insn *> (insn);
   3694 }
   3695 
   3696 /* Find the next insn after INSN that really does something.  This routine
   3697    does not look inside SEQUENCEs.  After reload this also skips over
   3698    standalone USE and CLOBBER insn.  */
   3699 
   3700 int
   3701 active_insn_p (const rtx_insn *insn)
   3702 {
   3703   return (CALL_P (insn) || JUMP_P (insn)
   3704 	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
   3705 	  || (NONJUMP_INSN_P (insn)
   3706 	      && (! reload_completed
   3707 		  || (GET_CODE (PATTERN (insn)) != USE
   3708 		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
   3709 }
   3710 
   3711 rtx_insn *
   3712 next_active_insn (rtx_insn *insn)
   3713 {
   3714   while (insn)
   3715     {
   3716       insn = NEXT_INSN (insn);
   3717       if (insn == 0 || active_insn_p (insn))
   3718 	break;
   3719     }
   3720 
   3721   return insn;
   3722 }
   3723 
   3724 /* Find the last insn before INSN that really does something.  This routine
   3725    does not look inside SEQUENCEs.  After reload this also skips over
   3726    standalone USE and CLOBBER insn.  */
   3727 
   3728 rtx_insn *
   3729 prev_active_insn (rtx_insn *insn)
   3730 {
   3731   while (insn)
   3732     {
   3733       insn = PREV_INSN (insn);
   3734       if (insn == 0 || active_insn_p (insn))
   3735 	break;
   3736     }
   3737 
   3738   return insn;
   3739 }
   3740 
/* Return true if X contains an RTX_AUTOINC class rtx whose operand
   matches REG.  */
   3743 
   3744 static int
   3745 find_auto_inc (const_rtx x, const_rtx reg)
   3746 {
   3747   subrtx_iterator::array_type array;
   3748   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
   3749     {
   3750       const_rtx x = *iter;
   3751       if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
   3752 	  && rtx_equal_p (reg, XEXP (x, 0)))
   3753 	return true;
   3754     }
   3755   return false;
   3756 }
   3757 
   3758 /* Increment the label uses for all labels present in rtx.  */
   3759 
   3760 static void
   3761 mark_label_nuses (rtx x)
   3762 {
   3763   enum rtx_code code;
   3764   int i, j;
   3765   const char *fmt;
   3766 
   3767   code = GET_CODE (x);
   3768   if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
   3769     LABEL_NUSES (label_ref_label (x))++;
   3770 
   3771   fmt = GET_RTX_FORMAT (code);
   3772   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   3773     {
   3774       if (fmt[i] == 'e')
   3775 	mark_label_nuses (XEXP (x, i));
   3776       else if (fmt[i] == 'E')
   3777 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
   3778 	  mark_label_nuses (XVECEXP (x, i, j));
   3779     }
   3780 }
   3781 
   3782 
   3783 /* Try splitting insns that can be split for better scheduling.
   3785    PAT is the pattern which might split.
   3786    TRIAL is the insn providing PAT.
   3787    LAST is nonzero if we should return the last insn of the sequence produced.
   3788 
   3789    If this routine succeeds in splitting, it returns the first or last
   3790    replacement insn depending on the value of LAST.  Otherwise, it
   3791    returns TRIAL.  If the insn to be returned can be split, it will be.  */
   3792 
rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before, *after;
  rtx note;
  rtx_insn *seq, *tem;
  profile_probability probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  /* Export TRIAL's branch probability (if it is a conditional jump with
     a REG_BR_PROB note) through the global split_branch_probability so
     the machine-description splitters can see it, then run them.  */
  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability
      = profile_probability::from_reg_br_prob_note (XINT (note, 0));
  else
    split_branch_probability = profile_probability::uninitialized ();

  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = profile_probability::uninitialized ();

  /* No applicable splitter: report failure by returning TRIAL.  */
  if (!seq)
    return trial;

  int split_insn_count = 0;
  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      split_insn_count++;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }
  /* After this loop INSN_LAST is the final insn of SEQ and
     SPLIT_INSN_COUNT is the length of SEQ.  */

  /* We're not good at redistributing frame information if
     the split occurs before reload or if it results in more
     than one insn.  */
  if (RTX_FRAME_RELATED_P (trial))
    {
      if (!reload_completed || split_insn_count != 1)
        return trial;

      rtx_insn *new_insn = seq;
      rtx_insn *old_insn = trial;
      copy_frame_info_to_split_insn (old_insn, new_insn);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  if (JUMP_P (trial))
	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability.initialized_p ()
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_reg_br_prob_note (insn, probability);
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    /* A call may be split into at most one call.  */
	    gcc_assert (call_insn == NULL_RTX);
	    call_insn = insn;

	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
	       target may have explicitly specified.  */
	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);

	    /* If the old call was a sibling call, the new one must
	       be too.  */
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
	  break;

	/* Call-related notes are re-attached to every CALL_INSN in the
	   replacement sequence.  */
	case REG_NORETURN:
	case REG_SETJMP:
	case REG_TM:
	case REG_CALL_NOCF_CHECK:
	case REG_CALL_ARG_LOCATION:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	/* Jump-related notes go to every JUMP_INSN in the sequence.  */
	case REG_NON_LOCAL_GOTO:
	case REG_LABEL_TARGET:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_INC:
	  if (!AUTO_INC_DEC)
	    break;

	  /* Re-attach REG_INC to whichever new insn actually contains
	     the autoincrement of this register.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && find_auto_inc (PATTERN (insn), reg))
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;

	case REG_ARGS_SIZE:
	  fixup_args_size_notes (NULL, insn_last, get_args_size (note));
	  break;

	case REG_CALL_DECL:
	case REG_UNTYPED_CALL:
	  /* These require the single CALL_INSN located above.  */
	  gcc_assert (call_insn != NULL_RTX);
	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
	  break;

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  /* Splice SEQ into the stream in place of TRIAL, propagating TRIAL's
     source location, then remove TRIAL.  */
  before = PREV_INSN (trial);
  after = NEXT_INSN (trial);

  emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
   3993 
   3994 /* Make and return an INSN rtx, initializing all its slots.
   3996    Store PATTERN in the pattern slots.  */
   3997 
rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  /* Allocate a bare INSN and initialize all of its fields; the insn is
     NOT linked into any chain here.  */
  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;	/* -1 = not yet recognized.  */
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  /* Sanity check: catch callers using emit_insn where emit_jump_insn
     was required.  */
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: %<emit_insn%> used where %<emit_jump_insn%> needed:");
      debug_rtx (insn);
    }
#endif

  return insn;
}
   4026 
   4027 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */
   4028 
static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  /* Debug insns draw UIDs from their own counter; once that counter
     exceeds param_min_nondebug_insn_uid, fall back to the normal
     insn-UID counter instead.  */
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;	/* -1 = not yet recognized.  */
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
   4047 
   4048 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
   4049 
static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  /* Allocate a bare JUMP_INSN; not linked into any chain here.  */
  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;	/* -1 = not yet recognized.  */
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;	/* Target label is filled in later.  */
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
   4067 
   4068 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
   4069 
static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  /* Allocate a bare CALL_INSN; not linked into any chain here.  */
  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;	/* -1 = not yet recognized.  */
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;	/* Usage list added later.  */
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
   4087 
   4088 /* Like `make_insn_raw' but make a NOTE instead of an insn.  */
   4089 
static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  /* Allocate a NOTE of kind SUBTYPE with zeroed payload; not linked
     into any chain here.  */
  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
   4105 
   4106 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   4108    INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
   4109    but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */
   4110 
static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      /* If PREV holds a SEQUENCE (delay-slot group), its last interior
	 insn carries a copy of PREV's NEXT pointer; keep it in sync.  */
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
	}
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      /* Likewise, the first interior insn of a SEQUENCE in NEXT carries
	 a copy of NEXT's PREV pointer.  */
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = insn;
	}
    }

  /* If INSN itself holds a SEQUENCE, mirror its new neighbors onto the
     first and last interior insns.  */
  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}
   4142 
   4143 /* Add INSN to the end of the doubly-linked list.
   4144    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
   4145 
   4146 void
   4147 add_insn (rtx_insn *insn)
   4148 {
   4149   rtx_insn *prev = get_last_insn ();
   4150   link_insn_into_chain (insn, prev, NULL);
   4151   if (get_insns () == NULL)
   4152     set_first_insn (insn);
   4153   set_last_insn (insn);
   4154 }
   4155 
   4156 /* Add INSN into the doubly-linked list after insn AFTER.  */
   4157 
static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  /* Inserting after a deleted insn is only tolerated when not
     optimizing.  */
  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  /* If AFTER was the tail of some sequence on the current sequence
     stack, INSN becomes the new tail of that sequence.  */
  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (after == seq->last)
	  {
	    seq->last = insn;
	    break;
	  }
    }
}
   4179 
   4180 /* Add INSN into the doubly-linked list before insn BEFORE.  */
   4181 
static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  /* Inserting before a deleted insn is only tolerated when not
     optimizing.  */
  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  /* If BEFORE had no predecessor, it must have been the head of some
     sequence on the current sequence stack; INSN becomes that
     sequence's new head.  */
  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (before == seq->first)
	  {
	    seq->first = insn;
	    break;
	  }

      gcc_assert (seq);
    }
}
   4205 
   4206 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   4207    If BB is NULL, an attempt is made to infer the bb from before.
   4208 
   4209    This and the next function should be the only functions called
   4210    to insert an insn once delay slots have been filled since only
   4211    they know how to update a SEQUENCE. */
   4212 
void
add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
{
  add_insn_after_nobb (insn, after);
  /* NOTE(review): unlike add_insn_before, the incoming BB argument is
     unconditionally overwritten from BLOCK_FOR_INSN (after) here, so a
     caller-supplied BB is never consulted — confirm this asymmetry is
     intentional before relying on the parameter.  */
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (BB_END (bb) == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && !BARRIER_P (insn)
	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
	BB_END (bb) = insn;
    }
}
   4233 
   4234 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   4235    If BB is NULL, an attempt is made to infer the bb from before.
   4236 
   4237    This and the previous function should be the only functions called
   4238    to insert an insn once delay slots have been filled since only
   4239    they know how to update a SEQUENCE. */
   4240 
void
add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
{
  add_insn_before_nobb (insn, before);

  /* If the caller gave no block, inherit BEFORE's (barriers live
     outside any block, so skip the inference for them).  */
  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}
   4264 
/* Replace INSN with a deleted instruction note.  */
   4266 
void
set_insn_deleted (rtx_insn *insn)
{
  /* Drop the dataflow information while INSN still looks like an insn;
     after PUT_CODE below it would no longer satisfy INSN_P.  */
  if (INSN_P (insn))
    df_insn_delete (insn);
  /* Turn INSN into a NOTE_INSN_DELETED in place, leaving it linked in
     the chain.  */
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
   4275 
   4276 
   4277 /* Unlink INSN from the insn chain.
   4278 
   4279    This function knows how to handle sequences.
   4280 
   4281    This function does not invalidate data flow information associated with
   4282    INSN (i.e. does not call df_insn_delete).  That makes this function
   4283    usable for only disconnecting an insn from the chain, and re-emit it
   4284    elsewhere later.
   4285 
   4286    To later insert INSN elsewhere in the insn chain via add_insn and
   4287    similar functions, PREV_INSN and NEXT_INSN must be nullified by
   4288    the caller.  Nullifying them here breaks many insn chain walks.
   4289 
   4290    To really delete an insn and related DF information, use delete_insn.  */
   4291 
void
remove_insn (rtx_insn *insn)
{
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      /* Keep the mirrored NEXT pointer inside a delay-slot SEQUENCE in
	 sync (see link_insn_into_chain).  */
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
	}
    }
  else
    {
      /* INSN had no predecessor, so it must head some sequence on the
	 current sequence stack; advance that head.  */
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->first)
	  {
	    seq->first = next;
	    break;
	  }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      /* Likewise keep the mirrored PREV pointer of a SEQUENCE in sync.  */
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = prev;
	}
    }
  else
    {
      /* INSN had no successor, so it must be the tail of some sequence
	 on the current sequence stack; retreat that tail.  */
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->last)
	  {
	    seq->last = prev;
	    break;
	  }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}
   4360 
   4361 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
   4362 
   4363 void
   4364 add_function_usage_to (rtx call_insn, rtx call_fusage)
   4365 {
   4366   gcc_assert (call_insn && CALL_P (call_insn));
   4367 
   4368   /* Put the register usage information on the CALL.  If there is already
   4369      some usage information, put ours at the end.  */
   4370   if (CALL_INSN_FUNCTION_USAGE (call_insn))
   4371     {
   4372       rtx link;
   4373 
   4374       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
   4375 	   link = XEXP (link, 1))
   4376 	;
   4377 
   4378       XEXP (link, 1) = call_fusage;
   4379     }
   4380   else
   4381     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
   4382 }
   4383 
   4384 /* Delete all insns made since FROM.
   4385    FROM becomes the new last instruction.  */
   4386 
   4387 void
   4388 delete_insns_since (rtx_insn *from)
   4389 {
   4390   if (from == 0)
   4391     set_first_insn (0);
   4392   else
   4393     SET_NEXT_INSN (from) = 0;
   4394   set_last_insn (from);
   4395 }
   4396 
   4397 /* This function is deprecated, please use sequences instead.
   4398 
   4399    Move a consecutive bunch of insns to a different place in the chain.
   4400    The insns to be moved are those between FROM and TO.
   4401    They are moved to a new position after the insn AFTER.
   4402    AFTER must not be FROM or TO or any insn in between.
   4403 
   4404    This function does not know about SEQUENCEs and hence should not be
   4405    called after delay-slot filling has been done.  */
   4406 
void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  /* Verify AFTER is not inside [FROM, TO]; that would corrupt the
     chain below.  */
  if (flag_checking)
    {
      for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
	gcc_assert (after != x);
      gcc_assert (after != to);
    }

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  /* Keep the stream's first/last pointers valid if the moved range
     touched either end.  */
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}
   4437 
   4438 /* Same as function above, but take care to update BB boundaries.  */
/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  /* Remember FROM's predecessor before the splice rewrites it.  */
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      /* If the range came from a (different) block whose end it
	 formed, pull that block's end back to PREV.  */
      if (!BARRIER_P (from)
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  df_set_bb_dirty (bb2);
	}

      /* The moved range now ends the destination block if it was
	 inserted at that block's end.  */
      if (BB_END (bb) == after)
	BB_END (bb) = to;

      /* Re-home every moved insn (except barriers) into BB.  */
      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	if (!BARRIER_P (x))
	  df_insn_change_bb (x, bb);
    }
}
   4469 
   4470 
   4471 /* Emit insn(s) of given code and pattern
   4473    at a specified place within the doubly-linked list.
   4474 
   4475    All of the emit_foo global entry points accept an object
   4476    X which is either an insn list or a PATTERN of a single
   4477    instruction.
   4478 
   4479    There are thus a few canonical ways to generate code and
   4480    emit it at a specific place in the instruction stream.  For
   4481    example, consider the instruction named SPOT and the fact that
   4482    we would like to emit some instructions before SPOT.  We might
   4483    do it like this:
   4484 
   4485 	start_sequence ();
   4486 	... emit the new instructions ...
   4487 	insns_head = get_insns ();
   4488 	end_sequence ();
   4489 
   4490 	emit_insn_before (insns_head, SPOT);
   4491 
   4492    It used to be common to generate SEQUENCE rtl instead, but that
   4493    is a relic of the past which no longer occurs.  The reason is that
   4494    SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   4495    generated would almost certainly die right after it was created.  */
   4496 
static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
			   basic_block bb,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  /* Nothing to emit: the caller-provided fallback LAST is the result.  */
  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    /* X is already a chain-able object (or a list of them): link each
       one in front of BEFORE in order.  */
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn_before (insn, before, bb);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    /* SEQUENCE rtl must never be handed to the emit functions.  */
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    /* Otherwise X is a bare pattern: wrap it via MAKE_RAW and link the
       resulting insn in front of BEFORE.  */
    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}
   4542 
   4543 /* Make X be output before the instruction BEFORE.  */
   4544 
   4545 rtx_insn *
   4546 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
   4547 {
   4548   return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
   4549 }
   4550 
   4551 /* Make an instruction with body X and code JUMP_INSN
   4552    and output it before the instruction BEFORE.  */
   4553 
   4554 rtx_jump_insn *
   4555 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
   4556 {
   4557   return as_a <rtx_jump_insn *> (
   4558 		emit_pattern_before_noloc (x, before, NULL, NULL,
   4559 					   make_jump_insn_raw));
   4560 }
   4561 
   4562 /* Make an instruction with body X and code CALL_INSN
   4563    and output it before the instruction BEFORE.  */
   4564 
   4565 rtx_insn *
   4566 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
   4567 {
   4568   return emit_pattern_before_noloc (x, before, NULL, NULL,
   4569 				    make_call_insn_raw);
   4570 }
   4571 
   4572 /* Make an instruction with body X and code DEBUG_INSN
   4573    and output it before the instruction BEFORE.  */
   4574 
   4575 rtx_insn *
   4576 emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
   4577 {
   4578   return emit_pattern_before_noloc (x, before, NULL, NULL,
   4579 				    make_debug_insn_raw);
   4580 }
   4581 
/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  Returns the new barrier.  */

rtx_barrier *
emit_barrier_before (rtx_insn *before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  /* Give the barrier a fresh uid.  */
  INSN_UID (insn) = cur_insn_uid++;

  /* Barriers are linked in with no basic block of their own.  */
  add_insn_before (insn, before, NULL);
  return insn;
}
   4595 
/* Emit the label LABEL before the insn BEFORE.  Returns LABEL.  */

rtx_code_label *
emit_label_before (rtx_code_label *label, rtx_insn *before)
{
  /* A uid of zero means the label has not been emitted yet; emitting
     the same label twice would corrupt the insn chain.  */
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return label;
}
   4606 
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  Splices the chain FIRST..(end of list) into the insn
   stream right after AFTER, setting BLOCK_FOR_INSN on the new insns
   when a basic block is known.  Returns the last insn spliced in.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
{
  rtx_insn *last;
  rtx_insn *after_after;
  /* If no block was given, try to deduce one from AFTER; barriers sit
     between blocks, so they give us nothing to deduce from.  */
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      /* Attach each non-barrier insn of the list to BB and let the
	 dataflow machinery rescan it.  The loop stops at the final
	 insn, which is then handled by the copy just below.  */
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (!BARRIER_P (last))
	  {
	    set_block_for_insn (last, bb);
	    df_insn_rescan (last);
	  }
      if (!BARRIER_P (last))
	{
	  set_block_for_insn (last, bb);
	  df_insn_rescan (last);
	}
      /* If we appended at the end of the block, move the block's end
	 marker to the new last insn.  */
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    /* No block information: just find the last insn of the list.  */
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  /* Splice FIRST..LAST into the doubly-linked chain after AFTER.  */
  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  /* Keep the sequence's notion of the last insn up to date.  */
  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}
   4653 
/* Insert X after insn AFTER without setting any location information.
   BB, if non-null, is the basic block for the new insn(s).  MAKE_RAW
   says how to turn a bare pattern into a real insn.  Returns the last
   insn emitted, or AFTER itself when X is null.  */

static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
			  rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    /* X is already an insn (possibly the head of a list of insns):
       link it into the chain as-is.  */
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    /* SEQUENCE rtxes must never be emitted directly.  */
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    /* Otherwise X is a bare pattern: wrap it in a fresh insn.  */
    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}
   4691 
   4692 /* Make X be output after the insn AFTER and set the BB of insn.  If
   4693    BB is NULL, an attempt is made to infer the BB from AFTER.  */
   4694 
   4695 rtx_insn *
   4696 emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
   4697 {
   4698   return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
   4699 }
   4700 
   4701 
   4702 /* Make an insn of code JUMP_INSN with body X
   4703    and output it after the insn AFTER.  */
   4704 
   4705 rtx_jump_insn *
   4706 emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
   4707 {
   4708   return as_a <rtx_jump_insn *> (
   4709 		emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
   4710 }
   4711 
   4712 /* Make an instruction with body X and code CALL_INSN
   4713    and output it after the instruction AFTER.  */
   4714 
   4715 rtx_insn *
   4716 emit_call_insn_after_noloc (rtx x, rtx_insn *after)
   4717 {
   4718   return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
   4719 }
   4720 
/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  (The original comment
   said CALL_INSN, which was a copy-paste error.)  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}
   4729 
/* Make an insn of code BARRIER
   and output it after the insn AFTER.  Returns the new barrier.  */

rtx_barrier *
emit_barrier_after (rtx_insn *after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  /* Give the barrier a fresh uid.  */
  INSN_UID (insn) = cur_insn_uid++;

  /* Barriers are linked in with no basic block of their own.  */
  add_insn_after (insn, after, NULL);
  return insn;
}
   4743 
/* Emit the label LABEL after the insn AFTER.  Returns LABEL.  */

rtx_insn *
emit_label_after (rtx_insn *label, rtx_insn *after)
{
  /* A uid of zero means the label has not been emitted yet; emitting
     the same label twice would corrupt the insn chain.  */
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return label;
}
   4754 
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.   */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
      /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	return true;

      /* Notes for var tracking and EH region markers can appear between or
	 inside basic blocks.  If the caller is emitting on the basic block
	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
      case NOTE_INSN_VAR_LOCATION:
      case NOTE_INSN_EH_REGION_BEG:
      case NOTE_INSN_EH_REGION_END:
	return on_bb_boundary_p;

      /* Otherwise, BLOCK_FOR_INSN must be set.  */
      default:
	return false;
    }
}
   4786 
/* Emit a note of subtype SUBTYPE after the insn AFTER.  Returns the
   new note.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx_insn *after)
{
  rtx_note *note = make_note_raw (subtype);
  /* Barriers have no block; otherwise take AFTER's block.  */
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  /* Some note kinds must not get BLOCK_FOR_INSN set; see
     note_outside_basic_block_p.  */
  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}
   4802 
/* Emit a note of subtype SUBTYPE before the insn BEFORE.  Returns the
   new note.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx_insn *before)
{
  rtx_note *note = make_note_raw (subtype);
  /* Barriers have no block; otherwise take BEFORE's block.  */
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  /* Some note kinds must not get BLOCK_FOR_INSN set; see
     note_outside_basic_block_p.  */
  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
   4818 
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  Returns
   the last insn emitted.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  /* Nothing was emitted, or there is no location to apply.  */
  if (pattern == NULL_RTX || !loc)
    return last;

  /* Walk the newly emitted insns, NEXT_INSN (after) .. LAST, giving
     LOC to each active insn that does not already carry a location.  */
  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after)
	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
	  && !INSN_LOCATION (after))
	INSN_LOCATION (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}
   4845 
/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
		    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *prev = after;

  /* Optionally look back past debug insns for a real insn whose
     location can be inherited.  */
  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  /* Reuse PREV's location if PREV is a real insn; otherwise emit
     without any location at all.  */
  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
   4866 
   4867 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
   4868 rtx_insn *
   4869 emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
   4870 {
   4871   return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
   4872 }
   4873 
   4874 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
   4875 rtx_insn *
   4876 emit_insn_after (rtx pattern, rtx_insn *after)
   4877 {
   4878   return emit_pattern_after (pattern, after, true, make_insn_raw);
   4879 }
   4880 
   4881 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
   4882 rtx_jump_insn *
   4883 emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
   4884 {
   4885   return as_a <rtx_jump_insn *> (
   4886 	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
   4887 }
   4888 
   4889 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
   4890 rtx_jump_insn *
   4891 emit_jump_insn_after (rtx pattern, rtx_insn *after)
   4892 {
   4893   return as_a <rtx_jump_insn *> (
   4894 	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
   4895 }
   4896 
   4897 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
   4898 rtx_insn *
   4899 emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
   4900 {
   4901   return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
   4902 }
   4903 
   4904 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
   4905 rtx_insn *
   4906 emit_call_insn_after (rtx pattern, rtx_insn *after)
   4907 {
   4908   return emit_pattern_after (pattern, after, true, make_call_insn_raw);
   4909 }
   4910 
   4911 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
   4912 rtx_insn *
   4913 emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
   4914 {
   4915   return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
   4916 }
   4917 
   4918 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
   4919 rtx_insn *
   4920 emit_debug_insn_after (rtx pattern, rtx_insn *after)
   4921 {
   4922   return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
   4923 }
   4924 
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  Returns the last insn emitted.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
			    bool insnp, rtx_insn *(*make_raw) (rtx))
{
  /* Remember the insn preceding the insertion point so the first newly
     emitted insn can be located afterwards.  */
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
					      insnp ? before : NULL,
					      NULL, make_raw);

  /* Nothing was emitted, or there is no location to apply.  */
  if (pattern == NULL_RTX || !loc)
    return last;

  /* FIRST is NULL when BEFORE was the very first insn of the chain.  */
  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  /* Give LOC to every newly emitted active insn that lacks one.  */
  while (1)
    {
      if (active_insn_p (first)
	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
	  && !INSN_LOCATION (first))
	INSN_LOCATION (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}
   4958 
/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
		     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *next = before;

  /* Optionally look back past debug insns for a real insn whose
     location can be inherited.  */
  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  /* Reuse NEXT's location if NEXT is a real insn; otherwise emit
     without any location at all.  */
  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL,
                                      NULL, make_raw);
}
   4982 
   4983 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
   4984 rtx_insn *
   4985 emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
   4986 {
   4987   return emit_pattern_before_setloc (pattern, before, loc, true,
   4988 				     make_insn_raw);
   4989 }
   4990 
   4991 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
   4992 rtx_insn *
   4993 emit_insn_before (rtx pattern, rtx_insn *before)
   4994 {
   4995   return emit_pattern_before (pattern, before, true, true, make_insn_raw);
   4996 }
   4997 
   4998 /* like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
   4999 rtx_jump_insn *
   5000 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
   5001 {
   5002   return as_a <rtx_jump_insn *> (
   5003 	emit_pattern_before_setloc (pattern, before, loc, false,
   5004 				    make_jump_insn_raw));
   5005 }
   5006 
   5007 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
   5008 rtx_jump_insn *
   5009 emit_jump_insn_before (rtx pattern, rtx_insn *before)
   5010 {
   5011   return as_a <rtx_jump_insn *> (
   5012 	emit_pattern_before (pattern, before, true, false,
   5013 			     make_jump_insn_raw));
   5014 }
   5015 
   5016 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
   5017 rtx_insn *
   5018 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
   5019 {
   5020   return emit_pattern_before_setloc (pattern, before, loc, false,
   5021 				     make_call_insn_raw);
   5022 }
   5023 
   5024 /* Like emit_call_insn_before_noloc,
   5025    but set insn_location according to BEFORE.  */
   5026 rtx_insn *
   5027 emit_call_insn_before (rtx pattern, rtx_insn *before)
   5028 {
   5029   return emit_pattern_before (pattern, before, true, false,
   5030 			      make_call_insn_raw);
   5031 }
   5032 
   5033 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
   5034 rtx_insn *
   5035 emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
   5036 {
   5037   return emit_pattern_before_setloc (pattern, before, loc, false,
   5038 				     make_debug_insn_raw);
   5039 }
   5040 
   5041 /* Like emit_debug_insn_before_noloc,
   5042    but set insn_location according to BEFORE.  */
   5043 rtx_insn *
   5044 emit_debug_insn_before (rtx pattern, rtx_insn *before)
   5045 {
   5046   return emit_pattern_before (pattern, before, false, false,
   5047 			      make_debug_insn_raw);
   5048 }
   5049 
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   X may be a bare pattern (wrapped in a fresh INSN) or an existing
   insn or list of insns (linked in as-is).

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    /* X is already an insn (possibly the head of a list): append each
       insn of the list in turn.  */
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    /* These rtx codes must never be emitted directly.  */
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    /* X is a bare pattern: wrap it in a fresh INSN.  */
    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
   5099 
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  If X is already an
   insn or a list of insns, it is linked in as-is instead.  Returns the
   last insn emitted.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    /* X is already an insn (possibly the head of a list): append each
       insn of the list in turn.  */
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    /* These rtx codes must never be emitted directly.  */
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    /* X is a bare pattern: wrap it in a fresh DEBUG_INSN.  */
    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
   5146 
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  If X is already an
   insn or a list of insns, it is linked in as-is instead.  Returns the
   last insn emitted.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    /* X is already an insn (possibly the head of a list): append each
       insn of the list in turn.  */
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    /* These rtx codes must never be emitted directly.  */
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    /* X is a bare pattern: wrap it in a fresh JUMP_INSN.  */
    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
   5190 
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  If X is already an
   insn or a list of insns, it is emitted as-is via emit_insn.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    /* X is already an insn (possibly a list): defer to emit_insn,
       which links lists in place.  */
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    /* These rtx codes must never be emitted directly.  */
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    /* X is a bare pattern: wrap it in a fresh CALL_INSN.  */
    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
   5226 
/* Add the label LABEL to the end of the doubly-linked list.
   UNCAST_LABEL must actually be a CODE_LABEL.  Returns the label.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  /* A uid of zero means the label has not been emitted yet; emitting
     the same label twice would corrupt the insn chain.  */
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}
   5239 
/* Make an insn of code JUMP_TABLE_DATA with pattern TABLE
   and add it to the end of the doubly-linked list.  Jump table data is
   never part of a basic block, so BLOCK_FOR_INSN is cleared.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}
   5254 
/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  /* Give the barrier a fresh uid.  */
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
   5266 
/* Emit a copy of note ORIG at the end of the doubly-linked list.
   The new note shares ORIG's kind and data.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  /* Copy the note-specific payload (union field) from ORIG.  */
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}
   5278 
/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}
   5289 
   5290 /* Emit a clobber of lvalue X.  */
   5291 
   5292 rtx_insn *
   5293 emit_clobber (rtx x)
   5294 {
   5295   /* CONCATs should not appear in the insn stream.  */
   5296   if (GET_CODE (x) == CONCAT)
   5297     {
   5298       emit_clobber (XEXP (x, 0));
   5299       return emit_clobber (XEXP (x, 1));
   5300     }
   5301   return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
   5302 }
   5303 
   5304 /* Return a sequence of insns to clobber lvalue X.  */
   5305 
   5306 rtx_insn *
   5307 gen_clobber (rtx x)
   5308 {
   5309   rtx_insn *seq;
   5310 
   5311   start_sequence ();
   5312   emit_clobber (x);
   5313   seq = get_insns ();
   5314   end_sequence ();
   5315   return seq;
   5316 }
   5317 
   5318 /* Emit a use of rvalue X.  */
   5319 
   5320 rtx_insn *
   5321 emit_use (rtx x)
   5322 {
   5323   /* CONCATs should not appear in the insn stream.  */
   5324   if (GET_CODE (x) == CONCAT)
   5325     {
   5326       emit_use (XEXP (x, 0));
   5327       return emit_use (XEXP (x, 1));
   5328     }
   5329   return emit_insn (gen_rtx_USE (VOIDmode, x));
   5330 }
   5331 
   5332 /* Return a sequence of insns to use rvalue X.  */
   5333 
   5334 rtx_insn *
   5335 gen_use (rtx x)
   5336 {
   5337   rtx_insn *seq;
   5338 
   5339   start_sequence ();
   5340   emit_use (x);
   5341   seq = get_insns ();
   5342   end_sequence ();
   5343   return seq;
   5344 }
   5345 
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART
      || GET_CODE (reg) == ZERO_EXTRACT)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}
   5385 
/* Place a note of KIND on insn INSN with DATUM as the datum. If a
   note of this type already exists, remove it first.  Returns the new
   note, or NULL_RTX if adding the note would be invalid.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  /* First validate that the note makes sense for this insn.  */
  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
      if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  /* Replace the datum of an existing note in place, or add a new one.  */
  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  /* REG_EQUAL/REG_EQUIV notes carry register uses the dataflow
     machinery must know about.  */
  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
   5440 
   5441 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
   5442 rtx
   5443 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
   5444 {
   5445   rtx set = set_for_reg_notes (insn);
   5446 
   5447   if (set && SET_DEST (set) == dst)
   5448     return set_unique_reg_note (insn, kind, datum);
   5449   return NULL_RTX;
   5450 }
   5451 
/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  /* Let classify_insn decide which kind of insn the pattern needs.  */
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case  JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	/* Unconditional jumps and returns end the control flow, so a
	   barrier follows them when the caller allows it.  */
	if (allow_barrier_p
	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
   5486 
/* Space for free sequence stack entries, recycled by start_sequence /
   end_sequence.  Marked deletable so the GC may reclaim it.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
   5490 
/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  /* Reuse a previously freed stack entry if one is available.  */
  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  /* Save the current insn chain on the stack and start a fresh,
     empty one.  */
  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}
   5519 
   5520 /* Set up the insn chain starting with FIRST as the current sequence,
   5521    saving the previously current one.  See the documentation for
   5522    start_sequence for more information about how to use this function.  */
   5523 
   5524 void
   5525 push_to_sequence (rtx_insn *first)
   5526 {
   5527   rtx_insn *last;
   5528 
   5529   start_sequence ();
   5530 
   5531   for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
   5532     ;
   5533 
   5534   set_first_insn (first);
   5535   set_last_insn (last);
   5536 }
   5537 
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
   5549 
   5550 /* Set up the outer-level insn chain
   5551    as the current sequence, saving the previously current one.  */
   5552 
   5553 void
   5554 push_topmost_sequence (void)
   5555 {
   5556   struct sequence_stack *top;
   5557 
   5558   start_sequence ();
   5559 
   5560   top = get_topmost_sequence ();
   5561   set_first_insn (top->first);
   5562   set_last_insn (top->last);
   5563 }
   5564 
   5565 /* After emitting to the outer-level insn chain, update the outer-level
   5566    insn chain, and restore the previous saved state.  */
   5567 
   5568 void
   5569 pop_topmost_sequence (void)
   5570 {
   5571   struct sequence_stack *top;
   5572 
   5573   top = get_topmost_sequence ();
   5574   top->first = get_insns ();
   5575   top->last = get_last_insn ();
   5576 
   5577   end_sequence ();
   5578 }
   5579 
   5580 /* After emitting to a sequence, restore previous saved state.
   5581 
   5582    To get the contents of the sequence just made, you must call
   5583    `get_insns' *before* calling here.
   5584 
   5585    If the compiler might have deferred popping arguments while
   5586    generating this sequence, and this sequence will not be immediately
   5587    inserted into the instruction stream, use do_pending_stack_adjust
   5588    before calling get_insns.  That will ensure that the deferred
   5589    pops are inserted into this sequence, and not into some random
   5590    location in the instruction stream.  See INHIBIT_DEFER_POP for more
   5591    information about deferred popping of arguments.  */
   5592 
   5593 void
   5594 end_sequence (void)
   5595 {
   5596   struct sequence_stack *tem = get_current_sequence ()->next;
   5597 
   5598   set_first_insn (tem->first);
   5599   set_last_insn (tem->last);
   5600   get_current_sequence ()->next = tem->next;
   5601 
   5602   memset (tem, 0, sizeof (*tem));
   5603   tem->next = free_sequence_stack;
   5604   free_sequence_stack = tem;
   5605 }
   5606 
   5607 /* Return 1 if currently emitting into a sequence.  */
   5608 
   5609 int
   5610 in_sequence_p (void)
   5611 {
   5612   return get_current_sequence ()->next != 0;
   5613 }
   5614 
/* Put the various virtual registers into REGNO_REG_RTX.

   The shared virtual-register rtxes themselves are created once by
   init_emit_regs; here we just install them in the per-function
   pseudo-register table so regno_reg_rtx[N] is valid for the virtual
   register numbers as well as the hard registers.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
   5629 
   5630 
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.
   copy_insn_scratch_in[i] holds an original SCRATCH seen during the
   current copy_insn call and copy_insn_scratch_out[i] its copy;
   copy_insn_n_scratches counts the valid entries.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
   5650 
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case RETURN:
    case SIMPLE_RETURN:
      /* These codes are shared; return the original unchanged.  */
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers, but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case SCRATCH:
      /* If this SCRATCH was already copied during the current copy_insn
	 call, reuse that copy so all references share one object.  */
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  /* Recursively copy each operand according to its rtx format letter.  */
  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	/* Reuse the ASM_OPERANDS input/constraint vector copies recorded
	   below, so all ASM_OPERANDS of one asm statement keep sharing
	   the same vectors in the copy.  */
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 'p':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  /* Record SCRATCH and ASM_OPERANDS copies so later references within
     this same copy_insn call can be shared (see the cases above).  */
  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
   5783 
   5784 /* Create a new copy of an rtx.
   5785    This function differs from copy_rtx in that it handles SCRATCHes and
   5786    ASM_OPERANDs properly.
   5787    INSN doesn't really have to be a full INSN; it could be just the
   5788    pattern.  */
   5789 rtx
   5790 copy_insn (rtx insn)
   5791 {
   5792   copy_insn_n_scratches = 0;
   5793   orig_asm_operands_vector = 0;
   5794   orig_asm_constraints_vector = 0;
   5795   copy_asm_operands_vector = 0;
   5796   copy_asm_constraints_vector = 0;
   5797   return copy_insn_1 (insn);
   5798 }
   5799 
/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on that assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  /* Give the copy a fresh UID so it is distinct from the original.  */
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}
   5811 
   5812 /* Initialize data structures and variables in this file
   5813    before generating rtl for each function.  */
   5814 
   5815 void
   5816 init_emit (void)
   5817 {
   5818   set_first_insn (NULL);
   5819   set_last_insn (NULL);
   5820   if (param_min_nondebug_insn_uid)
   5821     cur_insn_uid = param_min_nondebug_insn_uid;
   5822   else
   5823     cur_insn_uid = 1;
   5824   cur_debug_insn_uid = 1;
   5825   reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
   5826   first_label_num = label_num;
   5827   get_current_sequence ()->next = NULL;
   5828 
   5829   /* Init the tables that describe all the pseudo regs.  */
   5830 
   5831   crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
   5832 
   5833   crtl->emit.regno_pointer_align
   5834     = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
   5835 
   5836   regno_reg_rtx
   5837     = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
   5838 
   5839   /* Put copies of all the hard registers into regno_reg_rtx.  */
   5840   memcpy (regno_reg_rtx,
   5841 	  initial_regno_reg_rtx,
   5842 	  FIRST_PSEUDO_REGISTER * sizeof (rtx));
   5843 
   5844   /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
   5845   init_virtual_regs ();
   5846 
   5847   /* Indicate that the virtual registers and stack locations are
   5848      all pointers.  */
   5849   REG_POINTER (stack_pointer_rtx) = 1;
   5850   REG_POINTER (frame_pointer_rtx) = 1;
   5851   REG_POINTER (hard_frame_pointer_rtx) = 1;
   5852   REG_POINTER (arg_pointer_rtx) = 1;
   5853 
   5854   REG_POINTER (virtual_incoming_args_rtx) = 1;
   5855   REG_POINTER (virtual_stack_vars_rtx) = 1;
   5856   REG_POINTER (virtual_stack_dynamic_rtx) = 1;
   5857   REG_POINTER (virtual_outgoing_args_rtx) = 1;
   5858   REG_POINTER (virtual_cfa_rtx) = 1;
   5859 
   5860 #ifdef STACK_BOUNDARY
   5861   REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
   5862   REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
   5863   REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
   5864   REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
   5865 
   5866   REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
   5867   REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
   5868   REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
   5869   REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
   5870 
   5871   REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
   5872 #endif
   5873 
   5874 #ifdef INIT_EXPANDERS
   5875   INIT_EXPANDERS;
   5876 #endif
   5877 }
   5878 
/* Return the value of element I of CONST_VECTOR X as a wide_int.  */

wide_int
const_vector_int_elt (const_rtx x, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
  if (i < (unsigned int) XVECLEN (x, 0))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  COUNT is element I's
     position within its pattern's series.  */
  unsigned int encoded_nelts = const_vector_encoded_nelts (x);
  unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
  unsigned int count = i / npatterns;
  unsigned int pattern = i % npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!CONST_VECTOR_STEPPED_P (x))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);

  /* Otherwise work out the value from the last two encoded elements:
     V2 sits at position 2 of the series, so element I is
     V2 + (COUNT - 2) * (V2 - V1).  */
  rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
  rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
  wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
			   rtx_mode_t (v1, elt_mode));
  return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
}
   5908 
   5909 /* Return the value of element I of CONST_VECTOR X.  */
   5910 
   5911 rtx
   5912 const_vector_elt (const_rtx x, unsigned int i)
   5913 {
   5914   /* First handle elements that are directly encoded.  */
   5915   if (i < (unsigned int) XVECLEN (x, 0))
   5916     return CONST_VECTOR_ENCODED_ELT (x, i);
   5917 
   5918   /* If there are no steps, the final encoded value is the right one.  */
   5919   if (!CONST_VECTOR_STEPPED_P (x))
   5920     {
   5921       /* Identify the pattern that contains element I and work out the index of
   5922 	 the last encoded element for that pattern.  */
   5923       unsigned int encoded_nelts = const_vector_encoded_nelts (x);
   5924       unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
   5925       unsigned int pattern = i % npatterns;
   5926       unsigned int final_i = encoded_nelts - npatterns + pattern;
   5927       return CONST_VECTOR_ENCODED_ELT (x, final_i);
   5928     }
   5929 
   5930   /* Otherwise work out the value from the last two encoded elements.  */
   5931   return immed_wide_int_const (const_vector_int_elt (x, i),
   5932 			       GET_MODE_INNER (GET_MODE (x)));
   5933 }
   5934 
   5935 /* Return true if X is a valid element for a CONST_VECTOR of the given
   5936   mode.  */
   5937 
   5938 bool
   5939 valid_for_const_vector_p (machine_mode, rtx x)
   5940 {
   5941   return (CONST_SCALAR_INT_P (x)
   5942 	  || CONST_POLY_INT_P (x)
   5943 	  || CONST_DOUBLE_AS_FLOAT_P (x)
   5944 	  || CONST_FIXED_P (x));
   5945 }
   5946 
   5947 /* Generate a vector constant of mode MODE in which every element has
   5948    value ELT.  */
   5949 
   5950 rtx
   5951 gen_const_vec_duplicate (machine_mode mode, rtx elt)
   5952 {
   5953   rtx_vector_builder builder (mode, 1, 1);
   5954   builder.quick_push (elt);
   5955   return builder.build ();
   5956 }
   5957 
   5958 /* Return a vector rtx of mode MODE in which every element has value X.
   5959    The result will be a constant if X is constant.  */
   5960 
   5961 rtx
   5962 gen_vec_duplicate (machine_mode mode, rtx x)
   5963 {
   5964   if (valid_for_const_vector_p (mode, x))
   5965     return gen_const_vec_duplicate (mode, x);
   5966   return gen_rtx_VEC_DUPLICATE (mode, x);
   5967 }
   5968 
/* A subroutine of const_vec_series_p that handles the case in which:

     (GET_CODE (X) == CONST_VECTOR
      && CONST_VECTOR_NPATTERNS (X) == 1
      && !CONST_VECTOR_DUPLICATE_P (X))

   is known to hold.  On success store the starting value in *BASE_OUT,
   the per-element increment in *STEP_OUT, and return true.  */

bool
const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
{
  /* Stepped sequences are only defined for integers, to avoid specifying
     rounding behavior.  */
  if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
    return false;

  /* A non-duplicated vector with two elements can always be seen as a
     series with a nonzero step.  Longer vectors must have a stepped
     encoding.  */
  if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
      && !CONST_VECTOR_STEPPED_P (x))
    return false;

  /* Calculate the step between the first and second elements.  */
  scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
  rtx base = CONST_VECTOR_ELT (x, 0);
  rtx step = simplify_binary_operation (MINUS, inner,
					CONST_VECTOR_ENCODED_ELT (x, 1), base);
  /* A zero step would make this a duplicate, not a series.  */
  if (rtx_equal_p (step, CONST0_RTX (inner)))
    return false;

  /* If we have a stepped encoding, check that the step between the
     second and third elements is the same as STEP.  */
  if (CONST_VECTOR_STEPPED_P (x))
    {
      rtx diff = simplify_binary_operation (MINUS, inner,
					    CONST_VECTOR_ENCODED_ELT (x, 2),
					    CONST_VECTOR_ENCODED_ELT (x, 1));
      if (!rtx_equal_p (step, diff))
	return false;
    }

  *base_out = base;
  *step_out = step;
  return true;
}
   6015 
   6016 /* Generate a vector constant of mode MODE in which element I has
   6017    the value BASE + I * STEP.  */
   6018 
   6019 rtx
   6020 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
   6021 {
   6022   gcc_assert (valid_for_const_vector_p (mode, base)
   6023 	      && valid_for_const_vector_p (mode, step));
   6024 
   6025   rtx_vector_builder builder (mode, 1, 3);
   6026   builder.quick_push (base);
   6027   for (int i = 1; i < 3; ++i)
   6028     builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
   6029 					     builder[i - 1], step));
   6030   return builder.build ();
   6031 }
   6032 
   6033 /* Generate a vector of mode MODE in which element I has the value
   6034    BASE + I * STEP.  The result will be a constant if BASE and STEP
   6035    are both constants.  */
   6036 
   6037 rtx
   6038 gen_vec_series (machine_mode mode, rtx base, rtx step)
   6039 {
   6040   if (step == const0_rtx)
   6041     return gen_vec_duplicate (mode, base);
   6042   if (valid_for_const_vector_p (mode, base)
   6043       && valid_for_const_vector_p (mode, step))
   6044     return gen_const_vec_series (mode, base, step);
   6045   return gen_rtx_VEC_SERIES (mode, base, step);
   6046 }
   6047 
   6048 /* Generate a new vector constant for mode MODE and constant value
   6049    CONSTANT.  */
   6050 
   6051 static rtx
   6052 gen_const_vector (machine_mode mode, int constant)
   6053 {
   6054   machine_mode inner = GET_MODE_INNER (mode);
   6055 
   6056   gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
   6057 
   6058   rtx el = const_tiny_rtx[constant][(int) inner];
   6059   gcc_assert (el);
   6060 
   6061   return gen_const_vec_duplicate (mode, el);
   6062 }
   6063 
   6064 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   6065    all elements are zero, and the one vector when all elements are one.  */
   6066 rtx
   6067 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
   6068 {
   6069   gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
   6070 
   6071   /* If the values are all the same, check to see if we can use one of the
   6072      standard constant vectors.  */
   6073   if (rtvec_all_equal_p (v))
   6074     return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
   6075 
   6076   unsigned int nunits = GET_NUM_ELEM (v);
   6077   rtx_vector_builder builder (mode, nunits, 1);
   6078   for (unsigned int i = 0; i < nunits; ++i)
   6079     builder.quick_push (RTVEC_ELT (v, i));
   6080   return builder.build (v);
   6081 }
   6082 
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  /* Only targets with a valid PIC register get a pic_offset_table_rtx.  */
  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  /* Process stack-limiting command-line options.  A register option,
     when given, takes precedence over a symbol option here.  */
  if (opt_fstack_limit_symbol_arg != NULL)
    stack_limit_rtx
      = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
  if (opt_fstack_limit_register_no >= 0)
    stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);

  /* Build the default memory attributes for each machine mode; these
     are shared by MEMs that carry no more specific attributes.  */
  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode && mode != VOIDmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }

  split_branch_probability = profile_probability::uninitialized ();
}
   6154 
/* Initialize global machine_mode variables (byte_mode, word_mode and
   ptr_mode) that are derived from the target's mode table.  */

void
init_derived_machine_modes (void)
{
  opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
  /* Pick the first (narrowest) integer mode matching each target
     width; later same-width modes are ignored.  */
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();

      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && !opt_byte_mode.exists ())
	opt_byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && !opt_word_mode.exists ())
	opt_word_mode = mode;
    }

  /* require () aborts if the target defines no mode of the needed
     width, which would be a target-description bug.  */
  byte_mode = opt_byte_mode.require ();
  word_mode = opt_word_mode.require ();
  ptr_mode = as_a <scalar_int_mode>
    (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
}
   6179 
   6180 /* Create some permanent unique rtl objects shared between all functions.  */
   6181 
   6182 void
   6183 init_emit_once (void)
   6184 {
   6185   int i;
   6186   machine_mode mode;
   6187   scalar_float_mode double_mode;
   6188   opt_scalar_mode smode_iter;
   6189 
   6190   /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
   6191      CONST_FIXED, and memory attribute hash tables.  */
   6192   const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
   6193 
   6194 #if TARGET_SUPPORTS_WIDE_INT
   6195   const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
   6196 #endif
   6197   const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
   6198 
   6199   if (NUM_POLY_INT_COEFFS > 1)
   6200     const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
   6201 
   6202   const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
   6203 
   6204   reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
   6205 
   6206 #ifdef INIT_EXPANDERS
   6207   /* This is to initialize {init|mark|free}_machine_status before the first
   6208      call to push_function_context_to.  This is needed by the Chill front
   6209      end which calls push_function_context_to before the first call to
   6210      init_function_start.  */
   6211   INIT_EXPANDERS;
   6212 #endif
   6213 
   6214   /* Create the unique rtx's for certain rtx codes and operand values.  */
   6215 
   6216   /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
   6217      tries to use these variables.  */
   6218   for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
   6219     const_int_rtx[i + MAX_SAVED_CONST_INT] =
   6220       gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
   6221 
   6222   if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
   6223       && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
   6224     const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
   6225   else
   6226     const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
   6227 
   6228   double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
   6229 
   6230   real_from_integer (&dconst0, double_mode, 0, SIGNED);
   6231   real_from_integer (&dconst1, double_mode, 1, SIGNED);
   6232   real_from_integer (&dconst2, double_mode, 2, SIGNED);
   6233 
   6234   dconstm1 = dconst1;
   6235   dconstm1.sign = 1;
   6236 
   6237   dconsthalf = dconst1;
   6238   SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
   6239 
   6240   for (i = 0; i < 3; i++)
   6241     {
   6242       const REAL_VALUE_TYPE *const r =
   6243 	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
   6244 
   6245       FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
   6246 	const_tiny_rtx[i][(int) mode] =
   6247 	  const_double_from_real_value (*r, mode);
   6248 
   6249       FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
   6250 	const_tiny_rtx[i][(int) mode] =
   6251 	  const_double_from_real_value (*r, mode);
   6252 
   6253       const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
   6254 
   6255       FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
   6256 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
   6257 
   6258       for (mode = MIN_MODE_PARTIAL_INT;
   6259 	   mode <= MAX_MODE_PARTIAL_INT;
   6260 	   mode = (machine_mode)((int)(mode) + 1))
   6261 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
   6262     }
   6263 
   6264   const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
   6265 
   6266   FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
   6267     const_tiny_rtx[3][(int) mode] = constm1_rtx;
   6268 
   6269   /* For BImode, 1 and -1 are unsigned and signed interpretations
   6270      of the same value.  */
   6271   for (mode = MIN_MODE_BOOL;
   6272        mode <= MAX_MODE_BOOL;
   6273        mode = (machine_mode)((int)(mode) + 1))
   6274     {
   6275       const_tiny_rtx[0][(int) mode] = const0_rtx;
   6276       if (mode == BImode)
   6277 	{
   6278 	  const_tiny_rtx[1][(int) mode] = const_true_rtx;
   6279 	  const_tiny_rtx[3][(int) mode] = const_true_rtx;
   6280 	}
   6281       else
   6282 	{
   6283 	  const_tiny_rtx[1][(int) mode] = const1_rtx;
   6284 	  const_tiny_rtx[3][(int) mode] = constm1_rtx;
   6285 	}
   6286     }
   6287 
   6288   for (mode = MIN_MODE_PARTIAL_INT;
   6289        mode <= MAX_MODE_PARTIAL_INT;
   6290        mode = (machine_mode)((int)(mode) + 1))
   6291     const_tiny_rtx[3][(int) mode] = constm1_rtx;
   6292 
   6293   FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
   6294     {
   6295       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
   6296       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
   6297     }
   6298 
   6299   FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
   6300     {
   6301       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
   6302       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
   6303     }
   6304 
   6305   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
   6306     {
   6307       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
   6308       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
   6309       if (GET_MODE_INNER (mode) == BImode)
   6310 	/* As for BImode, "all 1" and "all -1" are unsigned and signed
   6311 	   interpretations of the same value.  */
   6312 	const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
   6313       else
   6314 	const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
   6315     }
   6316 
   6317   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
   6318     {
   6319       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
   6320       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
   6321       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
   6322     }
   6323 
   6324   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
   6325     {
   6326       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
   6327       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
   6328     }
   6329 
   6330   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
   6331     {
   6332       scalar_mode smode = smode_iter.require ();
   6333       FCONST0 (smode).data.high = 0;
   6334       FCONST0 (smode).data.low = 0;
   6335       FCONST0 (smode).mode = smode;
   6336       const_tiny_rtx[0][(int) smode]
   6337 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
   6338     }
   6339 
   6340   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
   6341     {
   6342       scalar_mode smode = smode_iter.require ();
   6343       FCONST0 (smode).data.high = 0;
   6344       FCONST0 (smode).data.low = 0;
   6345       FCONST0 (smode).mode = smode;
   6346       const_tiny_rtx[0][(int) smode]
   6347 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
   6348     }
   6349 
   6350   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
   6351     {
   6352       scalar_mode smode = smode_iter.require ();
   6353       FCONST0 (smode).data.high = 0;
   6354       FCONST0 (smode).data.low = 0;
   6355       FCONST0 (smode).mode = smode;
   6356       const_tiny_rtx[0][(int) smode]
   6357 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
   6358 
   6359       /* We store the value 1.  */
   6360       FCONST1 (smode).data.high = 0;
   6361       FCONST1 (smode).data.low = 0;
   6362       FCONST1 (smode).mode = smode;
   6363       FCONST1 (smode).data
   6364 	= double_int_one.lshift (GET_MODE_FBIT (smode),
   6365 				 HOST_BITS_PER_DOUBLE_INT,
   6366 				 SIGNED_FIXED_POINT_MODE_P (smode));
   6367       const_tiny_rtx[1][(int) smode]
   6368 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
   6369     }
   6370 
   6371   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
   6372     {
   6373       scalar_mode smode = smode_iter.require ();
   6374       FCONST0 (smode).data.high = 0;
   6375       FCONST0 (smode).data.low = 0;
   6376       FCONST0 (smode).mode = smode;
   6377       const_tiny_rtx[0][(int) smode]
   6378 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
   6379 
   6380       /* We store the value 1.  */
   6381       FCONST1 (smode).data.high = 0;
   6382       FCONST1 (smode).data.low = 0;
   6383       FCONST1 (smode).mode = smode;
   6384       FCONST1 (smode).data
   6385 	= double_int_one.lshift (GET_MODE_FBIT (smode),
   6386 				 HOST_BITS_PER_DOUBLE_INT,
   6387 				 SIGNED_FIXED_POINT_MODE_P (smode));
   6388       const_tiny_rtx[1][(int) smode]
   6389 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
   6390     }
   6391 
   6392   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
   6393     {
   6394       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
   6395     }
   6396 
   6397   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
   6398     {
   6399       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
   6400     }
   6401 
   6402   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
   6403     {
   6404       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
   6405       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
   6406     }
   6407 
   6408   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
   6409     {
   6410       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
   6411       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
   6412     }
   6413 
   6414   for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
   6415     if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
   6416       const_tiny_rtx[0][i] = const0_rtx;
   6417 
   6418   pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
   6419   ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
   6420   simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
   6421   invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
   6422 				   /*prev_insn=*/NULL,
   6423 				   /*next_insn=*/NULL,
   6424 				   /*bb=*/NULL,
   6425 				   /*pattern=*/NULL_RTX,
   6426 				   /*location=*/-1,
   6427 				   CODE_FOR_nothing,
   6428 				   /*reg_notes=*/NULL_RTX);
   6429 }
   6430 
/* Produce an exact duplicate of insn INSN and emit it after AFTER.
   The copy shares no rtl structure with INSN (the pattern, the
   CALL_INSN_FUNCTION_USAGE list and the REG_NOTES are all deep-copied),
   and all insn-level flags relevant to the insn's code are propagated.
   Returns the newly emitted insn.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  /* Emit a fresh copy of INSN's pattern after AFTER, using the emit
     routine that matches INSN's code so the copy gets the right
     rtx class.  */
  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      /* Preserve whether the jump crosses between hot/cold partitions.  */
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      /* The usage list is not part of PATTERN, so it must be copied
	 separately.  */
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      /* Propagate the call-classification flags.  */
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      /* Only the insn codes above can be duplicated here.  */
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  if (NONDEBUG_INSN_P (insn))
    mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Locate the end of existing REG_NOTES in NEW_RTX.  */
  rtx *ptail = &REG_NOTES (new_rtx);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	*ptail = duplicate_reg_note (link);
	ptail = &XEXP (*ptail, 1);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
   6501 
   6502 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
   6503 rtx
   6504 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
   6505 {
   6506   if (hard_reg_clobbers[mode][regno])
   6507     return hard_reg_clobbers[mode][regno];
   6508   else
   6509     return (hard_reg_clobbers[mode][regno] =
   6510 	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
   6511 }
   6512 
/* Source locations attached to the insns emitted for the function's
   prologue and epilogue.  */
location_t prologue_location;
location_t epilogue_location;

/* The location assigned to insns emitted from now on; maintained
   lazily via set_curr_insn_location so location data structures are
   only built when insns at a given place are actually needed.  */
static location_t curr_location;
   6520 
   6521 /* Allocate insn location datastructure.  */
   6522 void
   6523 insn_locations_init (void)
   6524 {
   6525   prologue_location = epilogue_location = 0;
   6526   curr_location = UNKNOWN_LOCATION;
   6527 }
   6528 
   6529 /* At the end of emit stage, clear current location.  */
   6530 void
   6531 insn_locations_finalize (void)
   6532 {
   6533   epilogue_location = curr_location;
   6534   curr_location = UNKNOWN_LOCATION;
   6535 }
   6536 
   6537 /* Set current location.  */
   6538 void
   6539 set_curr_insn_location (location_t location)
   6540 {
   6541   curr_location = location;
   6542 }
   6543 
   6544 /* Get current location.  */
   6545 location_t
   6546 curr_insn_location (void)
   6547 {
   6548   return curr_location;
   6549 }
   6550 
   6551 /* Set the location of the insn chain starting at INSN to LOC.  */
   6552 void
   6553 set_insn_locations (rtx_insn *insn, location_t loc)
   6554 {
   6555   while (insn)
   6556     {
   6557       if (INSN_P (insn))
   6558 	INSN_LOCATION (insn) = loc;
   6559       insn = NEXT_INSN (insn);
   6560     }
   6561 }
   6562 
   6563 /* Return lexical scope block insn belongs to.  */
   6564 tree
   6565 insn_scope (const rtx_insn *insn)
   6566 {
   6567   return LOCATION_BLOCK (INSN_LOCATION (insn));
   6568 }
   6569 
   6570 /* Return line number of the statement that produced this insn.  */
   6571 int
   6572 insn_line (const rtx_insn *insn)
   6573 {
   6574   return LOCATION_LINE (INSN_LOCATION (insn));
   6575 }
   6576 
   6577 /* Return source file of the statement that produced this insn.  */
   6578 const char *
   6579 insn_file (const rtx_insn *insn)
   6580 {
   6581   return LOCATION_FILE (INSN_LOCATION (insn));
   6582 }
   6583 
   6584 /* Return expanded location of the statement that produced this insn.  */
   6585 expanded_location
   6586 insn_location (const rtx_insn *insn)
   6587 {
   6588   return expand_location (INSN_LOCATION (insn));
   6589 }
   6590 
   6591 /* Return true if memory model MODEL requires a pre-operation (release-style)
   6592    barrier or a post-operation (acquire-style) barrier.  While not universal,
   6593    this function matches behavior of several targets.  */
   6594 
   6595 bool
   6596 need_atomic_barrier_p (enum memmodel model, bool pre)
   6597 {
   6598   switch (model & MEMMODEL_BASE_MASK)
   6599     {
   6600     case MEMMODEL_RELAXED:
   6601     case MEMMODEL_CONSUME:
   6602       return false;
   6603     case MEMMODEL_RELEASE:
   6604       return pre;
   6605     case MEMMODEL_ACQUIRE:
   6606       return !pre;
   6607     case MEMMODEL_ACQ_REL:
   6608     case MEMMODEL_SEQ_CST:
   6609       return true;
   6610     default:
   6611       gcc_unreachable ();
   6612     }
   6613 }
   6614 
   6615 /* Return a constant shift amount for shifting a value of mode MODE
   6616    by VALUE bits.  */
   6617 
   6618 rtx
   6619 gen_int_shift_amount (machine_mode, poly_int64 value)
   6620 {
   6621   /* Use a 64-bit mode, to avoid any truncation.
   6622 
   6623      ??? Perhaps this should be automatically derived from the .md files
   6624      instead, or perhaps have a target hook.  */
   6625   scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
   6626 				? DImode
   6627 				: int_mode_for_size (64, 0).require ());
   6628   return gen_int_mode (value, shift_mode);
   6629 }
   6630 
   6631 /* Initialize fields of rtl_data related to stack alignment.  */
   6632 
   6633 void
   6634 rtl_data::init_stack_alignment ()
   6635 {
   6636   stack_alignment_needed = STACK_BOUNDARY;
   6637   max_used_stack_slot_alignment = STACK_BOUNDARY;
   6638   stack_alignment_estimated = 0;
   6639   preferred_stack_boundary = STACK_BOUNDARY;
   6640 }
   6641 
   6642 
   6643 #include "gt-emit-rtl.h"
   6645