Home | History | Annotate | Line # | Download | only in gcc
lra-constraints.cc revision 1.1.1.1
      1 /* Code for RTL transformations to satisfy insn constraints.
      2    Copyright (C) 2010-2022 Free Software Foundation, Inc.
      3    Contributed by Vladimir Makarov <vmakarov (at) redhat.com>.
      4 
      5    This file is part of GCC.
      6 
      7    GCC is free software; you can redistribute it and/or modify it under
      8    the terms of the GNU General Public License as published by the Free
      9    Software Foundation; either version 3, or (at your option) any later
     10    version.
     11 
     12    GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     13    WARRANTY; without even the implied warranty of MERCHANTABILITY or
     14    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     15    for more details.
     16 
     17    You should have received a copy of the GNU General Public License
     18    along with GCC; see the file COPYING3.  If not see
     19    <http://www.gnu.org/licenses/>.  */
     20 
     21 
     22 /* This file contains code for 3 passes: constraint pass,
     23    inheritance/split pass, and pass for undoing failed inheritance and
     24    split.
     25 
     26    The major goal of constraint pass is to transform RTL to satisfy
     27    insn and address constraints by:
     28      o choosing insn alternatives;
     29      o generating *reload insns* (or reloads in brief) and *reload
     30        pseudos* which will get necessary hard registers later;
     31      o substituting pseudos with equivalent values and removing the
     32        instructions that initialized those pseudos.
     33 
     34    The constraint pass has biggest and most complicated code in LRA.
     35    There are a lot of important details like:
     36      o reuse of input reload pseudos to simplify reload pseudo
     37        allocations;
     38      o some heuristics to choose insn alternative to improve the
     39        inheritance;
     40      o early clobbers etc.
     41 
     42    The pass is mimicking former reload pass in alternative choosing
     43    because the reload pass is oriented to current machine description
     44    model.  It might be changed if the machine description model is
     45    changed.
     46 
     47    There is special code for preventing all LRA and this pass cycling
     48    in case of bugs.
     49 
     50    On the first iteration of the pass we process every instruction and
     51    choose an alternative for each one.  On subsequent iterations we try
     52    to avoid reprocessing instructions if we can be sure that the old
     53    choice is still valid.
     54 
   The inheritance/split pass is to transform code to achieve
   inheritance and live range splitting.  It is done on backward
     57    traversal of EBBs.
     58 
     59    The inheritance optimization goal is to reuse values in hard
   registers.  There is an analogous optimization in the old reload pass.  The
     61    inheritance is achieved by following transformation:
     62 
     63        reload_p1 <- p	     reload_p1 <- p
     64        ...		     new_p <- reload_p1
     65        ...		=>   ...
     66        reload_p2 <- p	     reload_p2 <- new_p
     67 
     68    where p is spilled and not changed between the insns.  Reload_p1 is
     69    also called *original pseudo* and new_p is called *inheritance
     70    pseudo*.
     71 
     72    The subsequent assignment pass will try to assign the same (or
     73    another if it is not possible) hard register to new_p as to
     74    reload_p1 or reload_p2.
     75 
     76    If the assignment pass fails to assign a hard register to new_p,
     77    this file will undo the inheritance and restore the original code.
     78    This is because implementing the above sequence with a spilled
     79    new_p would make the code much worse.  The inheritance is done in
     80    EBB scope.  The above is just a simplified example to get an idea
     81    of the inheritance as the inheritance is also done for non-reload
     82    insns.
     83 
     84    Splitting (transformation) is also done in EBB scope on the same
     85    pass as the inheritance:
     86 
     87        r <- ... or ... <- r		 r <- ... or ... <- r
     88        ...				 s <- r (new insn -- save)
     89        ...			  =>
     90        ...				 r <- s (new insn -- restore)
     91        ... <- r				 ... <- r
     92 
     93     The *split pseudo* s is assigned to the hard register of the
     94     original pseudo or hard register r.
     95 
     96     Splitting is done:
     97       o In EBBs with high register pressure for global pseudos (living
     98 	in at least 2 BBs) and assigned to hard registers when there
	are more than one reload needing the hard registers;
    100       o for pseudos needing save/restore code around calls.
    101 
    102     If the split pseudo still has the same hard register as the
    103     original pseudo after the subsequent assignment pass or the
    104     original pseudo was split, the opposite transformation is done on
    105     the same pass for undoing inheritance.  */
    106 
    107 #undef REG_OK_STRICT
    108 
    109 #include "config.h"
    110 #include "system.h"
    111 #include "coretypes.h"
    112 #include "backend.h"
    113 #include "target.h"
    114 #include "rtl.h"
    115 #include "tree.h"
    116 #include "predict.h"
    117 #include "df.h"
    118 #include "memmodel.h"
    119 #include "tm_p.h"
    120 #include "expmed.h"
    121 #include "optabs.h"
    122 #include "regs.h"
    123 #include "ira.h"
    124 #include "recog.h"
    125 #include "output.h"
    126 #include "addresses.h"
    127 #include "expr.h"
    128 #include "cfgrtl.h"
    129 #include "rtl-error.h"
    130 #include "lra.h"
    131 #include "lra-int.h"
    132 #include "print-rtl.h"
    133 #include "function-abi.h"
    134 #include "rtl-iter.h"
    135 
    136 /* Value of LRA_CURR_RELOAD_NUM at the beginning of BB of the current
    137    insn.  Remember that LRA_CURR_RELOAD_NUM is the number of emitted
    138    reload insns.  */
    139 static int bb_reload_num;
    140 
    141 /* The current insn being processed and corresponding its single set
    142    (NULL otherwise), its data (basic block, the insn data, the insn
    143    static data, and the mode of each operand).  */
    144 static rtx_insn *curr_insn;
    145 static rtx curr_insn_set;
    146 static basic_block curr_bb;
    147 static lra_insn_recog_data_t curr_id;
    148 static struct lra_static_insn_data *curr_static_id;
    149 static machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
    150 /* Mode of the register substituted by its equivalence with VOIDmode
    151    (e.g. constant) and whose subreg is given operand of the current
    152    insn.  VOIDmode in all other cases.  */
    153 static machine_mode original_subreg_reg_mode[MAX_RECOG_OPERANDS];
    154 
    155 
    156 
    158 /* Start numbers for new registers and insns at the current constraints
    159    pass start.	*/
    160 static int new_regno_start;
    161 static int new_insn_uid_start;
    162 
    163 /* If LOC is nonnull, strip any outer subreg from it.  */
    164 static inline rtx *
    165 strip_subreg (rtx *loc)
    166 {
    167   return loc && GET_CODE (*loc) == SUBREG ? &SUBREG_REG (*loc) : loc;
    168 }
    169 
    170 /* Return hard regno of REGNO or if it is was not assigned to a hard
    171    register, use a hard register from its allocno class.  */
    172 static int
    173 get_try_hard_regno (int regno)
    174 {
    175   int hard_regno;
    176   enum reg_class rclass;
    177 
    178   if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
    179     hard_regno = lra_get_regno_hard_regno (regno);
    180   if (hard_regno >= 0)
    181     return hard_regno;
    182   rclass = lra_get_allocno_class (regno);
    183   if (rclass == NO_REGS)
    184     return -1;
    185   return ira_class_hard_regs[rclass][0];
    186 }
    187 
    188 /* Return the hard regno of X after removing its subreg.  If X is not
    189    a register or a subreg of a register, return -1.  If X is a pseudo,
    190    use its assignment.  If FINAL_P return the final hard regno which will
    191    be after elimination.  */
    192 static int
    193 get_hard_regno (rtx x, bool final_p)
    194 {
    195   rtx reg;
    196   int hard_regno;
    197 
    198   reg = x;
    199   if (SUBREG_P (x))
    200     reg = SUBREG_REG (x);
    201   if (! REG_P (reg))
    202     return -1;
    203   if (! HARD_REGISTER_NUM_P (hard_regno = REGNO (reg)))
    204     hard_regno = lra_get_regno_hard_regno (hard_regno);
    205   if (hard_regno < 0)
    206     return -1;
    207   if (final_p)
    208     hard_regno = lra_get_elimination_hard_regno (hard_regno);
    209   if (SUBREG_P (x))
    210     hard_regno += subreg_regno_offset (hard_regno, GET_MODE (reg),
    211 				       SUBREG_BYTE (x),  GET_MODE (x));
    212   return hard_regno;
    213 }
    214 
    215 /* If REGNO is a hard register or has been allocated a hard register,
    216    return the class of that register.  If REGNO is a reload pseudo
    217    created by the current constraints pass, return its allocno class.
    218    Return NO_REGS otherwise.  */
    219 static enum reg_class
    220 get_reg_class (int regno)
    221 {
    222   int hard_regno;
    223 
    224   if (! HARD_REGISTER_NUM_P (hard_regno = regno))
    225     hard_regno = lra_get_regno_hard_regno (regno);
    226   if (hard_regno >= 0)
    227     {
    228       hard_regno = lra_get_elimination_hard_regno (hard_regno);
    229       return REGNO_REG_CLASS (hard_regno);
    230     }
    231   if (regno >= new_regno_start)
    232     return lra_get_allocno_class (regno);
    233   return NO_REGS;
    234 }
    235 
    236 /* Return true if REG satisfies (or will satisfy) reg class constraint
    237    CL.  Use elimination first if REG is a hard register.  If REG is a
    238    reload pseudo created by this constraints pass, assume that it will
    239    be allocated a hard register from its allocno class, but allow that
    240    class to be narrowed to CL if it is currently a superset of CL and
    241    if either:
    242 
    243    - ALLOW_ALL_RELOAD_CLASS_CHANGES_P is true or
    244    - the instruction we're processing is not a reload move.
    245 
    246    If NEW_CLASS is nonnull, set *NEW_CLASS to the new allocno class of
    247    REGNO (reg), or NO_REGS if no change in its class was needed.  */
    248 static bool
    249 in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class,
    250 	    bool allow_all_reload_class_changes_p = false)
    251 {
    252   enum reg_class rclass, common_class;
    253   machine_mode reg_mode;
    254   rtx src;
    255   int class_size, hard_regno, nregs, i, j;
    256   int regno = REGNO (reg);
    257 
    258   if (new_class != NULL)
    259     *new_class = NO_REGS;
    260   if (regno < FIRST_PSEUDO_REGISTER)
    261     {
    262       rtx final_reg = reg;
    263       rtx *final_loc = &final_reg;
    264 
    265       lra_eliminate_reg_if_possible (final_loc);
    266       return TEST_HARD_REG_BIT (reg_class_contents[cl], REGNO (*final_loc));
    267     }
    268   reg_mode = GET_MODE (reg);
    269   rclass = get_reg_class (regno);
    270   src = curr_insn_set != NULL ? SET_SRC (curr_insn_set) : NULL;
    271   if (regno < new_regno_start
    272       /* Do not allow the constraints for reload instructions to
    273 	 influence the classes of new pseudos.  These reloads are
    274 	 typically moves that have many alternatives, and restricting
    275 	 reload pseudos for one alternative may lead to situations
    276 	 where other reload pseudos are no longer allocatable.  */
    277       || (!allow_all_reload_class_changes_p
    278 	  && INSN_UID (curr_insn) >= new_insn_uid_start
    279 	  && src != NULL
    280 	  && ((REG_P (src) || MEM_P (src))
    281 	      || (GET_CODE (src) == SUBREG
    282 		  && (REG_P (SUBREG_REG (src)) || MEM_P (SUBREG_REG (src)))))))
    283     /* When we don't know what class will be used finally for reload
    284        pseudos, we use ALL_REGS.  */
    285     return ((regno >= new_regno_start && rclass == ALL_REGS)
    286 	    || (rclass != NO_REGS && ira_class_subset_p[rclass][cl]
    287 		&& ! hard_reg_set_subset_p (reg_class_contents[cl],
    288 					    lra_no_alloc_regs)));
    289   else
    290     {
    291       common_class = ira_reg_class_subset[rclass][cl];
    292       if (new_class != NULL)
    293 	*new_class = common_class;
    294       if (hard_reg_set_subset_p (reg_class_contents[common_class],
    295 				 lra_no_alloc_regs))
    296 	return false;
    297       /* Check that there are enough allocatable regs.  */
    298       class_size = ira_class_hard_regs_num[common_class];
    299       for (i = 0; i < class_size; i++)
    300 	{
    301 	  hard_regno = ira_class_hard_regs[common_class][i];
    302 	  nregs = hard_regno_nregs (hard_regno, reg_mode);
    303 	  if (nregs == 1)
    304 	    return true;
    305 	  for (j = 0; j < nregs; j++)
    306 	    if (TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno + j)
    307 		|| ! TEST_HARD_REG_BIT (reg_class_contents[common_class],
    308 					hard_regno + j))
    309 	      break;
    310 	  if (j >= nregs)
    311 	    return true;
    312 	}
    313       return false;
    314     }
    315 }
    316 
    317 /* Return true if REGNO satisfies a memory constraint.	*/
    318 static bool
    319 in_mem_p (int regno)
    320 {
    321   return get_reg_class (regno) == NO_REGS;
    322 }
    323 
    324 /* Return 1 if ADDR is a valid memory address for mode MODE in address
    325    space AS, and check that each pseudo has the proper kind of hard
    326    reg.	 */
    327 static int
    328 valid_address_p (machine_mode mode ATTRIBUTE_UNUSED,
    329 		 rtx addr, addr_space_t as)
    330 {
    331 #ifdef GO_IF_LEGITIMATE_ADDRESS
    332   lra_assert (ADDR_SPACE_GENERIC_P (as));
    333   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
    334   return 0;
    335 
    336  win:
    337   return 1;
    338 #else
    339   return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
    340 #endif
    341 }
    342 
    343 namespace {
    344   /* Temporarily eliminates registers in an address (for the lifetime of
    345      the object).  */
    346   class address_eliminator {
    347   public:
    348     address_eliminator (struct address_info *ad);
    349     ~address_eliminator ();
    350 
    351   private:
    352     struct address_info *m_ad;
    353     rtx *m_base_loc;
    354     rtx m_base_reg;
    355     rtx *m_index_loc;
    356     rtx m_index_reg;
    357   };
    358 }
    359 
    360 address_eliminator::address_eliminator (struct address_info *ad)
    361   : m_ad (ad),
    362     m_base_loc (strip_subreg (ad->base_term)),
    363     m_base_reg (NULL_RTX),
    364     m_index_loc (strip_subreg (ad->index_term)),
    365     m_index_reg (NULL_RTX)
    366 {
    367   if (m_base_loc != NULL)
    368     {
    369       m_base_reg = *m_base_loc;
    370       /* If we have non-legitimate address which is decomposed not in
    371 	 the way we expected, don't do elimination here.  In such case
    372 	 the address will be reloaded and elimination will be done in
    373 	 reload insn finally.  */
    374       if (REG_P (m_base_reg))
    375 	lra_eliminate_reg_if_possible (m_base_loc);
    376       if (m_ad->base_term2 != NULL)
    377 	*m_ad->base_term2 = *m_ad->base_term;
    378     }
    379   if (m_index_loc != NULL)
    380     {
    381       m_index_reg = *m_index_loc;
    382       if (REG_P (m_index_reg))
    383 	lra_eliminate_reg_if_possible (m_index_loc);
    384     }
    385 }
    386 
    387 address_eliminator::~address_eliminator ()
    388 {
    389   if (m_base_loc && *m_base_loc != m_base_reg)
    390     {
    391       *m_base_loc = m_base_reg;
    392       if (m_ad->base_term2 != NULL)
    393 	*m_ad->base_term2 = *m_ad->base_term;
    394     }
    395   if (m_index_loc && *m_index_loc != m_index_reg)
    396     *m_index_loc = m_index_reg;
    397 }
    398 
    399 /* Return true if the eliminated form of AD is a legitimate target address.
    400    If OP is a MEM, AD is the address within OP, otherwise OP should be
    401    ignored.  CONSTRAINT is one constraint that the operand may need
    402    to meet.  */
    403 static bool
    404 valid_address_p (rtx op, struct address_info *ad,
    405 		 enum constraint_num constraint)
    406 {
    407   address_eliminator eliminator (ad);
    408 
    409   /* Allow a memory OP if it matches CONSTRAINT, even if CONSTRAINT is more
    410      forgiving than "m".
    411      Need to extract memory from op for special memory constraint,
    412      i.e. bcst_mem_operand in i386 backend.  */
    413   if (MEM_P (extract_mem_from_operand (op))
    414       && insn_extra_relaxed_memory_constraint (constraint)
    415       && constraint_satisfied_p (op, constraint))
    416     return true;
    417 
    418   return valid_address_p (ad->mode, *ad->outer, ad->as);
    419 }
    420 
    421 /* For special_memory_operand, it could be false for MEM_P (op),
    422    i.e. bcst_mem_operand in i386 backend.
    423    Extract and return real memory operand or op.  */
    424 rtx
    425 extract_mem_from_operand (rtx op)
    426 {
    427   for (rtx x = op;; x = XEXP (x, 0))
    428     {
    429       if (MEM_P (x))
    430 	return x;
    431       if (GET_RTX_LENGTH (GET_CODE (x)) != 1
    432 	  || GET_RTX_FORMAT (GET_CODE (x))[0] != 'e')
    433 	break;
    434     }
    435   return op;
    436 }
    437 
    438 /* Return true if the eliminated form of memory reference OP satisfies
    439    extra (special) memory constraint CONSTRAINT.  */
    440 static bool
    441 satisfies_memory_constraint_p (rtx op, enum constraint_num constraint)
    442 {
    443   struct address_info ad;
    444   rtx mem = extract_mem_from_operand (op);
    445   if (!MEM_P (mem))
    446     return false;
    447 
    448   decompose_mem_address (&ad, mem);
    449   address_eliminator eliminator (&ad);
    450   return constraint_satisfied_p (op, constraint);
    451 }
    452 
    453 /* Return true if the eliminated form of address AD satisfies extra
    454    address constraint CONSTRAINT.  */
    455 static bool
    456 satisfies_address_constraint_p (struct address_info *ad,
    457 				enum constraint_num constraint)
    458 {
    459   address_eliminator eliminator (ad);
    460   return constraint_satisfied_p (*ad->outer, constraint);
    461 }
    462 
    463 /* Return true if the eliminated form of address OP satisfies extra
    464    address constraint CONSTRAINT.  */
    465 static bool
    466 satisfies_address_constraint_p (rtx op, enum constraint_num constraint)
    467 {
    468   struct address_info ad;
    469 
    470   decompose_lea_address (&ad, &op);
    471   return satisfies_address_constraint_p (&ad, constraint);
    472 }
    473 
    474 /* Initiate equivalences for LRA.  As we keep original equivalences
    475    before any elimination, we need to make copies otherwise any change
    476    in insns might change the equivalences.  */
    477 void
    478 lra_init_equiv (void)
    479 {
    480   ira_expand_reg_equiv ();
    481   for (int i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
    482     {
    483       rtx res;
    484 
    485       if ((res = ira_reg_equiv[i].memory) != NULL_RTX)
    486 	ira_reg_equiv[i].memory = copy_rtx (res);
    487       if ((res = ira_reg_equiv[i].invariant) != NULL_RTX)
    488 	ira_reg_equiv[i].invariant = copy_rtx (res);
    489     }
    490 }
    491 
    492 static rtx loc_equivalence_callback (rtx, const_rtx, void *);
    493 
    494 /* Update equivalence for REGNO.  We need to this as the equivalence
    495    might contain other pseudos which are changed by their
    496    equivalences.  */
    497 static void
    498 update_equiv (int regno)
    499 {
    500   rtx x;
    501 
    502   if ((x = ira_reg_equiv[regno].memory) != NULL_RTX)
    503     ira_reg_equiv[regno].memory
    504       = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
    505 				 NULL_RTX);
    506   if ((x = ira_reg_equiv[regno].invariant) != NULL_RTX)
    507     ira_reg_equiv[regno].invariant
    508       = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
    509 				 NULL_RTX);
    510 }
    511 
    512 /* If we have decided to substitute X with another value, return that
    513    value, otherwise return X.  */
    514 static rtx
    515 get_equiv (rtx x)
    516 {
    517   int regno;
    518   rtx res;
    519 
    520   if (! REG_P (x) || (regno = REGNO (x)) < FIRST_PSEUDO_REGISTER
    521       || ! ira_reg_equiv[regno].defined_p
    522       || ! ira_reg_equiv[regno].profitable_p
    523       || lra_get_regno_hard_regno (regno) >= 0)
    524     return x;
    525   if ((res = ira_reg_equiv[regno].memory) != NULL_RTX)
    526     {
    527       if (targetm.cannot_substitute_mem_equiv_p (res))
    528 	return x;
    529       return res;
    530     }
    531   if ((res = ira_reg_equiv[regno].constant) != NULL_RTX)
    532     return res;
    533   if ((res = ira_reg_equiv[regno].invariant) != NULL_RTX)
    534     return res;
    535   gcc_unreachable ();
    536 }
    537 
    538 /* If we have decided to substitute X with the equivalent value,
    539    return that value after elimination for INSN, otherwise return
    540    X.  */
    541 static rtx
    542 get_equiv_with_elimination (rtx x, rtx_insn *insn)
    543 {
    544   rtx res = get_equiv (x);
    545 
    546   if (x == res || CONSTANT_P (res))
    547     return res;
    548   return lra_eliminate_regs_1 (insn, res, GET_MODE (res),
    549 			       false, false, 0, true);
    550 }
    551 
    552 /* Set up curr_operand_mode.  */
    553 static void
    554 init_curr_operand_mode (void)
    555 {
    556   int nop = curr_static_id->n_operands;
    557   for (int i = 0; i < nop; i++)
    558     {
    559       machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
    560       if (mode == VOIDmode)
    561 	{
    562 	  /* The .md mode for address operands is the mode of the
    563 	     addressed value rather than the mode of the address itself.  */
    564 	  if (curr_id->icode >= 0 && curr_static_id->operand[i].is_address)
    565 	    mode = Pmode;
    566 	  else
    567 	    mode = curr_static_id->operand[i].mode;
    568 	}
    569       curr_operand_mode[i] = mode;
    570     }
    571 }
    572 
    573 
    574 
    576 /* The page contains code to reuse input reloads.  */
    577 
    578 /* Structure describes input reload of the current insns.  */
    579 struct input_reload
    580 {
    581   /* True for input reload of matched operands.  */
    582   bool match_p;
    583   /* Reloaded value.  */
    584   rtx input;
    585   /* Reload pseudo used.  */
    586   rtx reg;
    587 };
    588 
    589 /* The number of elements in the following array.  */
    590 static int curr_insn_input_reloads_num;
    591 /* Array containing info about input reloads.  It is used to find the
    592    same input reload and reuse the reload pseudo in this case.	*/
    593 static struct input_reload curr_insn_input_reloads[LRA_MAX_INSN_RELOADS];
    594 
    595 /* Initiate data concerning reuse of input reloads for the current
    596    insn.  */
    597 static void
    598 init_curr_insn_input_reloads (void)
    599 {
    600   curr_insn_input_reloads_num = 0;
    601 }
    602 
    603 /* The canonical form of an rtx inside a MEM is not necessarily the same as the
    604    canonical form of the rtx outside the MEM.  Fix this up in the case that
    605    we're reloading an address (and therefore pulling it outside a MEM).  */
    606 static rtx
    607 canonicalize_reload_addr (rtx addr)
    608 {
    609   subrtx_var_iterator::array_type array;
    610   FOR_EACH_SUBRTX_VAR (iter, array, addr, NONCONST)
    611     {
    612       rtx x = *iter;
    613       if (GET_CODE (x) == MULT && CONST_INT_P (XEXP (x, 1)))
    614 	{
    615 	  const HOST_WIDE_INT ci = INTVAL (XEXP (x, 1));
    616 	  const int pwr2 = exact_log2 (ci);
    617 	  if (pwr2 > 0)
    618 	    {
    619 	      /* Rewrite this to use a shift instead, which is canonical when
    620 		 outside of a MEM.  */
    621 	      PUT_CODE (x, ASHIFT);
    622 	      XEXP (x, 1) = GEN_INT (pwr2);
    623 	    }
    624 	}
    625     }
    626 
    627   return addr;
    628 }
    629 
    630 /* Create a new pseudo using MODE, RCLASS, EXCLUDE_START_HARD_REGS, ORIGINAL or
    631    reuse an existing reload pseudo.  Don't reuse an existing reload pseudo if
    632    IN_SUBREG_P is true and the reused pseudo should be wrapped up in a SUBREG.
    633    The result pseudo is returned through RESULT_REG.  Return TRUE if we created
    634    a new pseudo, FALSE if we reused an existing reload pseudo.  Use TITLE to
    635    describe new registers for debug purposes.  */
    636 static bool
    637 get_reload_reg (enum op_type type, machine_mode mode, rtx original,
    638 		enum reg_class rclass, HARD_REG_SET *exclude_start_hard_regs,
    639 		bool in_subreg_p, const char *title, rtx *result_reg)
    640 {
    641   int i, regno;
    642   enum reg_class new_class;
    643   bool unique_p = false;
    644 
    645   if (type == OP_OUT)
    646     {
    647       /* Output reload registers tend to start out with a conservative
    648 	 choice of register class.  Usually this is ALL_REGS, although
    649 	 a target might narrow it (for performance reasons) through
    650 	 targetm.preferred_reload_class.  It's therefore quite common
    651 	 for a reload instruction to require a more restrictive class
    652 	 than the class that was originally assigned to the reload register.
    653 
    654 	 In these situations, it's more efficient to refine the choice
    655 	 of register class rather than create a second reload register.
    656 	 This also helps to avoid cycling for registers that are only
    657 	 used by reload instructions.  */
    658       if (REG_P (original)
    659 	  && (int) REGNO (original) >= new_regno_start
    660 	  && INSN_UID (curr_insn) >= new_insn_uid_start
    661 	  && in_class_p (original, rclass, &new_class, true))
    662 	{
    663 	  unsigned int regno = REGNO (original);
    664 	  if (lra_dump_file != NULL)
    665 	    {
    666 	      fprintf (lra_dump_file, "	 Reuse r%d for output ", regno);
    667 	      dump_value_slim (lra_dump_file, original, 1);
    668 	    }
    669 	  if (new_class != lra_get_allocno_class (regno))
    670 	    lra_change_class (regno, new_class, ", change to", false);
    671 	  if (lra_dump_file != NULL)
    672 	    fprintf (lra_dump_file, "\n");
    673 	  *result_reg = original;
    674 	  return false;
    675 	}
    676       *result_reg
    677 	= lra_create_new_reg_with_unique_value (mode, original, rclass,
    678 						exclude_start_hard_regs, title);
    679       return true;
    680     }
    681   /* Prevent reuse value of expression with side effects,
    682      e.g. volatile memory.  */
    683   if (! side_effects_p (original))
    684     for (i = 0; i < curr_insn_input_reloads_num; i++)
    685       {
    686 	if (! curr_insn_input_reloads[i].match_p
    687 	    && rtx_equal_p (curr_insn_input_reloads[i].input, original)
    688 	    && in_class_p (curr_insn_input_reloads[i].reg, rclass, &new_class))
    689 	  {
    690 	    rtx reg = curr_insn_input_reloads[i].reg;
    691 	    regno = REGNO (reg);
    692 	    /* If input is equal to original and both are VOIDmode,
    693 	       GET_MODE (reg) might be still different from mode.
    694 	       Ensure we don't return *result_reg with wrong mode.  */
    695 	    if (GET_MODE (reg) != mode)
    696 	      {
    697 		if (in_subreg_p)
    698 		  continue;
    699 		if (maybe_lt (GET_MODE_SIZE (GET_MODE (reg)),
    700 			      GET_MODE_SIZE (mode)))
    701 		  continue;
    702 		reg = lowpart_subreg (mode, reg, GET_MODE (reg));
    703 		if (reg == NULL_RTX || GET_CODE (reg) != SUBREG)
    704 		  continue;
    705 	      }
    706 	    *result_reg = reg;
    707 	    if (lra_dump_file != NULL)
    708 	      {
    709 		fprintf (lra_dump_file, "	 Reuse r%d for reload ", regno);
    710 		dump_value_slim (lra_dump_file, original, 1);
    711 	      }
    712 	    if (new_class != lra_get_allocno_class (regno))
    713 	      lra_change_class (regno, new_class, ", change to", false);
    714 	    if (lra_dump_file != NULL)
    715 	      fprintf (lra_dump_file, "\n");
    716 	    return false;
    717 	  }
    718 	/* If we have an input reload with a different mode, make sure it
    719 	   will get a different hard reg.  */
    720 	else if (REG_P (original)
    721 		 && REG_P (curr_insn_input_reloads[i].input)
    722 		 && REGNO (original) == REGNO (curr_insn_input_reloads[i].input)
    723 		 && (GET_MODE (original)
    724 		     != GET_MODE (curr_insn_input_reloads[i].input)))
    725 	  unique_p = true;
    726       }
    727   *result_reg = (unique_p
    728 		 ? lra_create_new_reg_with_unique_value
    729 		 : lra_create_new_reg) (mode, original, rclass,
    730 					exclude_start_hard_regs, title);
    731   lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
    732   curr_insn_input_reloads[curr_insn_input_reloads_num].input = original;
    733   curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = false;
    734   curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = *result_reg;
    735   return true;
    736 }
    737 
    738 
    739 /* The page contains major code to choose the current insn alternative
    741    and generate reloads for it.	 */
    742 
    743 /* Return the offset from REGNO of the least significant register
    744    in (reg:MODE REGNO).
    745 
    746    This function is used to tell whether two registers satisfy
    747    a matching constraint.  (reg:MODE1 REGNO1) matches (reg:MODE2 REGNO2) if:
    748 
    749          REGNO1 + lra_constraint_offset (REGNO1, MODE1)
    750 	 == REGNO2 + lra_constraint_offset (REGNO2, MODE2)  */
    751 int
    752 lra_constraint_offset (int regno, machine_mode mode)
    753 {
    754   lra_assert (regno < FIRST_PSEUDO_REGISTER);
    755 
    756   scalar_int_mode int_mode;
    757   if (WORDS_BIG_ENDIAN
    758       && is_a <scalar_int_mode> (mode, &int_mode)
    759       && GET_MODE_SIZE (int_mode) > UNITS_PER_WORD)
    760     return hard_regno_nregs (regno, mode) - 1;
    761   return 0;
    762 }
    763 
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   auto-increment and auto-decrement.  This is specifically intended for
   process_alt_operands to use in determining whether two operands
   match.  X is the operand whose number is the lower of the two.

   It is supposed that X is the output operand and Y is the input
   operand.  Y_HARD_REGNO is the final hard regno of register Y or
   register in subreg Y as we know it now.  Otherwise, it is a
   negative value.  */
static bool
operands_match_p (rtx x, rtx y, int y_hard_regno)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return true;
  /* Fast path: both operands are registers (or subregs of registers).
     Compare hard register numbers, each adjusted by
     lra_constraint_offset so that multi-word values on big-endian
     targets are compared by their least significant register.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y)))))
    {
      int j;

      i = get_hard_regno (x, false);
      if (i < 0)
	goto slow;

      if ((j = y_hard_regno) < 0)
	goto slow;

      i += lra_constraint_offset (i, GET_MODE (x));
      j += lra_constraint_offset (j, GET_MODE (y));

      return i == j;
    }

  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two post-increments are invalid
     because the assembler insn would increment only once.  On the
     other hand, a post-increment matches ordinary indexing if the
     post-increment is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y, y_hard_regno);

  /* Two pre-increments are invalid because the assembler insn would
     increment only once.  On the other hand, a pre-increment matches
     ordinary indexing if the pre-increment is the input operand.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0), -1);

 slow:
  /* Generic structural comparison, reached when the hard regno fast
     path above does not apply.  */

  if (code == REG && REG_P (y))
    return REGNO (x) == REGNO (y);

  /* A REG matches a SUBREG whose inner reg is that same rtx (and vice
     versa below) -- the paradoxical/partial cases are accepted here.  */
  if (code == REG && GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y))
      && x == SUBREG_REG (y))
    return true;
  if (GET_CODE (y) == REG && code == SUBREG && REG_P (SUBREG_REG (x))
      && SUBREG_REG (x) == y)
    return true;

  /* Now we have disposed of all the cases in which different rtx
     codes can match.  */
  if (code != GET_CODE (y))
    return false;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return false;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants are shared; pointer inequality (x != y,
	 already checked) means they differ.  */
      return false;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      break;

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements fail
     to match, return false for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return false;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return false;
	  break;

	case 'p':
	  /* Poly-int operand (e.g. SUBREG_BYTE); maybe_ne because the
	     values may be non-constant polynomials.  */
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return false;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i), -1);
	  if (val == 0)
	    return false;
	  break;

	case '0':
	  /* Unused / scratch slot -- nothing to compare.  */
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return false;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j), -1);
	      if (val == 0)
		return false;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's, except for
	     within LABEL_REFs and SYMBOL_REFs.	 */
	default:
	  gcc_unreachable ();
	}
    }
  return true;
}
    909 
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.
   HIGH parts cannot live in memory, and a pool slot needs a
   compile-time constant size.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && GET_MODE_SIZE (MODE).is_constant ()	\
   && !targetm.cannot_force_const_mem (MODE, X))

/* True if C is a non-empty register class that has too few registers
   to be safely used as a reload target class: either a single-register
   class, or one the target says is likely to be spilled.  */
#define SMALL_REGISTER_CLASS_P(C)		\
  (ira_class_hard_regs_num [(C)] == 1		\
   || (ira_class_hard_regs_num [(C)] >= 1	\
       && targetm.class_likely_spilled_p (C)))
    925 
    926 /* If REG is a reload pseudo, try to make its class satisfying CL.  */
    927 static void
    928 narrow_reload_pseudo_class (rtx reg, enum reg_class cl)
    929 {
    930   enum reg_class rclass;
    931 
    932   /* Do not make more accurate class from reloads generated.  They are
    933      mostly moves with a lot of constraints.  Making more accurate
    934      class may results in very narrow class and impossibility of find
    935      registers for several reloads of one insn.	 */
    936   if (INSN_UID (curr_insn) >= new_insn_uid_start)
    937     return;
    938   if (GET_CODE (reg) == SUBREG)
    939     reg = SUBREG_REG (reg);
    940   if (! REG_P (reg) || (int) REGNO (reg) < new_regno_start)
    941     return;
    942   if (in_class_p (reg, cl, &rclass) && rclass != cl)
    943     lra_change_class (REGNO (reg), rclass, "      Change to", true);
    944 }
    945 
    946 /* Searches X for any reference to a reg with the same value as REGNO,
    947    returning the rtx of the reference found if any.  Otherwise,
    948    returns NULL_RTX.  */
    949 static rtx
    950 regno_val_use_in (unsigned int regno, rtx x)
    951 {
    952   const char *fmt;
    953   int i, j;
    954   rtx tem;
    955 
    956   if (REG_P (x) && lra_reg_info[REGNO (x)].val == lra_reg_info[regno].val)
    957     return x;
    958 
    959   fmt = GET_RTX_FORMAT (GET_CODE (x));
    960   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    961     {
    962       if (fmt[i] == 'e')
    963 	{
    964 	  if ((tem = regno_val_use_in (regno, XEXP (x, i))))
    965 	    return tem;
    966 	}
    967       else if (fmt[i] == 'E')
    968 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
    969 	  if ((tem = regno_val_use_in (regno , XVECEXP (x, i, j))))
    970 	    return tem;
    971     }
    972 
    973   return NULL_RTX;
    974 }
    975 
/* Return true if all current insn non-output operands except INS (it
   has a negative end marker) do not use pseudos with the same value
   as REGNO.  */
static bool
check_conflict_input_operands (int regno, signed char *ins)
{
  int in;
  int n_operands = curr_static_id->n_operands;

  for (int nop = 0; nop < n_operands; nop++)
    if (! curr_static_id->operand[nop].is_operator
	&& curr_static_id->operand[nop].type != OP_OUT)
      {
	/* Skip operands that are themselves listed in INS.  */
	for (int i = 0; (in = ins[i]) >= 0; i++)
	  if (in == nop)
	    break;
	/* IN < 0 here means NOP was not found in INS; any use of a
	   pseudo with REGNO's value in it is a conflict.  */
	if (in < 0
	    && regno_val_use_in (regno, *curr_id->operand_loc[nop]) != NULL_RTX)
	  return false;
      }
  return true;
}
    998 
/* Generate reloads for matching OUT and INS (array of input operand numbers
   with end marker -1) with reg class GOAL_CLASS and EXCLUDE_START_HARD_REGS,
   considering output operands OUTS (similar array to INS) needing to be in
   different registers.  Add input and output reloads correspondingly to the
   lists *BEFORE and *AFTER.  OUT might be negative.  In this case we generate
   input reloads for matched input operands INS.  EARLY_CLOBBER_P is a flag
   that the output operand is early clobbered for chosen alternative.  */
static void
match_reload (signed char out, signed char *ins, signed char *outs,
	      enum reg_class goal_class, HARD_REG_SET *exclude_start_hard_regs,
	      rtx_insn **before, rtx_insn **after, bool early_clobber_p)
{
  bool out_conflict;
  int i, in;
  rtx new_in_reg, new_out_reg, reg;
  machine_mode inmode, outmode;
  rtx in_rtx = *curr_id->operand_loc[ins[0]];
  rtx out_rtx = out < 0 ? in_rtx : *curr_id->operand_loc[out];

  inmode = curr_operand_mode[ins[0]];
  outmode = out < 0 ? inmode : curr_operand_mode[out];
  push_to_sequence (*before);
  if (inmode != outmode)
    {
      /* The input and output modes differ: create the reload pseudo in
	 the wider mode and access the narrower side through a lowpart
	 SUBREG of it.  process_alt_operands has already checked that
	 the mode sizes are ordered.  */
      if (partial_subreg_p (outmode, inmode))
	{
	  /* Output is the narrower side.  */
	  reg = new_in_reg
	    = lra_create_new_reg_with_unique_value (inmode, in_rtx, goal_class,
						    exclude_start_hard_regs,
						    "");
	  new_out_reg = gen_lowpart_SUBREG (outmode, reg);
	  LRA_SUBREG_P (new_out_reg) = 1;
	  /* If the input reg is dying here, we can use the same hard
	     register for REG and IN_RTX.  We do it only for original
	     pseudos as reload pseudos can die although original
	     pseudos still live where reload pseudos dies.  */
	  if (REG_P (in_rtx) && (int) REGNO (in_rtx) < lra_new_regno_start
	      && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	      && (!early_clobber_p
		  || check_conflict_input_operands(REGNO (in_rtx), ins)))
	    lra_assign_reg_val (REGNO (in_rtx), REGNO (reg));
	}
      else
	{
	  /* Input is the narrower side.  */
	  reg = new_out_reg
	    = lra_create_new_reg_with_unique_value (outmode, out_rtx,
						    goal_class,
						    exclude_start_hard_regs,
						    "");
	  new_in_reg = gen_lowpart_SUBREG (inmode, reg);
	  /* NEW_IN_REG is non-paradoxical subreg.  We don't want
	     NEW_OUT_REG living above.  We add clobber clause for
	     this.  This is just a temporary clobber.  We can remove
	     it at the end of LRA work.  */
	  rtx_insn *clobber = emit_clobber (new_out_reg);
	  LRA_TEMP_CLOBBER_P (PATTERN (clobber)) = 1;
	  LRA_SUBREG_P (new_in_reg) = 1;
	  if (GET_CODE (in_rtx) == SUBREG)
	    {
	      rtx subreg_reg = SUBREG_REG (in_rtx);

	      /* If SUBREG_REG is dying here and sub-registers IN_RTX
		 and NEW_IN_REG are similar, we can use the same hard
		 register for REG and SUBREG_REG.  */
	      if (REG_P (subreg_reg)
		  && (int) REGNO (subreg_reg) < lra_new_regno_start
		  && GET_MODE (subreg_reg) == outmode
		  && known_eq (SUBREG_BYTE (in_rtx), SUBREG_BYTE (new_in_reg))
		  && find_regno_note (curr_insn, REG_DEAD, REGNO (subreg_reg))
		  && (! early_clobber_p
		      || check_conflict_input_operands (REGNO (subreg_reg),
							ins)))
		lra_assign_reg_val (REGNO (subreg_reg), REGNO (reg));
	    }
	}
    }
  else
    {
      /* Pseudos have values -- see comments for lra_reg_info.
	 Different pseudos with the same value do not conflict even if
	 they live in the same place.  When we create a pseudo we
	 assign value of original pseudo (if any) from which we
	 created the new pseudo.  If we create the pseudo from the
	 input pseudo, the new pseudo will have no conflict with the
	 input pseudo which is wrong when the input pseudo lives after
	 the insn and as the new pseudo value is changed by the insn
	 output.  Therefore we create the new pseudo from the output
	 except the case when we have single matched dying input
	 pseudo.

	 We cannot reuse the current output register because we might
	 have a situation like "a <- a op b", where the constraints
	 force the second input operand ("b") to match the output
	 operand ("a").  "b" must then be copied into a new register
	 so that it doesn't clobber the current value of "a".

	 We cannot use the same value if the output pseudo is
	 early clobbered or the input pseudo is mentioned in the
	 output, e.g. as an address part in memory, because
	 output reload will actually extend the pseudo liveness.
	 We don't care about eliminable hard regs here as we are
	 interesting only in pseudos.  */

      /* Matching input's register value is the same as one of the other
	 output operand.  Output operands in a parallel insn must be in
	 different registers.  */
      out_conflict = false;
      if (REG_P (in_rtx))
	{
	  for (i = 0; outs[i] >= 0; i++)
	    {
	      rtx other_out_rtx = *curr_id->operand_loc[outs[i]];
	      if (outs[i] != out && REG_P (other_out_rtx)
		  && (regno_val_use_in (REGNO (in_rtx), other_out_rtx)
		      != NULL_RTX))
		{
		  out_conflict = true;
		  break;
		}
	    }
	}

      /* Share one pseudo for input and output when the single matched
	 input dies here and no conflict prevents it; otherwise make a
	 fresh pseudo with the output's value (see big comment above).  */
      new_in_reg = new_out_reg
	= (! early_clobber_p && ins[1] < 0 && REG_P (in_rtx)
	   && (int) REGNO (in_rtx) < lra_new_regno_start
	   && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	   && (! early_clobber_p
	       || check_conflict_input_operands (REGNO (in_rtx), ins))
	   && (out < 0
	       || regno_val_use_in (REGNO (in_rtx), out_rtx) == NULL_RTX)
	   && !out_conflict
	   ? lra_create_new_reg (inmode, in_rtx, goal_class,
				 exclude_start_hard_regs, "")
	   : lra_create_new_reg_with_unique_value (outmode, out_rtx, goal_class,
						   exclude_start_hard_regs,
						   ""));
    }
  /* In operand can be got from transformations before processing insn
     constraints.  One example of such transformations is subreg
     reloading (see function simplify_operand_subreg).  The new
     pseudos created by the transformations might have inaccurate
     class (ALL_REGS) and we should make their classes more
     accurate.  */
  narrow_reload_pseudo_class (in_rtx, goal_class);
  lra_emit_move (copy_rtx (new_in_reg), in_rtx);
  *before = get_insns ();
  end_sequence ();
  /* Add the new pseudo to consider values of subsequent input reload
     pseudos.  */
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = in_rtx;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = true;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = new_in_reg;
  /* Substitute the reload pseudo into all matched input operands,
     picking the in-mode or out-mode rtx as each operand's mode
     requires.  */
  for (i = 0; (in = ins[i]) >= 0; i++)
    if (GET_MODE (*curr_id->operand_loc[in]) == VOIDmode
	|| GET_MODE (new_in_reg) == GET_MODE (*curr_id->operand_loc[in]))
      *curr_id->operand_loc[in] = new_in_reg;
    else
      {
	lra_assert
	  (GET_MODE (new_out_reg) == GET_MODE (*curr_id->operand_loc[in]));
	*curr_id->operand_loc[in] = new_out_reg;
      }
  lra_update_dups (curr_id, ins);
  if (out < 0)
    return;
  /* See a comment for the input operand above.  */
  narrow_reload_pseudo_class (out_rtx, goal_class);
  /* Emit the output reload only when the original output is actually
     used (no REG_UNUSED note).  */
  if (find_reg_note (curr_insn, REG_UNUSED, out_rtx) == NULL_RTX)
    {
      reg = SUBREG_P (out_rtx) ? SUBREG_REG (out_rtx) : out_rtx;
      start_sequence ();
      /* If we had strict_low_part, use it also in reload to keep other
	 parts unchanged but do it only for regs as strict_low_part
	 has no sense for memory and probably there is no insn pattern
	 to match the reload insn in memory case.  */
      if (out >= 0 && curr_static_id->operand[out].strict_low && REG_P (reg))
	out_rtx = gen_rtx_STRICT_LOW_PART (VOIDmode, out_rtx);
      lra_emit_move (out_rtx, copy_rtx (new_out_reg));
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  *curr_id->operand_loc[out] = new_out_reg;
  lra_update_dup (curr_id, out);
}
   1187 
/* Return register class which is union of all reg classes in insn
   constraint alternative string starting with P.  */
static enum reg_class
reg_class_from_constraints (const char *p)
{
  int c, len;
  enum reg_class op_class = NO_REGS;

  /* Walk the constraint string one constraint at a time; the comma
     expression below sets C to the current character and LEN to the
     constraint's length before dispatching on C.  */
  do
    switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
      {
      case '#':
      case ',':
	/* End of this alternative.  */
	return op_class;

      case 'g':
	op_class = reg_class_subunion[op_class][GENERAL_REGS];
	break;

      default:
	enum constraint_num cn = lookup_constraint (p);
	enum reg_class cl = reg_class_for_constraint (cn);
	if (cl == NO_REGS)
	  {
	    /* Address constraints contribute the base register class;
	       other non-register constraints contribute nothing.  */
	    if (insn_extra_address_constraint (cn))
	      op_class
		= (reg_class_subunion
		   [op_class][base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					      ADDRESS, SCRATCH)]);
	    break;
	  }

	op_class = reg_class_subunion[op_class][cl];
 	break;
      }
  while ((p += len), c);
  return op_class;
}
   1226 
   1227 /* If OP is a register, return the class of the register as per
   1228    get_reg_class, otherwise return NO_REGS.  */
   1229 static inline enum reg_class
   1230 get_op_class (rtx op)
   1231 {
   1232   return REG_P (op) ? get_reg_class (REGNO (op)) : NO_REGS;
   1233 }
   1234 
   1235 /* Return generated insn mem_pseudo:=val if TO_P or val:=mem_pseudo
   1236    otherwise.  If modes of MEM_PSEUDO and VAL are different, use
   1237    SUBREG for VAL to make them equal.  */
   1238 static rtx_insn *
   1239 emit_spill_move (bool to_p, rtx mem_pseudo, rtx val)
   1240 {
   1241   if (GET_MODE (mem_pseudo) != GET_MODE (val))
   1242     {
   1243       /* Usually size of mem_pseudo is greater than val size but in
   1244 	 rare cases it can be less as it can be defined by target
   1245 	 dependent macro HARD_REGNO_CALLER_SAVE_MODE.  */
   1246       if (! MEM_P (val))
   1247 	{
   1248 	  val = gen_lowpart_SUBREG (GET_MODE (mem_pseudo),
   1249 				    GET_CODE (val) == SUBREG
   1250 				    ? SUBREG_REG (val) : val);
   1251 	  LRA_SUBREG_P (val) = 1;
   1252 	}
   1253       else
   1254 	{
   1255 	  mem_pseudo = gen_lowpart_SUBREG (GET_MODE (val), mem_pseudo);
   1256 	  LRA_SUBREG_P (mem_pseudo) = 1;
   1257 	}
   1258     }
   1259   return to_p ? gen_move_insn (mem_pseudo, val)
   1260 	      : gen_move_insn (val, mem_pseudo);
   1261 }
   1262 
/* Process a special case insn (register move), return true if we
   don't need to process it anymore.  INSN should be a single set
   insn.  Set up that RTL was changed through CHANGE_P and that hook
   TARGET_SECONDARY_MEMORY_NEEDED says to use secondary memory through
   SEC_MEM_P.  */
static bool
check_and_process_move (bool *change_p, bool *sec_mem_p ATTRIBUTE_UNUSED)
{
  int sregno, dregno;
  rtx dest, src, dreg, sreg, new_reg, scratch_reg;
  rtx_insn *before;
  enum reg_class dclass, sclass, secondary_class;
  secondary_reload_info sri;

  lra_assert (curr_insn_set != NULL_RTX);
  /* DREG/SREG are the destination/source with any SUBREG stripped.  */
  dreg = dest = SET_DEST (curr_insn_set);
  sreg = src = SET_SRC (curr_insn_set);
  if (GET_CODE (dest) == SUBREG)
    dreg = SUBREG_REG (dest);
  if (GET_CODE (src) == SUBREG)
    sreg = SUBREG_REG (src);
  if (! (REG_P (dreg) || MEM_P (dreg)) || ! (REG_P (sreg) || MEM_P (sreg)))
    return false;
  sclass = dclass = NO_REGS;
  if (REG_P (dreg))
    dclass = get_reg_class (REGNO (dreg));
  gcc_assert (dclass < LIM_REG_CLASSES && dclass >= NO_REGS);
  if (dclass == ALL_REGS)
    /* ALL_REGS is used for new pseudos created by transformations
       like reload of SUBREG_REG (see function
       simplify_operand_subreg).  We don't know their class yet.  We
       should figure out the class from processing the insn
       constraints not in this fast path function.  Even if ALL_REGS
       were a right class for the pseudo, secondary_... hooks usually
       are not define for ALL_REGS.  */
    return false;
  if (REG_P (sreg))
    sclass = get_reg_class (REGNO (sreg));
  gcc_assert (sclass < LIM_REG_CLASSES && sclass >= NO_REGS);
  if (sclass == ALL_REGS)
    /* See comments above.  */
    return false;
  if (sclass == NO_REGS && dclass == NO_REGS)
    return false;
  /* Does the target require going through memory for this
     class-to-class copy?  If so, just record that and let the caller
     handle it.  */
  if (targetm.secondary_memory_needed (GET_MODE (src), sclass, dclass)
      && ((sclass != NO_REGS && dclass != NO_REGS)
	  || (GET_MODE (src)
	      != targetm.secondary_memory_needed_mode (GET_MODE (src)))))
    {
      *sec_mem_p = true;
      return false;
    }
  if (! REG_P (dreg) || ! REG_P (sreg))
    return false;
  sri.prev_sri = NULL;
  sri.icode = CODE_FOR_nothing;
  sri.extra_cost = 0;
  secondary_class = NO_REGS;
  /* Set up hard register for a reload pseudo for hook
     secondary_reload because some targets just ignore unassigned
     pseudos in the hook.  The temporary reg_renumber assignments are
     undone below.  */
  if (dclass != NO_REGS && lra_get_regno_hard_regno (REGNO (dreg)) < 0)
    {
      dregno = REGNO (dreg);
      reg_renumber[dregno] = ira_class_hard_regs[dclass][0];
    }
  else
    dregno = -1;
  if (sclass != NO_REGS && lra_get_regno_hard_regno (REGNO (sreg)) < 0)
    {
      sregno = REGNO (sreg);
      reg_renumber[sregno] = ira_class_hard_regs[sclass][0];
    }
  else
    sregno = -1;
  /* Query the target hook for an input reload of SRC...  */
  if (sclass != NO_REGS)
    secondary_class
      = (enum reg_class) targetm.secondary_reload (false, dest,
						   (reg_class_t) sclass,
						   GET_MODE (src), &sri);
  /* ...and for an output reload of DEST, cross-checking that both
     directions agree.  */
  if (sclass == NO_REGS
      || ((secondary_class != NO_REGS || sri.icode != CODE_FOR_nothing)
	  && dclass != NO_REGS))
    {
      enum reg_class old_sclass = secondary_class;
      secondary_reload_info old_sri = sri;

      sri.prev_sri = NULL;
      sri.icode = CODE_FOR_nothing;
      sri.extra_cost = 0;
      secondary_class
	= (enum reg_class) targetm.secondary_reload (true, src,
						     (reg_class_t) dclass,
						     GET_MODE (src), &sri);
      /* Check the target hook consistency.  */
      lra_assert
	((secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	 || (old_sclass == NO_REGS && old_sri.icode == CODE_FOR_nothing)
	 || (secondary_class == old_sclass && sri.icode == old_sri.icode));
    }
  /* Undo the temporary hard register assignments made above.  */
  if (sregno >= 0)
    reg_renumber [sregno] = -1;
  if (dregno >= 0)
    reg_renumber [dregno] = -1;
  if (secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
    return false;
  *change_p = true;
  new_reg = NULL_RTX;
  if (secondary_class != NO_REGS)
    new_reg = lra_create_new_reg_with_unique_value (GET_MODE (src), NULL_RTX,
						    secondary_class, NULL,
						    "secondary");
  start_sequence ();
  if (sri.icode == CODE_FOR_nothing)
    lra_emit_move (new_reg, src);
  else
    {
      /* The reload goes through a target-specific pattern that needs
	 a scratch register; its class comes from the pattern's third
	 operand constraint.  */
      enum reg_class scratch_class;

      scratch_class = (reg_class_from_constraints
		       (insn_data[sri.icode].operand[2].constraint));
      scratch_reg = (lra_create_new_reg_with_unique_value
		     (insn_data[sri.icode].operand[2].mode, NULL_RTX,
		      scratch_class, NULL, "scratch"));
      emit_insn (GEN_FCN (sri.icode) (new_reg != NULL_RTX ? new_reg : dest,
				      src, scratch_reg));
    }
  before = get_insns ();
  end_sequence ();
  lra_process_new_insns (curr_insn, before, NULL, "Inserting the move");
  if (new_reg != NULL_RTX)
    SET_SRC (curr_insn_set) = new_reg;
  else
    {
      /* The emitted sequence fully replaced the move; delete the
	 original insn.  */
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, "Deleting move %u\n", INSN_UID (curr_insn));
	  dump_insn_slim (lra_dump_file, curr_insn);
	}
      lra_set_insn_deleted (curr_insn);
      return true;
    }
  return false;
}
   1407 
/* The following data describe the result of process_alt_operands.
   The data are used in curr_insn_transform to generate reloads.  */

/* The chosen reg classes which should be used for the corresponding
   operands.  */
static enum reg_class goal_alt[MAX_RECOG_OPERANDS];
/* Hard registers which cannot be a start hard register for the corresponding
   operands.  */
static HARD_REG_SET goal_alt_exclude_start_hard_regs[MAX_RECOG_OPERANDS];
/* True if the operand should be the same as another operand and that
   other operand does not need a reload.  */
static bool goal_alt_match_win[MAX_RECOG_OPERANDS];
/* True if the operand does not need a reload.	*/
static bool goal_alt_win[MAX_RECOG_OPERANDS];
/* True if the operand can be offsetable memory.  */
static bool goal_alt_offmemok[MAX_RECOG_OPERANDS];
/* The operand number to which the given operand can be matched.  */
static int goal_alt_matches[MAX_RECOG_OPERANDS];
/* The number of elements in the following array.  */
static int goal_alt_dont_inherit_ops_num;
/* Numbers of operands whose reload pseudos should not be inherited.  */
static int goal_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
/* True if the insn commutative operands should be swapped.  */
static bool goal_alt_swapped;
/* The chosen insn alternative.	 */
static int goal_alt_number;

/* True if the corresponding operand is the result of an equivalence
   substitution.  (NOTE: the identifier keeps its historical
   misspelling of "substitution"; renaming it would break references
   elsewhere in the file.)  */
static bool equiv_substition_p[MAX_RECOG_OPERANDS];

/* The following variables are used to choose the best insn
   alternative.	 They reflect final characteristics of the best
   alternative.	 */

/* Number of necessary reloads and overall cost reflecting the
   previous value and other unpleasantness of the best alternative.  */
static int best_losers, best_overall;
/* Overall number of hard registers used for reloads.  For example, on
   some targets we need 2 general registers to reload DFmode and only
   one floating point register.	 */
static int best_reload_nregs;
/* Overall number reflecting the distances of previous reloading of the
   same value.  The distances are counted from the current BB start.  It
   is used to improve inheritance chances.  */
static int best_reload_sum;

/* True if the current insn should have no corresponding input or
   output reloads.  */
static bool no_input_reloads_p, no_output_reloads_p;

/* True if we swapped the commutative operands in the current
   insn.  */
static int curr_swapped;
   1462 
/* If CHECK_ONLY_P is false, arrange for address element *LOC to be a
   register of class CL.  Add any input reloads to list BEFORE.  AFTER
   is nonnull if *LOC is an automodified value; handle that case by
   adding the required output reloads to list AFTER.  Return true if
   the RTL was changed.

   If CHECK_ONLY_P is true, check that the *LOC is a correct address
   register.  Return false if the address register is correct.  */
static bool
process_addr_reg (rtx *loc, bool check_only_p, rtx_insn **before, rtx_insn **after,
		  enum reg_class cl)
{
  int regno;
  enum reg_class rclass, new_class;
  rtx reg;
  rtx new_reg;
  machine_mode mode;
  bool subreg_p, before_p = false;

  subreg_p = GET_CODE (*loc) == SUBREG;
  if (subreg_p)
    {
      reg = SUBREG_REG (*loc);
      mode = GET_MODE (reg);

      /* For mode with size bigger than ptr_mode, there unlikely to be "mov"
	 between two registers with different classes, but there normally will
	 be "mov" which transfers element of vector register into the general
	 register, and this normally will be a subreg which should be reloaded
	 as a whole.  This is particularly likely to be triggered when
	 -fno-split-wide-types specified.  */
      if (!REG_P (reg)
	  || in_class_p (reg, cl, &new_class)
	  || known_le (GET_MODE_SIZE (mode), GET_MODE_SIZE (ptr_mode)))
       loc = &SUBREG_REG (*loc);
    }

  reg = *loc;
  mode = GET_MODE (reg);
  if (! REG_P (reg))
    {
      if (check_only_p)
	return true;
      /* Always reload memory in an address even if the target supports
	 such addresses.  */
      new_reg = lra_create_new_reg_with_unique_value (mode, reg, cl, NULL,
						      "address");
      before_p = true;
    }
  else
    {
      regno = REGNO (reg);
      rclass = get_reg_class (regno);
      /* First try substituting the register by its equivalence.  */
      if (! check_only_p
	  && (*loc = get_equiv_with_elimination (reg, curr_insn)) != reg)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "Changing pseudo %d in address of insn %u on equiv ",
		       REGNO (reg), INSN_UID (curr_insn));
	      dump_value_slim (lra_dump_file, *loc, 1);
	      fprintf (lra_dump_file, "\n");
	    }
	  *loc = copy_rtx (*loc);
	}
      if (*loc != reg || ! in_class_p (reg, cl, &new_class))
	{
	  /* An equivalence was substituted or the register's class is
	     wrong: a reload register is needed.  */
	  if (check_only_p)
	    return true;
	  reg = *loc;
	  if (get_reload_reg (after == NULL ? OP_IN : OP_INOUT,
			      mode, reg, cl, NULL,
			      subreg_p, "address", &new_reg))
	    before_p = true;
	}
      else if (new_class != NO_REGS && rclass != new_class)
	{
	  /* No reload needed; narrowing the pseudo's class is
	     enough.  */
	  if (check_only_p)
	    return true;
	  lra_change_class (regno, new_class, "	   Change to", true);
	  return false;
	}
      else
	return false;
    }
  if (before_p)
    {
      /* Input reload: copy the original value into the new register
	 before the insn.  */
      push_to_sequence (*before);
      lra_emit_move (new_reg, reg);
      *before = get_insns ();
      end_sequence ();
    }
  *loc = new_reg;
  if (after != NULL)
    {
      /* Automodified value: copy the (possibly modified) new register
	 back after the insn.  */
      start_sequence ();
      lra_emit_move (before_p ? copy_rtx (reg) : reg, new_reg);
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  return true;
}
   1567 
/* Insert move insn in simplify_operand_subreg.  BEFORE returns
   the insn to be inserted before curr insn.  AFTER returns the
   insn to be inserted after curr insn.  ORIGREG and NEWREG
   are the original reg and new reg for reload.

   A NULL BEFORE (resp. AFTER) means no input (resp. output) move is
   needed.  The output move emitted for AFTER copies NEWREG back into
   ORIGREG after the current insn.  */
static void
insert_move_for_subreg (rtx_insn **before, rtx_insn **after, rtx origreg,
			rtx newreg)
{
  if (before)
    {
      /* Append the input move NEWREG := ORIGREG to any insns already
	 accumulated in *BEFORE.  */
      push_to_sequence (*before);
      lra_emit_move (newreg, origreg);
      *before = get_insns ();
      end_sequence ();
    }
  if (after)
    {
      /* Prepend the output move ORIGREG := NEWREG in front of any
	 insns already accumulated in *AFTER.  */
      start_sequence ();
      lra_emit_move (origreg, newreg);
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
}
   1592 
   1593 static int valid_address_p (machine_mode mode, rtx addr, addr_space_t as);
   1594 static bool process_address (int, bool, rtx_insn **, rtx_insn **);
   1595 
/* Make reloads for subreg in operand NOP with internal subreg mode
   REG_MODE, add new reloads for further processing.  Return true if
   any change was done.

   Reload insns generated here are accumulated in BEFORE/AFTER
   sequences and emitted around the current insn via
   lra_process_new_insns.  */
static bool
simplify_operand_subreg (int nop, machine_mode reg_mode)
{
  int hard_regno, inner_hard_regno;
  rtx_insn *before, *after;
  machine_mode mode, innermode;
  rtx reg, new_reg;
  rtx operand = *curr_id->operand_loc[nop];
  enum reg_class regclass;
  enum op_type type;

  before = after = NULL;

  if (GET_CODE (operand) != SUBREG)
    return false;

  /* MODE is the outer (subreg) mode, INNERMODE the mode of the
     SUBREG_REG.  */
  mode = GET_MODE (operand);
  reg = SUBREG_REG (operand);
  innermode = GET_MODE (reg);
  type = curr_static_id->operand[nop].type;
  if (MEM_P (reg))
    {
      /* Subreg of memory: first try to fold the subreg into the
	 memory reference (alter_subreg) and keep the folded form if
	 its address is valid or can be made valid.  */
      const bool addr_was_valid
	= valid_address_p (innermode, XEXP (reg, 0), MEM_ADDR_SPACE (reg));
      alter_subreg (curr_id->operand_loc[nop], false);
      rtx subst = *curr_id->operand_loc[nop];
      lra_assert (MEM_P (subst));
      const bool addr_is_valid = valid_address_p (GET_MODE (subst),
						  XEXP (subst, 0),
						  MEM_ADDR_SPACE (subst));
      if (!addr_was_valid
	  || addr_is_valid
	  || ((get_constraint_type (lookup_constraint
				    (curr_static_id->operand[nop].constraint))
	       != CT_SPECIAL_MEMORY)
	      /* We still can reload address and if the address is
		 valid, we can remove subreg without reloading its
		 inner memory.  */
	      && valid_address_p (GET_MODE (subst),
				  regno_reg_rtx
				  [ira_class_hard_regs
				   [base_reg_class (GET_MODE (subst),
						    MEM_ADDR_SPACE (subst),
						    ADDRESS, SCRATCH)][0]],
				  MEM_ADDR_SPACE (subst))))
	{
	  /* If we change the address for a paradoxical subreg of memory, the
	     new address might violate the necessary alignment or the access
	     might be slow; take this into consideration.  We need not worry
	     about accesses beyond allocated memory for paradoxical memory
	     subregs as we don't substitute such equiv memory (see processing
	     equivalences in function lra_constraints) and because for spilled
	     pseudos we allocate stack memory enough for the biggest
	     corresponding paradoxical subreg.

	     However, do not blindly simplify a (subreg (mem ...)) for
	     WORD_REGISTER_OPERATIONS targets as this may lead to loading junk
	     data into a register when the inner is narrower than outer or
	     missing important data from memory when the inner is wider than
	     outer.  This rule only applies to modes that are no wider than
	     a word.

	     If valid memory becomes invalid after subreg elimination
	     and address might be different we still have to reload
	     memory.
	  */
	  if ((! addr_was_valid
	       || addr_is_valid
	       || known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (innermode)))
	      && !(maybe_ne (GET_MODE_PRECISION (mode),
			     GET_MODE_PRECISION (innermode))
		   && known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD)
		   && known_le (GET_MODE_SIZE (innermode), UNITS_PER_WORD)
		   && WORD_REGISTER_OPERATIONS)
	      && (!(MEM_ALIGN (subst) < GET_MODE_ALIGNMENT (mode)
		    && targetm.slow_unaligned_access (mode, MEM_ALIGN (subst)))
		  || (MEM_ALIGN (reg) < GET_MODE_ALIGNMENT (innermode)
		      && targetm.slow_unaligned_access (innermode,
							MEM_ALIGN (reg)))))
	    /* The folded MEM is acceptable as-is; keep it.  */
	    return true;

	  /* Undo the folding: restore the original (subreg (mem)).  */
	  *curr_id->operand_loc[nop] = operand;

	  /* But if the address was not valid, we cannot reload the MEM without
	     reloading the address first.  */
	  if (!addr_was_valid)
	    process_address (nop, false, &before, &after);

	  /* INNERMODE is fast, MODE slow.  Reload the mem in INNERMODE.  */
	  enum reg_class rclass
	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
	  if (get_reload_reg (curr_static_id->operand[nop].type, innermode,
			      reg, rclass, NULL,
			      TRUE, "slow/invalid mem", &new_reg))
	    {
	      bool insert_before, insert_after;
	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	      insert_before = (type != OP_OUT
			       || partial_subreg_p (mode, innermode));
	      insert_after = type != OP_IN;
	      insert_move_for_subreg (insert_before ? &before : NULL,
				      insert_after ? &after : NULL,
				      reg, new_reg);
	    }
	  SUBREG_REG (operand) = new_reg;

	  /* Convert to MODE.  */
	  reg = operand;
	  rclass
	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
	  if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
			      rclass, NULL,
			      TRUE, "slow/invalid mem", &new_reg))
	    {
	      bool insert_before, insert_after;
	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	      insert_before = type != OP_OUT;
	      insert_after = type != OP_IN;
	      insert_move_for_subreg (insert_before ? &before : NULL,
				      insert_after ? &after : NULL,
				      reg, new_reg);
	    }
	  *curr_id->operand_loc[nop] = new_reg;
	  lra_process_new_insns (curr_insn, before, after,
				 "Inserting slow/invalid mem reload");
	  return true;
	}

      /* If the address was valid and became invalid, prefer to reload
	 the memory.  Typical case is when the index scale should
	 correspond to the memory mode.  */
      *curr_id->operand_loc[nop] = operand;
      /* Do not return false here as the MEM_P (reg) will be processed
	 later in this function.  */
    }
  else if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
    {
      /* Subreg of a hard register: simplify it directly.  */
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  else if (CONSTANT_P (reg))
    {
      /* Try to simplify subreg of constant.  It is usually result of
	 equivalence substitution.  */
      if (innermode == VOIDmode
	  && (innermode = original_subreg_reg_mode[nop]) == VOIDmode)
	innermode = curr_static_id->operand[nop].mode;
      if ((new_reg = simplify_subreg (mode, reg, innermode,
				      SUBREG_BYTE (operand))) != NULL_RTX)
	{
	  *curr_id->operand_loc[nop] = new_reg;
	  return true;
	}
    }
  /* Put constant into memory when we have mixed modes.  It generates
     a better code in most cases as it does not need a secondary
     reload memory.  It also prevents LRA looping when LRA is using
     secondary reload memory again and again.  */
  if (CONSTANT_P (reg) && CONST_POOL_OK_P (reg_mode, reg)
      && SCALAR_INT_MODE_P (reg_mode) != SCALAR_INT_MODE_P (mode))
    {
      SUBREG_REG (operand) = force_const_mem (reg_mode, reg);
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  /* Force a reload of the SUBREG_REG if this is a constant or PLUS or
     if there may be a problem accessing OPERAND in the outer
     mode.  */
  if ((REG_P (reg)
       && REGNO (reg) >= FIRST_PSEUDO_REGISTER
       && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
       /* Don't reload paradoxical subregs because we could be looping
	  having repeatedly final regno out of hard regs range.  */
       && (hard_regno_nregs (hard_regno, innermode)
	   >= hard_regno_nregs (hard_regno, mode))
       && simplify_subreg_regno (hard_regno, innermode,
				 SUBREG_BYTE (operand), mode) < 0
       /* Don't reload subreg for matching reload.  It is actually
	  valid subreg in LRA.  */
       && ! LRA_SUBREG_P (operand))
      || CONSTANT_P (reg) || GET_CODE (reg) == PLUS || MEM_P (reg))
    {
      enum reg_class rclass;

      if (REG_P (reg))
	/* There is a big probability that we will get the same class
	   for the new pseudo and we will get the same insn which
	   means infinite looping.  So spill the new pseudo.  */
	rclass = NO_REGS;
      else
	/* The class will be defined later in curr_insn_transform.  */
	rclass
	  = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, reg_mode, reg,
			  rclass, NULL,
			  TRUE, "subreg reg", &new_reg))
	{
	  bool insert_before, insert_after;
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT
			   || read_modify_subreg_p (operand));
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, new_reg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
			     "Inserting subreg reload");
      return true;
    }
  /* Force a reload for a paradoxical subreg.  For a paradoxical subreg,
     IRA allocates a hardreg to the inner pseudo reg according to its mode
     instead of the outermode, so the size of the hardreg may not be enough
     to contain the outermode operand, in that case we may need to insert
     a reload for the reg.  For the following two types of paradoxical subreg,
     we need to insert a reload:
     1. If the op_type is OP_IN, and the hardreg could not be paired with
        another hardreg to contain the outermode operand
        (checked by in_hard_reg_set_p), we need to insert the reload.
     2. If the op_type is OP_OUT or OP_INOUT.

     Here is a paradoxical subreg example showing how the reload is generated:

     (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
        (subreg:TI (reg:DI 107 [ __comp ]) 0)) {*movti_internal_rex64}

     In IRA, reg107 is allocated to a DImode hardreg.  We use x86-64 as example
     here, if reg107 is assigned to hardreg R15, because R15 is the last
     hardreg, compiler cannot find another hardreg to pair with R15 to
     contain TImode data.  So we insert a TImode reload reg180 for it.
     After reload is inserted:

     (insn 283 0 0 (set (subreg:DI (reg:TI 180 [orig:107 __comp ] [107]) 0)
        (reg:DI 107 [ __comp ])) -1
     (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
        (subreg:TI (reg:TI 180 [orig:107 __comp ] [107]) 0)) {*movti_internal_rex64}

     Two reload hard registers will be allocated to reg180 to save TImode data
     in LRA_assign.

     For LRA pseudos this should normally be handled by the biggest_mode
     mechanism.  However, it's possible for new uses of an LRA pseudo
     to be introduced after we've allocated it, such as when undoing
     inheritance, and the allocated register might not then be appropriate
     for the new uses.  */
  else if (REG_P (reg)
	   && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	   && paradoxical_subreg_p (operand)
	   && (inner_hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
	   && ((hard_regno
		= simplify_subreg_regno (inner_hard_regno, innermode,
					 SUBREG_BYTE (operand), mode)) < 0
	       || ((hard_regno_nregs (inner_hard_regno, innermode)
		    < hard_regno_nregs (hard_regno, mode))
		   && (regclass = lra_get_allocno_class (REGNO (reg)))
		   && (type != OP_IN
		       || !in_hard_reg_set_p (reg_class_contents[regclass],
					      mode, hard_regno)
		       || overlaps_hard_reg_set_p (lra_no_alloc_regs,
						   mode, hard_regno)))))
    {
      /* The class will be defined later in curr_insn_transform.  */
      enum reg_class rclass
	= (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
                          rclass, NULL,
			  TRUE, "paradoxical subreg", &new_reg))
        {
	  rtx subreg;
	  bool insert_before, insert_after;

	  PUT_MODE (new_reg, mode);
          subreg = gen_lowpart_SUBREG (innermode, new_reg);
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT);
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, subreg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
                             "Inserting paradoxical subreg reload");
      return true;
    }
  return false;
}
   1893 
   1894 /* Return TRUE if X refers for a hard register from SET.  */
   1895 static bool
   1896 uses_hard_regs_p (rtx x, HARD_REG_SET set)
   1897 {
   1898   int i, j, x_hard_regno;
   1899   machine_mode mode;
   1900   const char *fmt;
   1901   enum rtx_code code;
   1902 
   1903   if (x == NULL_RTX)
   1904     return false;
   1905   code = GET_CODE (x);
   1906   mode = GET_MODE (x);
   1907 
   1908   if (code == SUBREG)
   1909     {
   1910       /* For all SUBREGs we want to check whether the full multi-register
   1911 	 overlaps the set.  For normal SUBREGs this means 'get_hard_regno' of
   1912 	 the inner register, for paradoxical SUBREGs this means the
   1913 	 'get_hard_regno' of the full SUBREG and for complete SUBREGs either is
   1914 	 fine.  Use the wider mode for all cases.  */
   1915       rtx subreg = SUBREG_REG (x);
   1916       mode = wider_subreg_mode (x);
   1917       if (mode == GET_MODE (subreg))
   1918 	{
   1919 	  x = subreg;
   1920 	  code = GET_CODE (x);
   1921 	}
   1922     }
   1923 
   1924   if (REG_P (x) || SUBREG_P (x))
   1925     {
   1926       x_hard_regno = get_hard_regno (x, true);
   1927       return (x_hard_regno >= 0
   1928 	      && overlaps_hard_reg_set_p (set, mode, x_hard_regno));
   1929     }
   1930   fmt = GET_RTX_FORMAT (code);
   1931   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   1932     {
   1933       if (fmt[i] == 'e')
   1934 	{
   1935 	  if (uses_hard_regs_p (XEXP (x, i), set))
   1936 	    return true;
   1937 	}
   1938       else if (fmt[i] == 'E')
   1939 	{
   1940 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
   1941 	    if (uses_hard_regs_p (XVECEXP (x, i, j), set))
   1942 	      return true;
   1943 	}
   1944     }
   1945   return false;
   1946 }
   1947 
   1948 /* Return true if OP is a spilled pseudo. */
   1949 static inline bool
   1950 spilled_pseudo_p (rtx op)
   1951 {
   1952   return (REG_P (op)
   1953 	  && REGNO (op) >= FIRST_PSEUDO_REGISTER && in_mem_p (REGNO (op)));
   1954 }
   1955 
   1956 /* Return true if X is a general constant.  */
   1957 static inline bool
   1958 general_constant_p (rtx x)
   1959 {
   1960   return CONSTANT_P (x) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (x));
   1961 }
   1962 
   1963 static bool
   1964 reg_in_class_p (rtx reg, enum reg_class cl)
   1965 {
   1966   if (cl == NO_REGS)
   1967     return get_reg_class (REGNO (reg)) == NO_REGS;
   1968   return in_class_p (reg, cl, NULL);
   1969 }
   1970 
   1971 /* Return true if SET of RCLASS contains no hard regs which can be
   1972    used in MODE.  */
   1973 static bool
   1974 prohibited_class_reg_set_mode_p (enum reg_class rclass,
   1975 				 HARD_REG_SET &set,
   1976 				 machine_mode mode)
   1977 {
   1978   HARD_REG_SET temp;
   1979 
   1980   lra_assert (hard_reg_set_subset_p (reg_class_contents[rclass], set));
   1981   temp = set & ~lra_no_alloc_regs;
   1982   return (hard_reg_set_subset_p
   1983 	  (temp, ira_prohibited_class_mode_regs[rclass][mode]));
   1984 }
   1985 
   1986 
   1987 /* Used to check validity info about small class input operands.  It
   1988    should be incremented at start of processing an insn
   1989    alternative.  */
   1990 static unsigned int curr_small_class_check = 0;
   1991 
   1992 /* Update number of used inputs of class OP_CLASS for operand NOP
   1993    of alternative NALT.  Return true if we have more such class operands
   1994    than the number of available regs.  */
   1995 static bool
   1996 update_and_check_small_class_inputs (int nop, int nalt,
   1997 				     enum reg_class op_class)
   1998 {
   1999   static unsigned int small_class_check[LIM_REG_CLASSES];
   2000   static int small_class_input_nums[LIM_REG_CLASSES];
   2001 
   2002   if (SMALL_REGISTER_CLASS_P (op_class)
   2003       /* We are interesting in classes became small because of fixing
   2004 	 some hard regs, e.g. by an user through GCC options.  */
   2005       && hard_reg_set_intersect_p (reg_class_contents[op_class],
   2006 				   ira_no_alloc_regs)
   2007       && (curr_static_id->operand[nop].type != OP_OUT
   2008 	  || TEST_BIT (curr_static_id->operand[nop].early_clobber_alts, nalt)))
   2009     {
   2010       if (small_class_check[op_class] == curr_small_class_check)
   2011 	small_class_input_nums[op_class]++;
   2012       else
   2013 	{
   2014 	  small_class_check[op_class] = curr_small_class_check;
   2015 	  small_class_input_nums[op_class] = 1;
   2016 	}
   2017       if (small_class_input_nums[op_class] > ira_class_hard_regs_num[op_class])
   2018 	return true;
   2019     }
   2020   return false;
   2021 }
   2022 
   2023 /* Major function to choose the current insn alternative and what
   2024    operands should be reloaded and how.	 If ONLY_ALTERNATIVE is not
   2025    negative we should consider only this alternative.  Return false if
   2026    we cannot choose the alternative or find how to reload the
   2027    operands.  */
   2028 static bool
   2029 process_alt_operands (int only_alternative)
   2030 {
   2031   bool ok_p = false;
   2032   int nop, overall, nalt;
   2033   int n_alternatives = curr_static_id->n_alternatives;
   2034   int n_operands = curr_static_id->n_operands;
   2035   /* LOSERS counts the operands that don't fit this alternative and
   2036      would require loading.  */
   2037   int losers;
   2038   int addr_losers;
   2039   /* REJECT is a count of how undesirable this alternative says it is
   2040      if any reloading is required.  If the alternative matches exactly
   2041      then REJECT is ignored, but otherwise it gets this much counted
   2042      against it in addition to the reloading needed.  */
   2043   int reject;
   2044   /* This is defined by '!' or '?' alternative constraint and added to
   2045      reject.  But in some cases it can be ignored.  */
   2046   int static_reject;
   2047   int op_reject;
   2048   /* The number of elements in the following array.  */
   2049   int early_clobbered_regs_num;
   2050   /* Numbers of operands which are early clobber registers.  */
   2051   int early_clobbered_nops[MAX_RECOG_OPERANDS];
   2052   enum reg_class curr_alt[MAX_RECOG_OPERANDS];
   2053   HARD_REG_SET curr_alt_set[MAX_RECOG_OPERANDS];
   2054   HARD_REG_SET curr_alt_exclude_start_hard_regs[MAX_RECOG_OPERANDS];
   2055   bool curr_alt_match_win[MAX_RECOG_OPERANDS];
   2056   bool curr_alt_win[MAX_RECOG_OPERANDS];
   2057   bool curr_alt_offmemok[MAX_RECOG_OPERANDS];
   2058   int curr_alt_matches[MAX_RECOG_OPERANDS];
   2059   /* The number of elements in the following array.  */
   2060   int curr_alt_dont_inherit_ops_num;
   2061   /* Numbers of operands whose reload pseudos should not be inherited.	*/
   2062   int curr_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
   2063   rtx op;
   2064   /* The register when the operand is a subreg of register, otherwise the
   2065      operand itself.  */
   2066   rtx no_subreg_reg_operand[MAX_RECOG_OPERANDS];
   2067   /* The register if the operand is a register or subreg of register,
   2068      otherwise NULL.  */
   2069   rtx operand_reg[MAX_RECOG_OPERANDS];
   2070   int hard_regno[MAX_RECOG_OPERANDS];
   2071   machine_mode biggest_mode[MAX_RECOG_OPERANDS];
   2072   int reload_nregs, reload_sum;
   2073   bool costly_p;
   2074   enum reg_class cl;
   2075 
   2076   /* Calculate some data common for all alternatives to speed up the
   2077      function.	*/
   2078   for (nop = 0; nop < n_operands; nop++)
   2079     {
   2080       rtx reg;
   2081 
   2082       op = no_subreg_reg_operand[nop] = *curr_id->operand_loc[nop];
   2083       /* The real hard regno of the operand after the allocation.  */
   2084       hard_regno[nop] = get_hard_regno (op, true);
   2085 
   2086       operand_reg[nop] = reg = op;
   2087       biggest_mode[nop] = GET_MODE (op);
   2088       if (GET_CODE (op) == SUBREG)
   2089 	{
   2090 	  biggest_mode[nop] = wider_subreg_mode (op);
   2091 	  operand_reg[nop] = reg = SUBREG_REG (op);
   2092 	}
   2093       if (! REG_P (reg))
   2094 	operand_reg[nop] = NULL_RTX;
   2095       else if (REGNO (reg) >= FIRST_PSEUDO_REGISTER
   2096 	       || ((int) REGNO (reg)
   2097 		   == lra_get_elimination_hard_regno (REGNO (reg))))
   2098 	no_subreg_reg_operand[nop] = reg;
   2099       else
   2100 	operand_reg[nop] = no_subreg_reg_operand[nop]
   2101 	  /* Just use natural mode for elimination result.  It should
   2102 	     be enough for extra constraints hooks.  */
   2103 	  = regno_reg_rtx[hard_regno[nop]];
   2104     }
   2105 
   2106   /* The constraints are made of several alternatives.	Each operand's
   2107      constraint looks like foo,bar,... with commas separating the
   2108      alternatives.  The first alternatives for all operands go
   2109      together, the second alternatives go together, etc.
   2110 
   2111      First loop over alternatives.  */
   2112   alternative_mask preferred = curr_id->preferred_alternatives;
   2113   if (only_alternative >= 0)
   2114     preferred &= ALTERNATIVE_BIT (only_alternative);
   2115 
   2116   for (nalt = 0; nalt < n_alternatives; nalt++)
   2117     {
   2118       /* Loop over operands for one constraint alternative.  */
   2119       if (!TEST_BIT (preferred, nalt))
   2120 	continue;
   2121 
   2122       bool matching_early_clobber[MAX_RECOG_OPERANDS];
   2123       curr_small_class_check++;
   2124       overall = losers = addr_losers = 0;
   2125       static_reject = reject = reload_nregs = reload_sum = 0;
   2126       for (nop = 0; nop < n_operands; nop++)
   2127 	{
   2128 	  int inc = (curr_static_id
   2129 		     ->operand_alternative[nalt * n_operands + nop].reject);
   2130 	  if (lra_dump_file != NULL && inc != 0)
   2131 	    fprintf (lra_dump_file,
   2132 		     "            Staticly defined alt reject+=%d\n", inc);
   2133 	  static_reject += inc;
   2134 	  matching_early_clobber[nop] = 0;
   2135 	}
   2136       reject += static_reject;
   2137       early_clobbered_regs_num = 0;
   2138 
   2139       for (nop = 0; nop < n_operands; nop++)
   2140 	{
   2141 	  const char *p;
   2142 	  char *end;
   2143 	  int len, c, m, i, opalt_num, this_alternative_matches;
   2144 	  bool win, did_match, offmemok, early_clobber_p;
   2145 	  /* false => this operand can be reloaded somehow for this
   2146 	     alternative.  */
   2147 	  bool badop;
   2148 	  /* true => this operand can be reloaded if the alternative
   2149 	     allows regs.  */
   2150 	  bool winreg;
   2151 	  /* True if a constant forced into memory would be OK for
   2152 	     this operand.  */
   2153 	  bool constmemok;
   2154 	  enum reg_class this_alternative, this_costly_alternative;
   2155 	  HARD_REG_SET this_alternative_set, this_costly_alternative_set;
   2156 	  HARD_REG_SET this_alternative_exclude_start_hard_regs;
   2157 	  bool this_alternative_match_win, this_alternative_win;
   2158 	  bool this_alternative_offmemok;
   2159 	  bool scratch_p;
   2160 	  machine_mode mode;
   2161 	  enum constraint_num cn;
   2162 
   2163 	  opalt_num = nalt * n_operands + nop;
   2164 	  if (curr_static_id->operand_alternative[opalt_num].anything_ok)
   2165 	    {
   2166 	      /* Fast track for no constraints at all.	*/
   2167 	      curr_alt[nop] = NO_REGS;
   2168 	      CLEAR_HARD_REG_SET (curr_alt_set[nop]);
   2169 	      curr_alt_win[nop] = true;
   2170 	      curr_alt_match_win[nop] = false;
   2171 	      curr_alt_offmemok[nop] = false;
   2172 	      curr_alt_matches[nop] = -1;
   2173 	      continue;
   2174 	    }
   2175 
   2176 	  op = no_subreg_reg_operand[nop];
   2177 	  mode = curr_operand_mode[nop];
   2178 
   2179 	  win = did_match = winreg = offmemok = constmemok = false;
   2180 	  badop = true;
   2181 
   2182 	  early_clobber_p = false;
   2183 	  p = curr_static_id->operand_alternative[opalt_num].constraint;
   2184 
   2185 	  this_costly_alternative = this_alternative = NO_REGS;
   2186 	  /* We update set of possible hard regs besides its class
   2187 	     because reg class might be inaccurate.  For example,
   2188 	     union of LO_REGS (l), HI_REGS(h), and STACK_REG(k) in ARM
   2189 	     is translated in HI_REGS because classes are merged by
   2190 	     pairs and there is no accurate intermediate class.	 */
   2191 	  CLEAR_HARD_REG_SET (this_alternative_set);
   2192 	  CLEAR_HARD_REG_SET (this_costly_alternative_set);
   2193 	  CLEAR_HARD_REG_SET (this_alternative_exclude_start_hard_regs);
   2194 	  this_alternative_win = false;
   2195 	  this_alternative_match_win = false;
   2196 	  this_alternative_offmemok = false;
   2197 	  this_alternative_matches = -1;
   2198 
   2199 	  /* An empty constraint should be excluded by the fast
   2200 	     track.  */
   2201 	  lra_assert (*p != 0 && *p != ',');
   2202 
   2203 	  op_reject = 0;
   2204 	  /* Scan this alternative's specs for this operand; set WIN
   2205 	     if the operand fits any letter in this alternative.
   2206 	     Otherwise, clear BADOP if this operand could fit some
   2207 	     letter after reloads, or set WINREG if this operand could
   2208 	     fit after reloads provided the constraint allows some
   2209 	     registers.	 */
   2210 	  costly_p = false;
   2211 	  do
   2212 	    {
   2213 	      switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
   2214 		{
   2215 		case '\0':
   2216 		  len = 0;
   2217 		  break;
   2218 		case ',':
   2219 		  c = '\0';
   2220 		  break;
   2221 
   2222 		case '&':
   2223 		  early_clobber_p = true;
   2224 		  break;
   2225 
   2226 		case '$':
   2227 		  op_reject += LRA_MAX_REJECT;
   2228 		  break;
   2229 		case '^':
   2230 		  op_reject += LRA_LOSER_COST_FACTOR;
   2231 		  break;
   2232 
   2233 		case '#':
   2234 		  /* Ignore rest of this alternative.  */
   2235 		  c = '\0';
   2236 		  break;
   2237 
   2238 		case '0':  case '1':  case '2':	 case '3':  case '4':
   2239 		case '5':  case '6':  case '7':	 case '8':  case '9':
   2240 		  {
   2241 		    int m_hregno;
   2242 		    bool match_p;
   2243 
   2244 		    m = strtoul (p, &end, 10);
   2245 		    p = end;
   2246 		    len = 0;
   2247 		    lra_assert (nop > m);
   2248 
   2249 		    /* Reject matches if we don't know which operand is
   2250 		       bigger.  This situation would arguably be a bug in
   2251 		       an .md pattern, but could also occur in a user asm.  */
   2252 		    if (!ordered_p (GET_MODE_SIZE (biggest_mode[m]),
   2253 				    GET_MODE_SIZE (biggest_mode[nop])))
   2254 		      break;
   2255 
   2256 		    /* Don't match wrong asm insn operands for proper
   2257 		       diagnostic later.  */
   2258 		    if (INSN_CODE (curr_insn) < 0
   2259 			&& (curr_operand_mode[m] == BLKmode
   2260 			    || curr_operand_mode[nop] == BLKmode)
   2261 			&& curr_operand_mode[m] != curr_operand_mode[nop])
   2262 		      break;
   2263 
   2264 		    m_hregno = get_hard_regno (*curr_id->operand_loc[m], false);
   2265 		    /* We are supposed to match a previous operand.
   2266 		       If we do, we win if that one did.  If we do
   2267 		       not, count both of the operands as losers.
   2268 		       (This is too conservative, since most of the
   2269 		       time only a single reload insn will be needed
   2270 		       to make the two operands win.  As a result,
   2271 		       this alternative may be rejected when it is
   2272 		       actually desirable.)  */
   2273 		    match_p = false;
   2274 		    if (operands_match_p (*curr_id->operand_loc[nop],
   2275 					  *curr_id->operand_loc[m], m_hregno))
   2276 		      {
   2277 			/* We should reject matching of an early
   2278 			   clobber operand if the matching operand is
   2279 			   not dying in the insn.  */
   2280 			if (!TEST_BIT (curr_static_id->operand[m]
   2281 				       .early_clobber_alts, nalt)
   2282 			    || operand_reg[nop] == NULL_RTX
   2283 			    || (find_regno_note (curr_insn, REG_DEAD,
   2284 						 REGNO (op))
   2285 				|| REGNO (op) == REGNO (operand_reg[m])))
   2286 			  match_p = true;
   2287 		      }
   2288 		    if (match_p)
   2289 		      {
   2290 			/* If we are matching a non-offsettable
   2291 			   address where an offsettable address was
   2292 			   expected, then we must reject this
   2293 			   combination, because we can't reload
   2294 			   it.	*/
   2295 			if (curr_alt_offmemok[m]
   2296 			    && MEM_P (*curr_id->operand_loc[m])
   2297 			    && curr_alt[m] == NO_REGS && ! curr_alt_win[m])
   2298 			  continue;
   2299 		      }
   2300 		    else
   2301 		      {
   2302 			/* If the operands do not match and one
   2303 			   operand is INOUT, we can not match them.
   2304 			   Try other possibilities, e.g. other
   2305 			   alternatives or commutative operand
   2306 			   exchange.  */
   2307 			if (curr_static_id->operand[nop].type == OP_INOUT
   2308 			    || curr_static_id->operand[m].type == OP_INOUT)
   2309 			  break;
   2310 			/* Operands don't match.  If the operands are
   2311 			   different user defined explicit hard
   2312 			   registers, then we cannot make them match
   2313 			   when one is early clobber operand.  */
   2314 			if ((REG_P (*curr_id->operand_loc[nop])
   2315 			     || SUBREG_P (*curr_id->operand_loc[nop]))
   2316 			    && (REG_P (*curr_id->operand_loc[m])
   2317 				|| SUBREG_P (*curr_id->operand_loc[m])))
   2318 			  {
   2319 			    rtx nop_reg = *curr_id->operand_loc[nop];
   2320 			    if (SUBREG_P (nop_reg))
   2321 			      nop_reg = SUBREG_REG (nop_reg);
   2322 			    rtx m_reg = *curr_id->operand_loc[m];
   2323 			    if (SUBREG_P (m_reg))
   2324 			      m_reg = SUBREG_REG (m_reg);
   2325 
   2326 			    if (REG_P (nop_reg)
   2327 				&& HARD_REGISTER_P (nop_reg)
   2328 				&& REG_USERVAR_P (nop_reg)
   2329 				&& REG_P (m_reg)
   2330 				&& HARD_REGISTER_P (m_reg)
   2331 				&& REG_USERVAR_P (m_reg))
   2332 			      {
   2333 				int i;
   2334 
   2335 				for (i = 0; i < early_clobbered_regs_num; i++)
   2336 				  if (m == early_clobbered_nops[i])
   2337 				    break;
   2338 				if (i < early_clobbered_regs_num
   2339 				    || early_clobber_p)
   2340 				  break;
   2341 			      }
   2342 			  }
   2343 			/* Both operands must allow a reload register,
   2344 			   otherwise we cannot make them match.  */
   2345 			if (curr_alt[m] == NO_REGS)
   2346 			  break;
   2347 			/* Retroactively mark the operand we had to
   2348 			   match as a loser, if it wasn't already and
   2349 			   it wasn't matched to a register constraint
   2350 			   (e.g it might be matched by memory). */
   2351 			if (curr_alt_win[m]
   2352 			    && (operand_reg[m] == NULL_RTX
   2353 				|| hard_regno[m] < 0))
   2354 			  {
   2355 			    losers++;
   2356 			    reload_nregs
   2357 			      += (ira_reg_class_max_nregs[curr_alt[m]]
   2358 				  [GET_MODE (*curr_id->operand_loc[m])]);
   2359 			  }
   2360 
   2361 			/* Prefer matching earlyclobber alternative as
   2362 			   it results in less hard regs required for
   2363 			   the insn than a non-matching earlyclobber
   2364 			   alternative.  */
   2365 			if (TEST_BIT (curr_static_id->operand[m]
   2366 				      .early_clobber_alts, nalt))
   2367 			  {
   2368 			    if (lra_dump_file != NULL)
   2369 			      fprintf
   2370 				(lra_dump_file,
   2371 				 "            %d Matching earlyclobber alt:"
   2372 				 " reject--\n",
   2373 				 nop);
   2374 			    if (!matching_early_clobber[m])
   2375 			      {
   2376 				reject--;
   2377 				matching_early_clobber[m] = 1;
   2378 			      }
   2379 			  }
   2380 			/* Otherwise we prefer no matching
   2381 			   alternatives because it gives more freedom
   2382 			   in RA.  */
   2383 			else if (operand_reg[nop] == NULL_RTX
   2384 				 || (find_regno_note (curr_insn, REG_DEAD,
   2385 						      REGNO (operand_reg[nop]))
   2386 				     == NULL_RTX))
   2387 			  {
   2388 			    if (lra_dump_file != NULL)
   2389 			      fprintf
   2390 				(lra_dump_file,
   2391 				 "            %d Matching alt: reject+=2\n",
   2392 				 nop);
   2393 			    reject += 2;
   2394 			  }
   2395 		      }
   2396 		    /* If we have to reload this operand and some
   2397 		       previous operand also had to match the same
   2398 		       thing as this operand, we don't know how to do
   2399 		       that.  */
   2400 		    if (!match_p || !curr_alt_win[m])
   2401 		      {
   2402 			for (i = 0; i < nop; i++)
   2403 			  if (curr_alt_matches[i] == m)
   2404 			    break;
   2405 			if (i < nop)
   2406 			  break;
   2407 		      }
   2408 		    else
   2409 		      did_match = true;
   2410 
   2411 		    this_alternative_matches = m;
   2412 		    /* This can be fixed with reloads if the operand
   2413 		       we are supposed to match can be fixed with
   2414 		       reloads. */
   2415 		    badop = false;
   2416 		    this_alternative = curr_alt[m];
   2417 		    this_alternative_set = curr_alt_set[m];
   2418 		    this_alternative_exclude_start_hard_regs
   2419 			= curr_alt_exclude_start_hard_regs[m];
   2420 		    winreg = this_alternative != NO_REGS;
   2421 		    break;
   2422 		  }
   2423 
   2424 		case 'g':
   2425 		  if (MEM_P (op)
   2426 		      || general_constant_p (op)
   2427 		      || spilled_pseudo_p (op))
   2428 		    win = true;
   2429 		  cl = GENERAL_REGS;
   2430 		  goto reg;
   2431 
   2432 		default:
   2433 		  cn = lookup_constraint (p);
   2434 		  switch (get_constraint_type (cn))
   2435 		    {
   2436 		    case CT_REGISTER:
   2437 		      cl = reg_class_for_constraint (cn);
   2438 		      if (cl != NO_REGS)
   2439 			goto reg;
   2440 		      break;
   2441 
   2442 		    case CT_CONST_INT:
   2443 		      if (CONST_INT_P (op)
   2444 			  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
   2445 			win = true;
   2446 		      break;
   2447 
   2448 		    case CT_MEMORY:
   2449 		    case CT_RELAXED_MEMORY:
   2450 		      if (MEM_P (op)
   2451 			  && satisfies_memory_constraint_p (op, cn))
   2452 			win = true;
   2453 		      else if (spilled_pseudo_p (op))
   2454 			win = true;
   2455 
   2456 		      /* If we didn't already win, we can reload constants
   2457 			 via force_const_mem or put the pseudo value into
   2458 			 memory, or make other memory by reloading the
   2459 			 address like for 'o'.  */
   2460 		      if (CONST_POOL_OK_P (mode, op)
   2461 			  || MEM_P (op) || REG_P (op)
   2462 			  /* We can restore the equiv insn by a
   2463 			     reload.  */
   2464 			  || equiv_substition_p[nop])
   2465 			badop = false;
   2466 		      constmemok = true;
   2467 		      offmemok = true;
   2468 		      break;
   2469 
   2470 		    case CT_ADDRESS:
   2471 		      /* An asm operand with an address constraint
   2472 			 that doesn't satisfy address_operand has
   2473 			 is_address cleared, so that we don't try to
   2474 			 make a non-address fit.  */
   2475 		      if (!curr_static_id->operand[nop].is_address)
   2476 			break;
   2477 		      /* If we didn't already win, we can reload the address
   2478 			 into a base register.  */
   2479 		      if (satisfies_address_constraint_p (op, cn))
   2480 			win = true;
   2481 		      cl = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
   2482 					   ADDRESS, SCRATCH);
   2483 		      badop = false;
   2484 		      goto reg;
   2485 
   2486 		    case CT_FIXED_FORM:
   2487 		      if (constraint_satisfied_p (op, cn))
   2488 			win = true;
   2489 		      break;
   2490 
   2491 		    case CT_SPECIAL_MEMORY:
   2492 		      if (satisfies_memory_constraint_p (op, cn))
   2493 			win = true;
   2494 		      else if (spilled_pseudo_p (op))
   2495 			win = true;
   2496 		      break;
   2497 		    }
   2498 		  break;
   2499 
   2500 		reg:
   2501 		  if (mode == BLKmode)
   2502 		    break;
   2503 		  this_alternative = reg_class_subunion[this_alternative][cl];
   2504 		  if (hard_reg_set_subset_p (this_alternative_set,
   2505 					     reg_class_contents[cl]))
   2506 		    this_alternative_exclude_start_hard_regs
   2507 		      = ira_exclude_class_mode_regs[cl][mode];
   2508 		  else if (!hard_reg_set_subset_p (reg_class_contents[cl],
   2509 						   this_alternative_set))
   2510 		    this_alternative_exclude_start_hard_regs
   2511 		      |= ira_exclude_class_mode_regs[cl][mode];
   2512 		  this_alternative_set |= reg_class_contents[cl];
   2513 		  if (costly_p)
   2514 		    {
   2515 		      this_costly_alternative
   2516 			= reg_class_subunion[this_costly_alternative][cl];
   2517 		      this_costly_alternative_set |= reg_class_contents[cl];
   2518 		    }
   2519 		  winreg = true;
   2520 		  if (REG_P (op))
   2521 		    {
   2522 		      if (hard_regno[nop] >= 0
   2523 			  && in_hard_reg_set_p (this_alternative_set,
   2524 						mode, hard_regno[nop])
   2525 			  && !TEST_HARD_REG_BIT
   2526 			      (this_alternative_exclude_start_hard_regs,
   2527 			       hard_regno[nop]))
   2528 			win = true;
   2529 		      else if (hard_regno[nop] < 0
   2530 			       && in_class_p (op, this_alternative, NULL))
   2531 			win = true;
   2532 		    }
   2533 		  break;
   2534 		}
   2535 	      if (c != ' ' && c != '\t')
   2536 		costly_p = c == '*';
   2537 	    }
   2538 	  while ((p += len), c);
   2539 
   2540 	  scratch_p = (operand_reg[nop] != NULL_RTX
   2541 		       && ira_former_scratch_p (REGNO (operand_reg[nop])));
   2542 	  /* Record which operands fit this alternative.  */
   2543 	  if (win)
   2544 	    {
   2545 	      this_alternative_win = true;
   2546 	      if (operand_reg[nop] != NULL_RTX)
   2547 		{
   2548 		  if (hard_regno[nop] >= 0)
   2549 		    {
   2550 		      if (in_hard_reg_set_p (this_costly_alternative_set,
   2551 					     mode, hard_regno[nop]))
   2552 			{
   2553 			  if (lra_dump_file != NULL)
   2554 			    fprintf (lra_dump_file,
   2555 				     "            %d Costly set: reject++\n",
   2556 				     nop);
   2557 			  reject++;
   2558 			}
   2559 		    }
   2560 		  else
   2561 		    {
   2562 		      /* Prefer won reg to spilled pseudo under other
    2563 			 equal conditions for possible inheritance.  */
   2564 		      if (! scratch_p)
   2565 			{
   2566 			  if (lra_dump_file != NULL)
   2567 			    fprintf
   2568 			      (lra_dump_file,
   2569 			       "            %d Non pseudo reload: reject++\n",
   2570 			       nop);
   2571 			  reject++;
   2572 			}
   2573 		      if (in_class_p (operand_reg[nop],
   2574 				      this_costly_alternative, NULL))
   2575 			{
   2576 			  if (lra_dump_file != NULL)
   2577 			    fprintf
   2578 			      (lra_dump_file,
   2579 			       "            %d Non pseudo costly reload:"
   2580 			       " reject++\n",
   2581 			       nop);
   2582 			  reject++;
   2583 			}
   2584 		    }
   2585 		  /* We simulate the behavior of old reload here.
   2586 		     Although scratches need hard registers and it
   2587 		     might result in spilling other pseudos, no reload
   2588 		     insns are generated for the scratches.  So it
   2589 		     might cost something but probably less than old
   2590 		     reload pass believes.  */
   2591 		  if (scratch_p)
   2592 		    {
   2593 		      if (lra_dump_file != NULL)
   2594 			fprintf (lra_dump_file,
   2595 				 "            %d Scratch win: reject+=2\n",
   2596 				 nop);
   2597 		      reject += 2;
   2598 		    }
   2599 		}
   2600 	    }
   2601 	  else if (did_match)
   2602 	    this_alternative_match_win = true;
   2603 	  else
   2604 	    {
   2605 	      int const_to_mem = 0;
   2606 	      bool no_regs_p;
   2607 
   2608 	      reject += op_reject;
   2609 	      /* Never do output reload of stack pointer.  It makes
   2610 		 impossible to do elimination when SP is changed in
   2611 		 RTL.  */
   2612 	      if (op == stack_pointer_rtx && ! frame_pointer_needed
   2613 		  && curr_static_id->operand[nop].type != OP_IN)
   2614 		goto fail;
   2615 
   2616 	      /* If this alternative asks for a specific reg class, see if there
   2617 		 is at least one allocatable register in that class.  */
   2618 	      no_regs_p
   2619 		= (this_alternative == NO_REGS
   2620 		   || (hard_reg_set_subset_p
   2621 		       (reg_class_contents[this_alternative],
   2622 			lra_no_alloc_regs)));
   2623 
   2624 	      /* For asms, verify that the class for this alternative is possible
   2625 		 for the mode that is specified.  */
   2626 	      if (!no_regs_p && INSN_CODE (curr_insn) < 0)
   2627 		{
   2628 		  int i;
   2629 		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
   2630 		    if (targetm.hard_regno_mode_ok (i, mode)
   2631 			&& in_hard_reg_set_p (reg_class_contents[this_alternative],
   2632 					      mode, i))
   2633 		      break;
   2634 		  if (i == FIRST_PSEUDO_REGISTER)
   2635 		    winreg = false;
   2636 		}
   2637 
   2638 	      /* If this operand accepts a register, and if the
   2639 		 register class has at least one allocatable register,
   2640 		 then this operand can be reloaded.  */
   2641 	      if (winreg && !no_regs_p)
   2642 		badop = false;
   2643 
   2644 	      if (badop)
   2645 		{
   2646 		  if (lra_dump_file != NULL)
   2647 		    fprintf (lra_dump_file,
   2648 			     "            alt=%d: Bad operand -- refuse\n",
   2649 			     nalt);
   2650 		  goto fail;
   2651 		}
   2652 
   2653 	      if (this_alternative != NO_REGS)
   2654 		{
   2655 		  HARD_REG_SET available_regs
   2656 		    = (reg_class_contents[this_alternative]
   2657 		       & ~((ira_prohibited_class_mode_regs
   2658 			    [this_alternative][mode])
   2659 			   | lra_no_alloc_regs));
   2660 		  if (hard_reg_set_empty_p (available_regs))
   2661 		    {
   2662 		      /* There are no hard regs holding a value of given
   2663 			 mode.  */
   2664 		      if (offmemok)
   2665 			{
   2666 			  this_alternative = NO_REGS;
   2667 			  if (lra_dump_file != NULL)
   2668 			    fprintf (lra_dump_file,
   2669 				     "            %d Using memory because of"
   2670 				     " a bad mode: reject+=2\n",
   2671 				     nop);
   2672 			  reject += 2;
   2673 			}
   2674 		      else
   2675 			{
   2676 			  if (lra_dump_file != NULL)
   2677 			    fprintf (lra_dump_file,
   2678 				     "            alt=%d: Wrong mode -- refuse\n",
   2679 				     nalt);
   2680 			  goto fail;
   2681 			}
   2682 		    }
   2683 		}
   2684 
   2685 	      /* If not assigned pseudo has a class which a subset of
   2686 		 required reg class, it is a less costly alternative
   2687 		 as the pseudo still can get a hard reg of necessary
   2688 		 class.  */
   2689 	      if (! no_regs_p && REG_P (op) && hard_regno[nop] < 0
   2690 		  && (cl = get_reg_class (REGNO (op))) != NO_REGS
   2691 		  && ira_class_subset_p[this_alternative][cl])
   2692 		{
   2693 		  if (lra_dump_file != NULL)
   2694 		    fprintf
   2695 		      (lra_dump_file,
   2696 		       "            %d Super set class reg: reject-=3\n", nop);
   2697 		  reject -= 3;
   2698 		}
   2699 
   2700 	      this_alternative_offmemok = offmemok;
   2701 	      if (this_costly_alternative != NO_REGS)
   2702 		{
   2703 		  if (lra_dump_file != NULL)
   2704 		    fprintf (lra_dump_file,
   2705 			     "            %d Costly loser: reject++\n", nop);
   2706 		  reject++;
   2707 		}
   2708 	      /* If the operand is dying, has a matching constraint,
   2709 		 and satisfies constraints of the matched operand
   2710 		 which failed to satisfy the own constraints, most probably
   2711 		 the reload for this operand will be gone.  */
   2712 	      if (this_alternative_matches >= 0
   2713 		  && !curr_alt_win[this_alternative_matches]
   2714 		  && REG_P (op)
   2715 		  && find_regno_note (curr_insn, REG_DEAD, REGNO (op))
   2716 		  && (hard_regno[nop] >= 0
   2717 		      ? in_hard_reg_set_p (this_alternative_set,
   2718 					   mode, hard_regno[nop])
   2719 		      : in_class_p (op, this_alternative, NULL)))
   2720 		{
   2721 		  if (lra_dump_file != NULL)
   2722 		    fprintf
   2723 		      (lra_dump_file,
   2724 		       "            %d Dying matched operand reload: reject++\n",
   2725 		       nop);
   2726 		  reject++;
   2727 		}
   2728 	      else
   2729 		{
   2730 		  /* Strict_low_part requires to reload the register
   2731 		     not the sub-register.  In this case we should
   2732 		     check that a final reload hard reg can hold the
   2733 		     value mode.  */
   2734 		  if (curr_static_id->operand[nop].strict_low
   2735 		      && REG_P (op)
   2736 		      && hard_regno[nop] < 0
   2737 		      && GET_CODE (*curr_id->operand_loc[nop]) == SUBREG
   2738 		      && ira_class_hard_regs_num[this_alternative] > 0
   2739 		      && (!targetm.hard_regno_mode_ok
   2740 			  (ira_class_hard_regs[this_alternative][0],
   2741 			   GET_MODE (*curr_id->operand_loc[nop]))))
   2742 		    {
   2743 		      if (lra_dump_file != NULL)
   2744 			fprintf
   2745 			  (lra_dump_file,
   2746 			   "            alt=%d: Strict low subreg reload -- refuse\n",
   2747 			   nalt);
   2748 		      goto fail;
   2749 		    }
   2750 		  losers++;
   2751 		}
   2752 	      if (operand_reg[nop] != NULL_RTX
   2753 		  /* Output operands and matched input operands are
   2754 		     not inherited.  The following conditions do not
   2755 		     exactly describe the previous statement but they
   2756 		     are pretty close.  */
   2757 		  && curr_static_id->operand[nop].type != OP_OUT
   2758 		  && (this_alternative_matches < 0
   2759 		      || curr_static_id->operand[nop].type != OP_IN))
   2760 		{
   2761 		  int last_reload = (lra_reg_info[ORIGINAL_REGNO
   2762 						  (operand_reg[nop])]
   2763 				     .last_reload);
   2764 
   2765 		  /* The value of reload_sum has sense only if we
   2766 		     process insns in their order.  It happens only on
   2767 		     the first constraints sub-pass when we do most of
   2768 		     reload work.  */
   2769 		  if (lra_constraint_iter == 1 && last_reload > bb_reload_num)
   2770 		    reload_sum += last_reload - bb_reload_num;
   2771 		}
   2772 	      /* If this is a constant that is reloaded into the
   2773 		 desired class by copying it to memory first, count
   2774 		 that as another reload.  This is consistent with
   2775 		 other code and is required to avoid choosing another
   2776 		 alternative when the constant is moved into memory.
   2777 		 Note that the test here is precisely the same as in
   2778 		 the code below that calls force_const_mem.  */
   2779 	      if (CONST_POOL_OK_P (mode, op)
   2780 		  && ((targetm.preferred_reload_class
   2781 		       (op, this_alternative) == NO_REGS)
   2782 		      || no_input_reloads_p))
   2783 		{
   2784 		  const_to_mem = 1;
   2785 		  if (! no_regs_p)
   2786 		    losers++;
   2787 		}
   2788 
   2789 	      /* Alternative loses if it requires a type of reload not
   2790 		 permitted for this insn.  We can always reload
   2791 		 objects with a REG_UNUSED note.  */
   2792 	      if ((curr_static_id->operand[nop].type != OP_IN
   2793 		   && no_output_reloads_p
   2794 		   && ! find_reg_note (curr_insn, REG_UNUSED, op))
   2795 		  || (curr_static_id->operand[nop].type != OP_OUT
   2796 		      && no_input_reloads_p && ! const_to_mem)
   2797 		  || (this_alternative_matches >= 0
   2798 		      && (no_input_reloads_p
   2799 			  || (no_output_reloads_p
   2800 			      && (curr_static_id->operand
   2801 				  [this_alternative_matches].type != OP_IN)
   2802 			      && ! find_reg_note (curr_insn, REG_UNUSED,
   2803 						  no_subreg_reg_operand
   2804 						  [this_alternative_matches])))))
   2805 		{
   2806 		  if (lra_dump_file != NULL)
   2807 		    fprintf
   2808 		      (lra_dump_file,
   2809 		       "            alt=%d: No input/output reload -- refuse\n",
   2810 		       nalt);
   2811 		  goto fail;
   2812 		}
   2813 
    2814 	      /* Alternative loses if its required class pseudo cannot
   2815 		 hold value of required mode.  Such insns can be
   2816 		 described by insn definitions with mode iterators.  */
   2817 	      if (GET_MODE (*curr_id->operand_loc[nop]) != VOIDmode
   2818 		  && ! hard_reg_set_empty_p (this_alternative_set)
   2819 		  /* It is common practice for constraints to use a
   2820 		     class which does not have actually enough regs to
   2821 		     hold the value (e.g. x86 AREG for mode requiring
    2822 		     more than one general reg).  Therefore we have 2
   2823 		     conditions to check that the reload pseudo cannot
   2824 		     hold the mode value.  */
   2825 		  && (!targetm.hard_regno_mode_ok
   2826 		      (ira_class_hard_regs[this_alternative][0],
   2827 		       GET_MODE (*curr_id->operand_loc[nop])))
   2828 		  /* The above condition is not enough as the first
   2829 		     reg in ira_class_hard_regs can be not aligned for
   2830 		     multi-words mode values.  */
   2831 		  && (prohibited_class_reg_set_mode_p
   2832 		      (this_alternative, this_alternative_set,
   2833 		       GET_MODE (*curr_id->operand_loc[nop]))))
   2834 		{
   2835 		  if (lra_dump_file != NULL)
   2836 		    fprintf (lra_dump_file,
   2837 			     "            alt=%d: reload pseudo for op %d "
   2838 			     "cannot hold the mode value -- refuse\n",
   2839 			     nalt, nop);
   2840 		  goto fail;
   2841 		}
   2842 
   2843 	      /* Check strong discouragement of reload of non-constant
   2844 		 into class THIS_ALTERNATIVE.  */
   2845 	      if (! CONSTANT_P (op) && ! no_regs_p
   2846 		  && (targetm.preferred_reload_class
   2847 		      (op, this_alternative) == NO_REGS
   2848 		      || (curr_static_id->operand[nop].type == OP_OUT
   2849 			  && (targetm.preferred_output_reload_class
   2850 			      (op, this_alternative) == NO_REGS))))
   2851 		{
   2852 		  if (offmemok && REG_P (op))
   2853 		    {
   2854 		      if (lra_dump_file != NULL)
   2855 			fprintf
   2856 			  (lra_dump_file,
   2857 			   "            %d Spill pseudo into memory: reject+=3\n",
   2858 			   nop);
   2859 		      reject += 3;
   2860 		    }
   2861 		  else
   2862 		    {
   2863 		      if (lra_dump_file != NULL)
   2864 			fprintf
   2865 			  (lra_dump_file,
   2866 			   "            %d Non-prefered reload: reject+=%d\n",
   2867 			   nop, LRA_MAX_REJECT);
   2868 		      reject += LRA_MAX_REJECT;
   2869 		    }
   2870 		}
   2871 
   2872 	      if (! (MEM_P (op) && offmemok)
   2873 		  && ! (const_to_mem && constmemok))
   2874 		{
   2875 		  /* We prefer to reload pseudos over reloading other
   2876 		     things, since such reloads may be able to be
   2877 		     eliminated later.  So bump REJECT in other cases.
   2878 		     Don't do this in the case where we are forcing a
   2879 		     constant into memory and it will then win since
   2880 		     we don't want to have a different alternative
   2881 		     match then.  */
   2882 		  if (! (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER))
   2883 		    {
   2884 		      if (lra_dump_file != NULL)
   2885 			fprintf
   2886 			  (lra_dump_file,
   2887 			   "            %d Non-pseudo reload: reject+=2\n",
   2888 			   nop);
   2889 		      reject += 2;
   2890 		    }
   2891 
   2892 		  if (! no_regs_p)
   2893 		    reload_nregs
   2894 		      += ira_reg_class_max_nregs[this_alternative][mode];
   2895 
   2896 		  if (SMALL_REGISTER_CLASS_P (this_alternative))
   2897 		    {
   2898 		      if (lra_dump_file != NULL)
   2899 			fprintf
   2900 			  (lra_dump_file,
   2901 			   "            %d Small class reload: reject+=%d\n",
   2902 			   nop, LRA_LOSER_COST_FACTOR / 2);
   2903 		      reject += LRA_LOSER_COST_FACTOR / 2;
   2904 		    }
   2905 		}
   2906 
   2907 	      /* We are trying to spill pseudo into memory.  It is
   2908 		 usually more costly than moving to a hard register
    2909 		 although it might take the same number of
   2910 		 reloads.
   2911 
   2912 		 Non-pseudo spill may happen also.  Suppose a target allows both
   2913 		 register and memory in the operand constraint alternatives,
    2914 		 then it's typical that an eliminable register has a substitution
   2915 		 of "base + offset" which can either be reloaded by a simple
   2916 		 "new_reg <= base + offset" which will match the register
   2917 		 constraint, or a similar reg addition followed by further spill
   2918 		 to and reload from memory which will match the memory
   2919 		 constraint, but this memory spill will be much more costly
   2920 		 usually.
   2921 
   2922 		 Code below increases the reject for both pseudo and non-pseudo
   2923 		 spill.  */
   2924 	      if (no_regs_p
   2925 		  && !(MEM_P (op) && offmemok)
   2926 		  && !(REG_P (op) && hard_regno[nop] < 0))
   2927 		{
   2928 		  if (lra_dump_file != NULL)
   2929 		    fprintf
   2930 		      (lra_dump_file,
   2931 		       "            %d Spill %spseudo into memory: reject+=3\n",
   2932 		       nop, REG_P (op) ? "" : "Non-");
   2933 		  reject += 3;
   2934 		  if (VECTOR_MODE_P (mode))
   2935 		    {
   2936 		      /* Spilling vectors into memory is usually more
   2937 			 costly as they contain big values.  */
   2938 		      if (lra_dump_file != NULL)
   2939 			fprintf
   2940 			  (lra_dump_file,
   2941 			   "            %d Spill vector pseudo: reject+=2\n",
   2942 			   nop);
   2943 		      reject += 2;
   2944 		    }
   2945 		}
   2946 
   2947 	      /* When we use an operand requiring memory in given
   2948 		 alternative, the insn should write *and* read the
   2949 		 value to/from memory it is costly in comparison with
   2950 		 an insn alternative which does not use memory
   2951 		 (e.g. register or immediate operand).  We exclude
   2952 		 memory operand for such case as we can satisfy the
   2953 		 memory constraints by reloading address.  */
   2954 	      if (no_regs_p && offmemok && !MEM_P (op))
   2955 		{
   2956 		  if (lra_dump_file != NULL)
   2957 		    fprintf
   2958 		      (lra_dump_file,
   2959 		       "            Using memory insn operand %d: reject+=3\n",
   2960 		       nop);
   2961 		  reject += 3;
   2962 		}
   2963 
   2964 	      /* If reload requires moving value through secondary
   2965 		 memory, it will need one more insn at least.  */
   2966 	      if (this_alternative != NO_REGS
   2967 		  && REG_P (op) && (cl = get_reg_class (REGNO (op))) != NO_REGS
   2968 		  && ((curr_static_id->operand[nop].type != OP_OUT
   2969 		       && targetm.secondary_memory_needed (GET_MODE (op), cl,
   2970 							   this_alternative))
   2971 		      || (curr_static_id->operand[nop].type != OP_IN
   2972 			  && (targetm.secondary_memory_needed
   2973 			      (GET_MODE (op), this_alternative, cl)))))
   2974 		losers++;
   2975 
   2976 	      if (MEM_P (op) && offmemok)
   2977 		addr_losers++;
   2978 	      else
   2979 		{
   2980 		  /* Input reloads can be inherited more often than
   2981 		     output reloads can be removed, so penalize output
   2982 		     reloads.  */
   2983 		  if (!REG_P (op) || curr_static_id->operand[nop].type != OP_IN)
   2984 		    {
   2985 		      if (lra_dump_file != NULL)
   2986 			fprintf
   2987 			  (lra_dump_file,
   2988 			   "            %d Non input pseudo reload: reject++\n",
   2989 			   nop);
   2990 		      reject++;
   2991 		    }
   2992 
   2993 		  if (curr_static_id->operand[nop].type == OP_INOUT)
   2994 		    {
   2995 		      if (lra_dump_file != NULL)
   2996 			fprintf
   2997 			  (lra_dump_file,
   2998 			   "            %d Input/Output reload: reject+=%d\n",
   2999 			   nop, LRA_LOSER_COST_FACTOR);
   3000 		      reject += LRA_LOSER_COST_FACTOR;
   3001 		    }
   3002 		}
   3003 	    }
   3004 
   3005 	  if (early_clobber_p && ! scratch_p)
   3006 	    {
   3007 	      if (lra_dump_file != NULL)
   3008 		fprintf (lra_dump_file,
   3009 			 "            %d Early clobber: reject++\n", nop);
   3010 	      reject++;
   3011 	    }
   3012 	  /* ??? We check early clobbers after processing all operands
   3013 	     (see loop below) and there we update the costs more.
   3014 	     Should we update the cost (may be approximately) here
   3015 	     because of early clobber register reloads or it is a rare
   3016 	     or non-important thing to be worth to do it.  */
   3017 	  overall = (losers * LRA_LOSER_COST_FACTOR + reject
   3018 		     - (addr_losers == losers ? static_reject : 0));
   3019 	  if ((best_losers == 0 || losers != 0) && best_overall < overall)
   3020             {
   3021               if (lra_dump_file != NULL)
   3022 		fprintf (lra_dump_file,
   3023 			 "            alt=%d,overall=%d,losers=%d -- refuse\n",
   3024 			 nalt, overall, losers);
   3025               goto fail;
   3026             }
   3027 
   3028 	  if (update_and_check_small_class_inputs (nop, nalt,
   3029 						   this_alternative))
   3030 	    {
   3031 	      if (lra_dump_file != NULL)
   3032 		fprintf (lra_dump_file,
   3033 			 "            alt=%d, not enough small class regs -- refuse\n",
   3034 			 nalt);
   3035 	      goto fail;
   3036 	    }
   3037 	  curr_alt[nop] = this_alternative;
   3038 	  curr_alt_set[nop] = this_alternative_set;
   3039 	  curr_alt_exclude_start_hard_regs[nop]
   3040 	    = this_alternative_exclude_start_hard_regs;
   3041 	  curr_alt_win[nop] = this_alternative_win;
   3042 	  curr_alt_match_win[nop] = this_alternative_match_win;
   3043 	  curr_alt_offmemok[nop] = this_alternative_offmemok;
   3044 	  curr_alt_matches[nop] = this_alternative_matches;
   3045 
   3046 	  if (this_alternative_matches >= 0
   3047 	      && !did_match && !this_alternative_win)
   3048 	    curr_alt_win[this_alternative_matches] = false;
   3049 
   3050 	  if (early_clobber_p && operand_reg[nop] != NULL_RTX)
   3051 	    early_clobbered_nops[early_clobbered_regs_num++] = nop;
   3052 	}
   3053 
   3054       if (curr_insn_set != NULL_RTX && n_operands == 2
   3055 	  /* Prevent processing non-move insns.  */
   3056 	  && (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
   3057 	      || SET_SRC (curr_insn_set) == no_subreg_reg_operand[1])
   3058 	  && ((! curr_alt_win[0] && ! curr_alt_win[1]
   3059 	       && REG_P (no_subreg_reg_operand[0])
   3060 	       && REG_P (no_subreg_reg_operand[1])
   3061 	       && (reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
   3062 		   || reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0])))
   3063 	      || (! curr_alt_win[0] && curr_alt_win[1]
   3064 		  && REG_P (no_subreg_reg_operand[1])
   3065 		  /* Check that we reload memory not the memory
   3066 		     address.  */
   3067 		  && ! (curr_alt_offmemok[0]
   3068 			&& MEM_P (no_subreg_reg_operand[0]))
   3069 		  && reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0]))
   3070 	      || (curr_alt_win[0] && ! curr_alt_win[1]
   3071 		  && REG_P (no_subreg_reg_operand[0])
   3072 		  /* Check that we reload memory not the memory
   3073 		     address.  */
   3074 		  && ! (curr_alt_offmemok[1]
   3075 			&& MEM_P (no_subreg_reg_operand[1]))
   3076 		  && reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
   3077 		  && (! CONST_POOL_OK_P (curr_operand_mode[1],
   3078 					 no_subreg_reg_operand[1])
   3079 		      || (targetm.preferred_reload_class
   3080 			  (no_subreg_reg_operand[1],
   3081 			   (enum reg_class) curr_alt[1]) != NO_REGS))
   3082 		  /* If it is a result of recent elimination in move
   3083 		     insn we can transform it into an add still by
   3084 		     using this alternative.  */
   3085 		  && GET_CODE (no_subreg_reg_operand[1]) != PLUS
   3086 		  /* Likewise if the source has been replaced with an
   3087 		     equivalent value.  This only happens once -- the reload
   3088 		     will use the equivalent value instead of the register it
   3089 		     replaces -- so there should be no danger of cycling.  */
   3090 		  && !equiv_substition_p[1])))
   3091 	{
   3092 	  /* We have a move insn and a new reload insn will be similar
   3093 	     to the current insn.  We should avoid such situation as
   3094 	     it results in LRA cycling.  */
   3095 	  if (lra_dump_file != NULL)
   3096 	    fprintf (lra_dump_file,
   3097 		     "            Cycle danger: overall += LRA_MAX_REJECT\n");
   3098 	  overall += LRA_MAX_REJECT;
   3099 	}
   3100       ok_p = true;
   3101       curr_alt_dont_inherit_ops_num = 0;
   3102       for (nop = 0; nop < early_clobbered_regs_num; nop++)
   3103 	{
   3104 	  int i, j, clobbered_hard_regno, first_conflict_j, last_conflict_j;
   3105 	  HARD_REG_SET temp_set;
   3106 
   3107 	  i = early_clobbered_nops[nop];
   3108 	  if ((! curr_alt_win[i] && ! curr_alt_match_win[i])
   3109 	      || hard_regno[i] < 0)
   3110 	    continue;
   3111 	  lra_assert (operand_reg[i] != NULL_RTX);
   3112 	  clobbered_hard_regno = hard_regno[i];
   3113 	  CLEAR_HARD_REG_SET (temp_set);
   3114 	  add_to_hard_reg_set (&temp_set, biggest_mode[i], clobbered_hard_regno);
   3115 	  first_conflict_j = last_conflict_j = -1;
   3116 	  for (j = 0; j < n_operands; j++)
   3117 	    if (j == i
   3118 		/* We don't want process insides of match_operator and
   3119 		   match_parallel because otherwise we would process
   3120 		   their operands once again generating a wrong
   3121 		   code.  */
   3122 		|| curr_static_id->operand[j].is_operator)
   3123 	      continue;
   3124 	    else if ((curr_alt_matches[j] == i && curr_alt_match_win[j])
   3125 		     || (curr_alt_matches[i] == j && curr_alt_match_win[i]))
   3126 	      continue;
   3127 	    /* If we don't reload j-th operand, check conflicts.  */
   3128 	    else if ((curr_alt_win[j] || curr_alt_match_win[j])
   3129 		     && uses_hard_regs_p (*curr_id->operand_loc[j], temp_set))
   3130 	      {
   3131 		if (first_conflict_j < 0)
   3132 		  first_conflict_j = j;
   3133 		last_conflict_j = j;
   3134 		/* Both the earlyclobber operand and conflicting operand
   3135 		   cannot both be user defined hard registers.  */
   3136 		if (HARD_REGISTER_P (operand_reg[i])
   3137 		    && REG_USERVAR_P (operand_reg[i])
   3138 		    && operand_reg[j] != NULL_RTX
   3139 		    && HARD_REGISTER_P (operand_reg[j])
   3140 		    && REG_USERVAR_P (operand_reg[j]))
   3141 		  {
   3142 		    /* For asm, let curr_insn_transform diagnose it.  */
   3143 		    if (INSN_CODE (curr_insn) < 0)
   3144 		      return false;
   3145 		    fatal_insn ("unable to generate reloads for "
   3146 				"impossible constraints:", curr_insn);
   3147 		  }
   3148 	      }
   3149 	  if (last_conflict_j < 0)
   3150 	    continue;
   3151 
   3152 	  /* If an earlyclobber operand conflicts with another non-matching
   3153 	     operand (ie, they have been assigned the same hard register),
   3154 	     then it is better to reload the other operand, as there may
   3155 	     exist yet another operand with a matching constraint associated
   3156 	     with the earlyclobber operand.  However, if one of the operands
   3157 	     is an explicit use of a hard register, then we must reload the
   3158 	     other non-hard register operand.  */
   3159 	  if (HARD_REGISTER_P (operand_reg[i])
   3160 	      || (first_conflict_j == last_conflict_j
   3161 		  && operand_reg[last_conflict_j] != NULL_RTX
   3162 		  && !curr_alt_match_win[last_conflict_j]
   3163 		  && !HARD_REGISTER_P (operand_reg[last_conflict_j])))
   3164 	    {
   3165 	      curr_alt_win[last_conflict_j] = false;
   3166 	      curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++]
   3167 		= last_conflict_j;
   3168 	      losers++;
   3169 	      if (lra_dump_file != NULL)
   3170 		fprintf
   3171 		  (lra_dump_file,
   3172 		   "            %d Conflict early clobber reload: reject--\n",
   3173 		   i);
   3174 	    }
   3175 	  else
   3176 	    {
   3177 	      /* We need to reload early clobbered register and the
   3178 		 matched registers.  */
   3179 	      for (j = 0; j < n_operands; j++)
   3180 		if (curr_alt_matches[j] == i)
   3181 		  {
   3182 		    curr_alt_match_win[j] = false;
   3183 		    losers++;
   3184 		    overall += LRA_LOSER_COST_FACTOR;
   3185 		  }
   3186 	      if (! curr_alt_match_win[i])
   3187 		curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++] = i;
   3188 	      else
   3189 		{
   3190 		  /* Remember pseudos used for match reloads are never
   3191 		     inherited.  */
   3192 		  lra_assert (curr_alt_matches[i] >= 0);
   3193 		  curr_alt_win[curr_alt_matches[i]] = false;
   3194 		}
   3195 	      curr_alt_win[i] = curr_alt_match_win[i] = false;
   3196 	      losers++;
   3197 	      if (lra_dump_file != NULL)
   3198 		fprintf
   3199 		  (lra_dump_file,
   3200 		   "            %d Matched conflict early clobber reloads: "
   3201 		   "reject--\n",
   3202 		   i);
   3203 	    }
   3204 	  /* Early clobber was already reflected in REJECT. */
   3205 	  if (!matching_early_clobber[i])
   3206 	    {
   3207 	      lra_assert (reject > 0);
   3208 	      reject--;
   3209 	      matching_early_clobber[i] = 1;
   3210 	    }
   3211 	  overall += LRA_LOSER_COST_FACTOR - 1;
   3212 	}
   3213       if (lra_dump_file != NULL)
   3214 	fprintf (lra_dump_file, "          alt=%d,overall=%d,losers=%d,rld_nregs=%d\n",
   3215 		 nalt, overall, losers, reload_nregs);
   3216 
   3217       /* If this alternative can be made to work by reloading, and it
   3218 	 needs less reloading than the others checked so far, record
   3219 	 it as the chosen goal for reloading.  */
   3220       if ((best_losers != 0 && losers == 0)
   3221 	  || (((best_losers == 0 && losers == 0)
   3222 	       || (best_losers != 0 && losers != 0))
   3223 	      && (best_overall > overall
   3224 		  || (best_overall == overall
   3225 		      /* If the cost of the reloads is the same,
   3226 			 prefer alternative which requires minimal
   3227 			 number of reload regs.  */
   3228 		      && (reload_nregs < best_reload_nregs
   3229 			  || (reload_nregs == best_reload_nregs
   3230 			      && (best_reload_sum < reload_sum
   3231 				  || (best_reload_sum == reload_sum
   3232 				      && nalt < goal_alt_number))))))))
   3233 	{
   3234 	  for (nop = 0; nop < n_operands; nop++)
   3235 	    {
   3236 	      goal_alt_win[nop] = curr_alt_win[nop];
   3237 	      goal_alt_match_win[nop] = curr_alt_match_win[nop];
   3238 	      goal_alt_matches[nop] = curr_alt_matches[nop];
   3239 	      goal_alt[nop] = curr_alt[nop];
   3240 	      goal_alt_exclude_start_hard_regs[nop]
   3241 		= curr_alt_exclude_start_hard_regs[nop];
   3242 	      goal_alt_offmemok[nop] = curr_alt_offmemok[nop];
   3243 	    }
   3244 	  goal_alt_dont_inherit_ops_num = curr_alt_dont_inherit_ops_num;
   3245 	  for (nop = 0; nop < curr_alt_dont_inherit_ops_num; nop++)
   3246 	    goal_alt_dont_inherit_ops[nop] = curr_alt_dont_inherit_ops[nop];
   3247 	  goal_alt_swapped = curr_swapped;
   3248 	  best_overall = overall;
   3249 	  best_losers = losers;
   3250 	  best_reload_nregs = reload_nregs;
   3251 	  best_reload_sum = reload_sum;
   3252 	  goal_alt_number = nalt;
   3253 	}
   3254       if (losers == 0)
   3255 	/* Everything is satisfied.  Do not process alternatives
   3256 	   anymore.  */
   3257 	break;
   3258     fail:
   3259       ;
   3260     }
   3261   return ok_p;
   3262 }
   3263 
/* Make reload base reg from address AD.  Return the new address rtx
   (the reload pseudo plus AD's original displacement), or NULL_RTX on
   failure -- either the replacement address would not be valid, or the
   move loading the old base into the new pseudo is not recognized.
   On failure any emitted insns are deleted again.  */
static rtx
base_to_reg (struct address_info *ad)
{
  enum reg_class cl;
  int code = -1;
  rtx new_inner = NULL_RTX;
  rtx new_reg = NULL_RTX;
  rtx_insn *insn;
  rtx_insn *last_insn = get_last_insn();

  lra_assert (ad->disp == ad->disp_term);
  cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
                       get_index_code (ad));
  new_reg = lra_create_new_reg (GET_MODE (*ad->base), NULL_RTX, cl, NULL,
				"base");
  /* Proposed replacement: new pseudo + original displacement (or just
     the pseudo when there is no displacement term).  */
  new_inner = simplify_gen_binary (PLUS, GET_MODE (new_reg), new_reg,
                                   ad->disp_term == NULL
                                   ? const0_rtx
                                   : *ad->disp_term);
  if (!valid_address_p (ad->mode, new_inner, ad->as))
    return NULL_RTX;
  insn = emit_insn (gen_rtx_SET (new_reg, *ad->base));
  code = recog_memoized (insn);
  if (code < 0)
    {
      /* The base-loading move was not recognized: undo its emission.  */
      delete_insns_since (last_insn);
      return NULL_RTX;
    }

  return new_inner;
}
   3296 
   3297 /* Make reload base reg + DISP from address AD.  Return the new pseudo.  */
   3298 static rtx
   3299 base_plus_disp_to_reg (struct address_info *ad, rtx disp)
   3300 {
   3301   enum reg_class cl;
   3302   rtx new_reg;
   3303 
   3304   lra_assert (ad->base == ad->base_term);
   3305   cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
   3306 		       get_index_code (ad));
   3307   new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX, cl, NULL,
   3308 				"base + disp");
   3309   lra_emit_add (new_reg, *ad->base_term, disp);
   3310   return new_reg;
   3311 }
   3312 
   3313 /* Make reload of index part of address AD.  Return the new
   3314    pseudo.  */
   3315 static rtx
   3316 index_part_to_reg (struct address_info *ad)
   3317 {
   3318   rtx new_reg;
   3319 
   3320   new_reg = lra_create_new_reg (GET_MODE (*ad->index), NULL_RTX,
   3321 				INDEX_REG_CLASS, NULL, "index term");
   3322   expand_mult (GET_MODE (*ad->index), *ad->index_term,
   3323 	       GEN_INT (get_index_scale (ad)), new_reg, 1);
   3324   return new_reg;
   3325 }
   3326 
   3327 /* Return true if we can add a displacement to address AD, even if that
   3328    makes the address invalid.  The fix-up code requires any new address
   3329    to be the sum of the BASE_TERM, INDEX and DISP_TERM fields.  */
   3330 static bool
   3331 can_add_disp_p (struct address_info *ad)
   3332 {
   3333   return (!ad->autoinc_p
   3334 	  && ad->segment == NULL
   3335 	  && ad->base == ad->base_term
   3336 	  && ad->disp == ad->disp_term);
   3337 }
   3338 
/* Substitute equivalent values (after elimination) for the base and
   index registers of address AD, updating the address in place.
   Return true if a substitution was made.  */
static bool
equiv_address_substitution (struct address_info *ad)
{
  rtx base_reg, new_base_reg, index_reg, new_index_reg, *base_term, *index_term;
  poly_int64 disp;
  HOST_WIDE_INT scale;
  bool change_p;

  /* Look through any subreg to the base register and find its
     equivalence after elimination.  */
  base_term = strip_subreg (ad->base_term);
  if (base_term == NULL)
    base_reg = new_base_reg = NULL_RTX;
  else
    {
      base_reg = *base_term;
      new_base_reg = get_equiv_with_elimination (base_reg, curr_insn);
    }
  /* Likewise for the index register.  */
  index_term = strip_subreg (ad->index_term);
  if (index_term == NULL)
    index_reg = new_index_reg = NULL_RTX;
  else
    {
      index_reg = *index_term;
      new_index_reg = get_equiv_with_elimination (index_reg, curr_insn);
    }
  if (base_reg == new_base_reg && index_reg == new_index_reg)
    return false;
  /* DISP accumulates the constant parts of any reg+const equivalences
     below; it is folded into the address's displacement at the end.  */
  disp = 0;
  change_p = false;
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, "Changing address in insn %d ",
	       INSN_UID (curr_insn));
      dump_value_slim (lra_dump_file, *ad->outer, 1);
    }
  if (base_reg != new_base_reg)
    {
      poly_int64 offset;
      if (REG_P (new_base_reg))
	{
	  *base_term = new_base_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_base_reg) == PLUS
	       && REG_P (XEXP (new_base_reg, 0))
	       && poly_int_rtx_p (XEXP (new_base_reg, 1), &offset)
	       && can_add_disp_p (ad))
	{
	  /* The equivalence is reg + constant: use the register as the
	     new base and remember the constant part in DISP.  */
	  disp += offset;
	  *base_term = XEXP (new_base_reg, 0);
	  change_p = true;
	}
      /* Keep a duplicated base term (e.g. in PRE_MODIFY) in sync.  */
      if (ad->base_term2 != NULL)
	*ad->base_term2 = *ad->base_term;
    }
  if (index_reg != new_index_reg)
    {
      poly_int64 offset;
      if (REG_P (new_index_reg))
	{
	  *index_term = new_index_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_index_reg) == PLUS
	       && REG_P (XEXP (new_index_reg, 0))
	       && poly_int_rtx_p (XEXP (new_index_reg, 1), &offset)
	       && can_add_disp_p (ad)
	       && (scale = get_index_scale (ad)))
	{
	  /* The index is multiplied by SCALE, so the constant part of
	     the equivalence contributes offset * scale.  */
	  disp += offset * scale;
	  *index_term = XEXP (new_index_reg, 0);
	  change_p = true;
	}
    }
  if (maybe_ne (disp, 0))
    {
      if (ad->disp != NULL)
	*ad->disp = plus_constant (GET_MODE (*ad->inner), *ad->disp, disp);
      else
	{
	  /* No displacement slot: fold the constant into the whole
	     inner address and re-decompose it.  */
	  *ad->inner = plus_constant (GET_MODE (*ad->inner), *ad->inner, disp);
	  update_address (ad);
	}
      change_p = true;
    }
  if (lra_dump_file != NULL)
    {
      if (! change_p)
	fprintf (lra_dump_file, " -- no change\n");
      else
	{
	  fprintf (lra_dump_file, " on equiv ");
	  dump_value_slim (lra_dump_file, *ad->outer, 1);
	  fprintf (lra_dump_file, "\n");
	}
    }
  return change_p;
}
   3438 
/* Return a pointer past any leading constraint modifier characters
   and whitespace in constraint string STR, i.e. to the first
   character that can begin an actual constraint (or to the
   terminating NUL).  */
static const char *
skip_constraint_modifiers (const char *str)
{
  while (*str == '+' || *str == '&' || *str == '=' || *str == '*'
	 || *str == ' ' || *str == '\t' || *str == '$' || *str == '^'
	 || *str == '%' || *str == '?' || *str == '!')
    str++;
  return str;
}
   3453 
/* Major function to make reloads for an address in operand NOP or
   check its correctness (if CHECK_ONLY_P is true).  The supported
   cases are:

   1) an address that existed before LRA started, at which point it
   must have been valid.  These addresses are subject to elimination
   and may have become invalid due to the elimination offset being out
   of range.

   2) an address created by forcing a constant to memory
   (force_const_to_mem).  The initial form of these addresses might
   not be valid, and it is this function's job to make them valid.

   3) a frame address formed from a register and a (possibly zero)
   constant offset.  As above, these addresses might not be valid and
   this function must make them so.

   Add reloads to the lists *BEFORE and *AFTER.  We might need to add
   reloads to *AFTER because of inc/dec, {pre, post} modify in the
   address.  Return true for any RTL change.

   The function is a helper function which does not produce all
   transformations (when CHECK_ONLY_P is false) which can be
   necessary.  It does just basic steps.  To do all necessary
   transformations use function process_address.  */
static bool
process_address_1 (int nop, bool check_only_p,
		   rtx_insn **before, rtx_insn **after)
{
  struct address_info ad;
  rtx new_reg;
  HOST_WIDE_INT scale;
  rtx op = *curr_id->operand_loc[nop];
  rtx mem = extract_mem_from_operand (op);
  const char *constraint;
  enum constraint_num cn;
  bool change_p = false;

  /* A BLKmode MEM with a SCRATCH address is a placeholder with no real
     address to process.  */
  if (MEM_P (mem)
      && GET_MODE (mem) == BLKmode
      && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return false;

  constraint
    = skip_constraint_modifiers (curr_static_id->operand[nop].constraint);
  /* A matching constraint (a digit): use the constraint of the operand
     it matches instead.  */
  if (IN_RANGE (constraint[0], '0', '9'))
    {
      char *end;
      unsigned long dup = strtoul (constraint, &end, 10);
      constraint
	= skip_constraint_modifiers (curr_static_id->operand[dup].constraint);
    }
  cn = lookup_constraint (*constraint == '\0' ? "X" : constraint);
  /* If we have several alternatives or/and several constraints in an
     alternative and we can not say at this stage what constraint will be used,
     use unknown constraint.  The exception is an address constraint.  If
     operand has one address constraint, probably all others constraints are
     address ones.  */
  if (constraint[0] != '\0' && get_constraint_type (cn) != CT_ADDRESS
      && *skip_constraint_modifiers (constraint
				     + CONSTRAINT_LEN (constraint[0],
						       constraint)) != '\0')
    cn = CONSTRAINT__UNKNOWN;
  if (insn_extra_address_constraint (cn)
      /* When we find an asm operand with an address constraint that
	 doesn't satisfy address_operand to begin with, we clear
	 is_address, so that we don't try to make a non-address fit.
	 If the asm statement got this far, it's because other
	 constraints are available, and we'll use them, disregarding
	 the unsatisfiable address ones.  */
      && curr_static_id->operand[nop].is_address)
    decompose_lea_address (&ad, curr_id->operand_loc[nop]);
  /* Do not attempt to decompose arbitrary addresses generated by combine
     for asm operands with loose constraints, e.g 'X'.
     Need to extract memory from op for special memory constraint,
     i.e. bcst_mem_operand in i386 backend.  */
  else if (MEM_P (mem)
	   && !(INSN_CODE (curr_insn) < 0
		&& get_constraint_type (cn) == CT_FIXED_FORM
		&& constraint_satisfied_p (op, cn)))
    decompose_mem_address (&ad, mem);
  else if (GET_CODE (op) == SUBREG
	   && MEM_P (SUBREG_REG (op)))
    decompose_mem_address (&ad, SUBREG_REG (op));
  else
    return false;
  /* If INDEX_REG_CLASS is assigned to base_term already and isn't to
     index_term, swap them so to avoid assigning INDEX_REG_CLASS to both
     when INDEX_REG_CLASS is a single register class.  */
  if (ad.base_term != NULL
      && ad.index_term != NULL
      && ira_class_hard_regs_num[INDEX_REG_CLASS] == 1
      && REG_P (*ad.base_term)
      && REG_P (*ad.index_term)
      && in_class_p (*ad.base_term, INDEX_REG_CLASS, NULL)
      && ! in_class_p (*ad.index_term, INDEX_REG_CLASS, NULL))
    {
      std::swap (ad.base, ad.index);
      std::swap (ad.base_term, ad.index_term);
    }
  if (! check_only_p)
    change_p = equiv_address_substitution (&ad);
  /* Reload the base register if necessary.  For an auto-increment
     address whose register does not die here, updates are also needed
     after the insn.  */
  if (ad.base_term != NULL
      && (process_addr_reg
	  (ad.base_term, check_only_p, before,
	   (ad.autoinc_p
	    && !(REG_P (*ad.base_term)
		 && find_regno_note (curr_insn, REG_DEAD,
				     REGNO (*ad.base_term)) != NULL_RTX)
	    ? after : NULL),
	   base_reg_class (ad.mode, ad.as, ad.base_outer_code,
			   get_index_code (&ad)))))
    {
      change_p = true;
      if (ad.base_term2 != NULL)
	*ad.base_term2 = *ad.base_term;
    }
  if (ad.index_term != NULL
      && process_addr_reg (ad.index_term, check_only_p,
			   before, NULL, INDEX_REG_CLASS))
    change_p = true;

  /* Target hooks sometimes don't treat extra-constraint addresses as
     legitimate address_operands, so handle them specially.  */
  if (insn_extra_address_constraint (cn)
      && satisfies_address_constraint_p (&ad, cn))
    return change_p;

  if (check_only_p)
    return change_p;

  /* There are four cases where the shape of *AD.INNER may now be invalid:

     1) the original address was valid, but either elimination or
     equiv_address_substitution was applied and that made
     the address invalid.

     2) the address is an invalid symbolic address created by
     force_const_to_mem.

     3) the address is a frame address with an invalid offset.

     4) the address is a frame address with an invalid base.

     All these cases involve a non-autoinc address, so there is no
     point revalidating other types.  */
  if (ad.autoinc_p || valid_address_p (op, &ad, cn))
    return change_p;

  /* Any index existed before LRA started, so we can assume that the
     presence and shape of the index is valid.  */
  push_to_sequence (*before);
  lra_assert (ad.disp == ad.disp_term);
  if (ad.base == NULL)
    {
      if (ad.index == NULL)
	{
	  rtx_insn *insn;
	  rtx_insn *last = get_last_insn ();
	  int code = -1;
	  enum reg_class cl = base_reg_class (ad.mode, ad.as,
					      SCRATCH, SCRATCH);
	  rtx addr = *ad.inner;

	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "addr");
	  if (HAVE_lo_sum)
	    {
	      /* addr => lo_sum (new_base, addr), case (2) above.  */
	      insn = emit_insn (gen_rtx_SET
				(new_reg,
				 gen_rtx_HIGH (Pmode, copy_rtx (addr))));
	      code = recog_memoized (insn);
	      if (code >= 0)
		{
		  *ad.inner = gen_rtx_LO_SUM (Pmode, new_reg, addr);
		  if (!valid_address_p (op, &ad, cn))
		    {
		      /* Try to put lo_sum into register.  */
		      insn = emit_insn (gen_rtx_SET
					(new_reg,
					 gen_rtx_LO_SUM (Pmode, new_reg, addr)));
		      code = recog_memoized (insn);
		      if (code >= 0)
			{
			  *ad.inner = new_reg;
			  if (!valid_address_p (op, &ad, cn))
			    {
			      *ad.inner = addr;
			      code = -1;
			    }
			}

		    }
		}
	      if (code < 0)
		delete_insns_since (last);
	    }

	  if (code < 0)
	    {
	      /* addr => new_base, case (2) above.  */
	      lra_emit_move (new_reg, addr);

	      /* Verify that every insn emitted for the move is
		 recognizable; otherwise the transformation cannot be
		 done.  */
	      for (insn = last == NULL_RTX ? get_insns () : NEXT_INSN (last);
		   insn != NULL_RTX;
		   insn = NEXT_INSN (insn))
		if (recog_memoized (insn) < 0)
		  break;
	      if (insn != NULL_RTX)
		{
		  /* Do nothing if we cannot generate right insns.
		     This is analogous to reload pass behavior.  */
		  delete_insns_since (last);
		  end_sequence ();
		  return false;
		}
	      *ad.inner = new_reg;
	    }
	}
      else
	{
	  /* index * scale + disp => new base + index * scale,
	     case (1) above.  */
	  enum reg_class cl = base_reg_class (ad.mode, ad.as, PLUS,
					      GET_CODE (*ad.index));

	  lra_assert (INDEX_REG_CLASS != NO_REGS);
	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "disp");
	  lra_emit_move (new_reg, *ad.disp);
	  *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
					   new_reg, *ad.index);
	}
    }
  else if (ad.index == NULL)
    {
      int regno;
      enum reg_class cl;
      rtx set;
      rtx_insn *insns, *last_insn;
      /* Try to reload base into register only if the base is invalid
         for the address but with valid offset, case (4) above.  */
      start_sequence ();
      new_reg = base_to_reg (&ad);

      /* base + disp => new base, cases (1) and (3) above.  */
      /* Another option would be to reload the displacement into an
	 index register.  However, postreload has code to optimize
	 address reloads that have the same base and different
	 displacements, so reloading into an index register would
	 not necessarily be a win.  */
      if (new_reg == NULL_RTX)
	{
	  /* See if the target can split the displacement into a
	     legitimate new displacement from a local anchor.  */
	  gcc_assert (ad.disp == ad.disp_term);
	  poly_int64 orig_offset;
	  rtx offset1, offset2;
	  if (poly_int_rtx_p (*ad.disp, &orig_offset)
	      && targetm.legitimize_address_displacement (&offset1, &offset2,
							  orig_offset,
							  ad.mode))
	    {
	      new_reg = base_plus_disp_to_reg (&ad, offset1);
	      new_reg = gen_rtx_PLUS (GET_MODE (new_reg), new_reg, offset2);
	    }
	  else
	    new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
	}
      insns = get_insns ();
      last_insn = get_last_insn ();
      /* If we generated at least two insns, try last insn source as
	 an address.  If we succeed, we generate one less insn.  */
      if (REG_P (new_reg)
	  && last_insn != insns
	  && (set = single_set (last_insn)) != NULL_RTX
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && REG_P (XEXP (SET_SRC (set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (set), 1)))
	{
	  *ad.inner = SET_SRC (set);
	  if (valid_address_p (op, &ad, cn))
	    {
	      *ad.base_term = XEXP (SET_SRC (set), 0);
	      *ad.disp_term = XEXP (SET_SRC (set), 1);
	      cl = base_reg_class (ad.mode, ad.as, ad.base_outer_code,
				   get_index_code (&ad));
	      regno = REGNO (*ad.base_term);
	      if (regno >= FIRST_PSEUDO_REGISTER
		  && cl != lra_get_allocno_class (regno))
		lra_change_class (regno, cl, "      Change to", true);
	      new_reg = SET_SRC (set);
	      delete_insns_since (PREV_INSN (last_insn));
	    }
	}
      end_sequence ();
      emit_insn (insns);
      *ad.inner = new_reg;
    }
  else if (ad.disp_term != NULL)
    {
      /* base + scale * index + disp => new base + scale * index,
	 case (1) above.  */
      gcc_assert (ad.disp == ad.disp_term);
      new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       new_reg, *ad.index);
    }
  else if ((scale = get_index_scale (&ad)) == 1)
    {
      /* The last transformation to one reg will be made in
	 curr_insn_transform function.  */
      end_sequence ();
      return false;
    }
  else if (scale != 0)
    {
      /* base + scale * index => base + new_reg,
	 case (1) above.
      Index part of address may become invalid.  For example, we
      changed pseudo on the equivalent memory and a subreg of the
      pseudo onto the memory of different mode for which the scale is
      prohibited.  */
      new_reg = index_part_to_reg (&ad);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       *ad.base_term, new_reg);
    }
  else
    {
      enum reg_class cl = base_reg_class (ad.mode, ad.as,
					  SCRATCH, SCRATCH);
      rtx addr = *ad.inner;

      new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "addr");
      /* addr => new_base.  */
      lra_emit_move (new_reg, addr);
      *ad.inner = new_reg;
    }
  *before = get_insns ();
  end_sequence ();
  return true;
}
   3795 
   3796 /* If CHECK_ONLY_P is false, do address reloads until it is necessary.
   3797    Use process_address_1 as a helper function.  Return true for any
   3798    RTL changes.
   3799 
   3800    If CHECK_ONLY_P is true, just check address correctness.  Return
   3801    false if the address correct.  */
   3802 static bool
   3803 process_address (int nop, bool check_only_p,
   3804 		 rtx_insn **before, rtx_insn **after)
   3805 {
   3806   bool res = false;
   3807 
   3808   while (process_address_1 (nop, check_only_p, before, after))
   3809     {
   3810       if (check_only_p)
   3811 	return true;
   3812       res = true;
   3813     }
   3814   return res;
   3815 }
   3816 
/* Emit insns to reload VALUE into a new register.  VALUE is an
   auto-increment or auto-decrement RTX whose operand is a register or
   memory location; so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload
   value being incremented/decremented from.

   NEW_RCLASS is the register class for a new reload pseudo if one is
   needed.

   INC_AMOUNT is the number to increment or decrement by (always
   positive and ignored for POST_MODIFY/PRE_MODIFY).

   Return pseudo containing the result.	 */
static rtx
emit_inc (enum reg_class new_rclass, rtx in, rtx value, poly_int64 inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;
  rtx_insn *add_insn;
  int code;
  rtx real_in = in == value ? incloc : in;
  rtx result;
  bool plus_p = true;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* For {PRE,POST}_MODIFY the increment is explicit in the RTL:
	 (plus/minus incloc inc).  */
      lra_assert (GET_CODE (XEXP (value, 1)) == PLUS
		  || GET_CODE (XEXP (value, 1)) == MINUS);
      lra_assert (rtx_equal_p (XEXP (XEXP (value, 1), 0), XEXP (value, 0)));
      plus_p = GET_CODE (XEXP (value, 1)) == PLUS;
      inc = XEXP (XEXP (value, 1), 1);
    }
  else
    {
      /* For plain {PRE,POST}_{INC,DEC} the amount is implicit; fold
	 the sign into the constant so we always add.  */
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = gen_int_mode (inc_amount, GET_MODE (value));
    }

  if (! post && REG_P (incloc))
    result = incloc;
  else
    result = lra_create_new_reg (GET_MODE (value), value, new_rclass, NULL,
				 "INC/DEC result");

  if (real_in != result)
    {
      /* First copy the location to the result register.  */
      lra_assert (REG_P (result));
      emit_insn (gen_move_insn (result, real_in));
    }

  /* We suppose that there are insns to add/sub with the constant
     increment permitted in {PRE,POST}_{DEC,INC,MODIFY}.  At least the
     old reload worked with this assumption.  If the assumption
     becomes wrong, we should use approach in function
     base_plus_disp_to_reg.  */
  if (in == value)
    {
      /* See if we can directly increment INCLOC.  */
      last = get_last_insn ();
      add_insn = emit_insn (plus_p
			    ? gen_add2_insn (incloc, inc)
			    : gen_sub2_insn (incloc, inc));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  if (! post && result != incloc)
	    emit_insn (gen_move_insn (result, incloc));
	  return result;
	}
      /* The direct increment was not recognized: undo it and fall
	 back to incrementing in RESULT.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RESULT.
     The way we do this depends on whether this is pre- or
     post-increment.  For pre-increment, copy INCLOC to the reload
     register, increment it there, then save back.  */
  if (! post)
    {
      if (real_in != result)
	emit_insn (gen_move_insn (result, real_in));
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      if (result != incloc)
	emit_insn (gen_move_insn (incloc, result));
    }
  else
    {
      /* Post-increment.

	 Because this might be a jump insn or a compare, and because
	 RESULT may not be available after the insn in an input
	 reload, we must do the incrementing before the insn being
	 reloaded for.

	 We have already copied IN to RESULT.  Increment the copy in
	 RESULT, save that back, then decrement RESULT so it has
	 the original value.  */
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      emit_insn (gen_move_insn (incloc, result));
      /* Restore non-modified value for the result.  We prefer this
	 way because it does not require an additional hard
	 register.  */
      if (plus_p)
	{
	  poly_int64 offset;
	  if (poly_int_rtx_p (inc, &offset))
	    emit_insn (gen_add2_insn (result,
				      gen_int_mode (-offset,
						    GET_MODE (result))));
	  else
	    emit_insn (gen_sub2_insn (result, inc));
	}
      else
	emit_insn (gen_add2_insn (result, inc));
    }
  return result;
}
   3945 
   3946 /* Return true if the current move insn does not need processing as we
   3947    already know that it satisfies its constraints.  */
   3948 static bool
   3949 simple_move_p (void)
   3950 {
   3951   rtx dest, src;
   3952   enum reg_class dclass, sclass;
   3953 
   3954   lra_assert (curr_insn_set != NULL_RTX);
   3955   dest = SET_DEST (curr_insn_set);
   3956   src = SET_SRC (curr_insn_set);
   3957 
   3958   /* If the instruction has multiple sets we need to process it even if it
   3959      is single_set.  This can happen if one or more of the SETs are dead.
   3960      See PR73650.  */
   3961   if (multiple_sets (curr_insn))
   3962     return false;
   3963 
   3964   return ((dclass = get_op_class (dest)) != NO_REGS
   3965 	  && (sclass = get_op_class (src)) != NO_REGS
   3966 	  /* The backend guarantees that register moves of cost 2
   3967 	     never need reloads.  */
   3968 	  && targetm.register_move_cost (GET_MODE (src), sclass, dclass) == 2);
   3969  }
   3970 
   3971 /* Swap operands NOP and NOP + 1. */
   3972 static inline void
   3973 swap_operands (int nop)
   3974 {
   3975   std::swap (curr_operand_mode[nop], curr_operand_mode[nop + 1]);
   3976   std::swap (original_subreg_reg_mode[nop], original_subreg_reg_mode[nop + 1]);
   3977   std::swap (*curr_id->operand_loc[nop], *curr_id->operand_loc[nop + 1]);
   3978   std::swap (equiv_substition_p[nop], equiv_substition_p[nop + 1]);
   3979   /* Swap the duplicates too.  */
   3980   lra_update_dup (curr_id, nop);
   3981   lra_update_dup (curr_id, nop + 1);
   3982 }
   3983 
   3984 /* Main entry point of the constraint code: search the body of the
   3985    current insn to choose the best alternative.  It is mimicking insn
   3986    alternative cost calculation model of former reload pass.  That is
   3987    because machine descriptions were written to use this model.  This
   3988    model can be changed in future.  Make commutative operand exchange
   3989    if it is chosen.
   3990 
   3991    if CHECK_ONLY_P is false, do RTL changes to satisfy the
   3992    constraints.  Return true if any change happened during function
   3993    call.
   3994 
   3995    If CHECK_ONLY_P is true then don't do any transformation.  Just
   3996    check that the insn satisfies all constraints.  If the insn does
   3997    not satisfy any constraint, return true.  */
static bool
curr_insn_transform (bool check_only_p)
{
  int i, j, k;
  int n_operands;
  int n_alternatives;
  int n_outputs;
  int commutative;
  signed char goal_alt_matched[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
  signed char match_inputs[MAX_RECOG_OPERANDS + 1];
  signed char outputs[MAX_RECOG_OPERANDS + 1];
  rtx_insn *before, *after;
  bool alt_p = false;
  /* Flag that the insn has been changed through a transformation.  */
  bool change_p;
  bool sec_mem_p;
  bool use_sec_mem_p;
  int max_regno_before;
  int reused_alternative_num;

  /* Fast path: a simple register move that already satisfies its
     constraints needs no reloads at all.  */
  curr_insn_set = single_set (curr_insn);
  if (curr_insn_set != NULL_RTX && simple_move_p ())
    {
      /* We assume that the corresponding insn alternative has no
	 earlier clobbers.  If it is not the case, don't define move
	 cost equal to 2 for the corresponding register classes.  */
      lra_set_used_insn_alternative (curr_insn, LRA_NON_CLOBBERED_ALT);
      return false;
    }

  /* Initialize per-insn state for this transformation.  */
  no_input_reloads_p = no_output_reloads_p = false;
  goal_alt_number = -1;
  change_p = sec_mem_p = false;

  /* CALL_INSNs are not allowed to have any output reloads.  */
  if (CALL_P (curr_insn))
    no_output_reloads_p = true;

  n_operands = curr_static_id->n_operands;
  n_alternatives = curr_static_id->n_alternatives;

  /* Just return "no reloads" if insn has no operands with
     constraints.  */
  if (n_operands == 0 || n_alternatives == 0)
    return false;

  max_regno_before = max_reg_num ();

  for (i = 0; i < n_operands; i++)
    {
      goal_alt_matched[i][0] = -1;
      goal_alt_matches[i] = -1;
    }

  commutative = curr_static_id->commutative;

  /* Now see what we need for pseudos that didn't get hard regs or got
     the wrong kind of hard reg.  For this, we must consider all the
     operands together against the register constraints.  */

  best_losers = best_overall = INT_MAX;
  best_reload_sum = 0;

  curr_swapped = false;
  goal_alt_swapped = false;

  if (! check_only_p)
    /* Make equivalence substitution and memory subreg elimination
       before address processing because an address legitimacy can
       depend on memory mode.  */
    for (i = 0; i < n_operands; i++)
      {
	rtx op, subst, old;
	bool op_change_p = false;

	if (curr_static_id->operand[i].is_operator)
	  continue;

	old = op = *curr_id->operand_loc[i];
	if (GET_CODE (old) == SUBREG)
	  old = SUBREG_REG (old);
	subst = get_equiv_with_elimination (old, curr_insn);
	original_subreg_reg_mode[i] = VOIDmode;
	equiv_substition_p[i] = false;
	if (subst != old)
	  {
	    equiv_substition_p[i] = true;
	    subst = copy_rtx (subst);
	    lra_assert (REG_P (old));
	    if (GET_CODE (op) != SUBREG)
	      *curr_id->operand_loc[i] = subst;
	    else
	      {
		SUBREG_REG (op) = subst;
		if (GET_MODE (subst) == VOIDmode)
		  original_subreg_reg_mode[i] = GET_MODE (old);
	      }
	    if (lra_dump_file != NULL)
	      {
		fprintf (lra_dump_file,
			 "Changing pseudo %d in operand %i of insn %u on equiv ",
			 REGNO (old), i, INSN_UID (curr_insn));
		dump_value_slim (lra_dump_file, subst, 1);
		fprintf (lra_dump_file, "\n");
	      }
	    op_change_p = change_p = true;
	  }
	if (simplify_operand_subreg (i, GET_MODE (old)) || op_change_p)
	  {
	    change_p = true;
	    lra_update_dup (curr_id, i);
	  }
      }

  /* Reload address registers and displacements.  We do it before
     finding an alternative because of memory constraints.  */
  before = after = NULL;
  for (i = 0; i < n_operands; i++)
    if (! curr_static_id->operand[i].is_operator
	&& process_address (i, check_only_p, &before, &after))
      {
	if (check_only_p)
	  return true;
	change_p = true;
	lra_update_dup (curr_id, i);
      }

  if (change_p)
    /* If we've changed the instruction then any alternative that
       we chose previously may no longer be valid.  */
    lra_set_used_insn_alternative (curr_insn, LRA_UNKNOWN_ALT);

  if (! check_only_p && curr_insn_set != NULL_RTX
      && check_and_process_move (&change_p, &sec_mem_p))
    return change_p;

  /* Alternative selection is retried from here with the commutative
     operand pair exchanged (see the goto below).  */
 try_swapped:

  reused_alternative_num = check_only_p ? LRA_UNKNOWN_ALT : curr_id->used_insn_alternative;
  if (lra_dump_file != NULL && reused_alternative_num >= 0)
    fprintf (lra_dump_file, "Reusing alternative %d for insn #%u\n",
	     reused_alternative_num, INSN_UID (curr_insn));

  if (process_alt_operands (reused_alternative_num))
    alt_p = true;

  if (check_only_p)
    return ! alt_p || best_losers != 0;

  /* If insn is commutative (it's safe to exchange a certain pair of
     operands) then we need to try each alternative twice, the second
     time matching those two operands as if we had exchanged them.  To
     do this, really exchange them in operands.

     If we have just tried the alternatives the second time, return
     operands to normal and drop through.  */

  if (reused_alternative_num < 0 && commutative >= 0)
    {
      curr_swapped = !curr_swapped;
      if (curr_swapped)
	{
	  swap_operands (commutative);
	  goto try_swapped;
	}
      else
	swap_operands (commutative);
    }

  if (! alt_p && ! sec_mem_p)
    {
      /* No alternative works with reloads??  */
      if (INSN_CODE (curr_insn) >= 0)
	fatal_insn ("unable to generate reloads for:", curr_insn);
      error_for_asm (curr_insn,
		     "inconsistent operand constraints in an %<asm%>");
      lra_asm_error_p = true;
      if (! JUMP_P (curr_insn))
	{
	  /* Avoid further trouble with this insn.  Don't generate use
	     pattern here as we could use the insn SP offset.  */
	  lra_set_insn_deleted (curr_insn);
	}
      else
	{
	  lra_invalidate_insn_data (curr_insn);
	  ira_nullify_asm_goto (curr_insn);
	  lra_update_insn_regno_info (curr_insn);
	}
      return true;
    }

  /* If the best alternative is with operands 1 and 2 swapped, swap
     them.  Update the operand numbers of any reloads already
     pushed.  */

  if (goal_alt_swapped)
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "  Commutative operand exchange in insn %u\n",
		 INSN_UID (curr_insn));

      /* Swap the duplicates too.  */
      swap_operands (commutative);
      change_p = true;
    }

  /* Some targets' TARGET_SECONDARY_MEMORY_NEEDED (e.g. x86) are defined
     too conservatively.  So we use the secondary memory only if there
     is no alternative without reloads.  */
  use_sec_mem_p = false;
  if (! alt_p)
    use_sec_mem_p = true;
  else if (sec_mem_p)
    {
      for (i = 0; i < n_operands; i++)
	if (! goal_alt_win[i] && ! goal_alt_match_win[i])
	  break;
      use_sec_mem_p = i < n_operands;
    }

  if (use_sec_mem_p)
    {
      int in = -1, out = -1;
      rtx new_reg, src, dest, rld;
      machine_mode sec_mode, rld_mode;

      lra_assert (curr_insn_set != NULL_RTX && sec_mem_p);
      dest = SET_DEST (curr_insn_set);
      src = SET_SRC (curr_insn_set);
      /* Find the operand numbers of the source and destination, first
	 among the operands themselves, then among the duplicates.  */
      for (i = 0; i < n_operands; i++)
	if (*curr_id->operand_loc[i] == dest)
	  out = i;
	else if (*curr_id->operand_loc[i] == src)
	  in = i;
      for (i = 0; i < curr_static_id->n_dups; i++)
	if (out < 0 && *curr_id->dup_loc[i] == dest)
	  out = curr_static_id->dup_num[i];
	else if (in < 0 && *curr_id->dup_loc[i] == src)
	  in = curr_static_id->dup_num[i];
      lra_assert (out >= 0 && in >= 0
		  && curr_static_id->operand[out].type == OP_OUT
		  && curr_static_id->operand[in].type == OP_IN);
      rld = partial_subreg_p (GET_MODE (src), GET_MODE (dest)) ? src : dest;
      rld_mode = GET_MODE (rld);
      sec_mode = targetm.secondary_memory_needed_mode (rld_mode);
      new_reg = lra_create_new_reg (sec_mode, NULL_RTX, NO_REGS, NULL,
				    "secondary");
      /* If the mode is changed, it should be wider.  */
      lra_assert (!partial_subreg_p (sec_mode, rld_mode));
      if (sec_mode != rld_mode)
        {
	  /* If the target says specifically to use another mode for
	     secondary memory moves we cannot reuse the original
	     insn.  */
	  after = emit_spill_move (false, new_reg, dest);
	  lra_process_new_insns (curr_insn, NULL, after,
				 "Inserting the sec. move");
	  /* We may have non null BEFORE here (e.g. after address
	     processing).  */
	  push_to_sequence (before);
	  before = emit_spill_move (true, new_reg, src);
	  emit_insn (before);
	  before = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (curr_insn, before, NULL, "Changing on");
	  lra_set_insn_deleted (curr_insn);
	}
      else if (dest == rld)
        {
	  *curr_id->operand_loc[out] = new_reg;
	  lra_update_dup (curr_id, out);
	  after = emit_spill_move (false, new_reg, dest);
	  lra_process_new_insns (curr_insn, NULL, after,
				 "Inserting the sec. move");
	}
      else
	{
	  *curr_id->operand_loc[in] = new_reg;
	  lra_update_dup (curr_id, in);
	  /* See comments above.  */
	  push_to_sequence (before);
	  before = emit_spill_move (true, new_reg, src);
	  emit_insn (before);
	  before = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (curr_insn, before, NULL,
				 "Inserting the sec. move");
	}
      lra_update_insn_regno_info (curr_insn);
      return true;
    }

  lra_assert (goal_alt_number >= 0);
  lra_set_used_insn_alternative (curr_insn, goal_alt_number);

  if (lra_dump_file != NULL)
    {
      const char *p;

      fprintf (lra_dump_file, "	 Choosing alt %d in insn %u:",
	       goal_alt_number, INSN_UID (curr_insn));
      for (i = 0; i < n_operands; i++)
	{
	  p = (curr_static_id->operand_alternative
	       [goal_alt_number * n_operands + i].constraint);
	  if (*p == '\0')
	    continue;
	  fprintf (lra_dump_file, "  (%d) ", i);
	  for (; *p != '\0' && *p != ',' && *p != '#'; p++)
	    fputc (*p, lra_dump_file);
	}
      if (INSN_CODE (curr_insn) >= 0
          && (p = get_insn_name (INSN_CODE (curr_insn))) != NULL)
        fprintf (lra_dump_file, " {%s}", p);
      if (maybe_ne (curr_id->sp_offset, 0))
	{
	  fprintf (lra_dump_file, " (sp_off=");
	  print_dec (curr_id->sp_offset, lra_dump_file);
	  fprintf (lra_dump_file, ")");
	}
      fprintf (lra_dump_file, "\n");
    }

  /* Right now, for any pair of operands I and J that are required to
     match, with J < I, goal_alt_matches[I] is J.  Add I to
     goal_alt_matched[J].  */

  for (i = 0; i < n_operands; i++)
    if ((j = goal_alt_matches[i]) >= 0)
      {
	for (k = 0; goal_alt_matched[j][k] >= 0; k++)
	  ;
	/* We allow matching one output operand and several input
	   operands.  */
	lra_assert (k == 0
		    || (curr_static_id->operand[j].type == OP_OUT
			&& curr_static_id->operand[i].type == OP_IN
			&& (curr_static_id->operand
			    [goal_alt_matched[j][0]].type == OP_IN)));
	goal_alt_matched[j][k] = i;
	goal_alt_matched[j][k + 1] = -1;
      }

  for (i = 0; i < n_operands; i++)
    goal_alt_win[i] |= goal_alt_match_win[i];

  /* Any constants that aren't allowed and can't be reloaded into
     registers are here changed into memory references.	 */
  for (i = 0; i < n_operands; i++)
    if (goal_alt_win[i])
      {
	int regno;
	enum reg_class new_class;
	rtx reg = *curr_id->operand_loc[i];

	if (GET_CODE (reg) == SUBREG)
	  reg = SUBREG_REG (reg);

	if (REG_P (reg) && (regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
	  {
	    bool ok_p = in_class_p (reg, goal_alt[i], &new_class);

	    if (new_class != NO_REGS && get_reg_class (regno) != new_class)
	      {
		lra_assert (ok_p);
		lra_change_class (regno, new_class, "      Change to", true);
	      }
	  }
      }
    else
      {
	const char *constraint;
	char c;
	rtx op = *curr_id->operand_loc[i];
	rtx subreg = NULL_RTX;
	machine_mode mode = curr_operand_mode[i];

	if (GET_CODE (op) == SUBREG)
	  {
	    subreg = op;
	    op = SUBREG_REG (op);
	    mode = GET_MODE (op);
	  }

	if (CONST_POOL_OK_P (mode, op)
	    && ((targetm.preferred_reload_class
		 (op, (enum reg_class) goal_alt[i]) == NO_REGS)
		|| no_input_reloads_p))
	  {
	    rtx tem = force_const_mem (mode, op);

	    change_p = true;
	    if (subreg != NULL_RTX)
	      tem = gen_rtx_SUBREG (mode, tem, SUBREG_BYTE (subreg));

	    *curr_id->operand_loc[i] = tem;
	    lra_update_dup (curr_id, i);
	    process_address (i, false, &before, &after);

	    /* If the alternative accepts constant pool refs directly
	       there will be no reload needed at all.  */
	    if (subreg != NULL_RTX)
	      continue;
	    /* Skip alternatives before the one requested.  */
	    constraint = (curr_static_id->operand_alternative
			  [goal_alt_number * n_operands + i].constraint);
	    for (;
		 (c = *constraint) && c != ',' && c != '#';
		 constraint += CONSTRAINT_LEN (c, constraint))
	      {
		enum constraint_num cn = lookup_constraint (constraint);
		if ((insn_extra_memory_constraint (cn)
		     || insn_extra_special_memory_constraint (cn)
		     || insn_extra_relaxed_memory_constraint (cn))
		    && satisfies_memory_constraint_p (tem, cn))
		  break;
	      }
	    if (c == '\0' || c == ',' || c == '#')
	      continue;

	    goal_alt_win[i] = true;
	  }
      }

  /* Collect the output operand numbers, -1 terminated, for passing to
     match_reload below.  */
  n_outputs = 0;
  for (i = 0; i < n_operands; i++)
    if (curr_static_id->operand[i].type == OP_OUT)
      outputs[n_outputs++] = i;
  outputs[n_outputs] = -1;
  for (i = 0; i < n_operands; i++)
    {
      int regno;
      bool optional_p = false;
      rtx old, new_reg;
      rtx op = *curr_id->operand_loc[i];

      if (goal_alt_win[i])
	{
	  if (goal_alt[i] == NO_REGS
	      && REG_P (op)
	      /* When we assign NO_REGS it means that we will not
		 assign a hard register to the scratch pseudo by
		 assignment pass and the scratch pseudo will be
		 spilled.  Spilled scratch pseudos are transformed
		 back to scratches at the LRA end.  */
	      && ira_former_scratch_operand_p (curr_insn, i)
	      && ira_former_scratch_p (REGNO (op)))
	    {
	      int regno = REGNO (op);
	      lra_change_class (regno, NO_REGS, "      Change to", true);
	      if (lra_get_regno_hard_regno (regno) >= 0)
		/* We don't have to mark all insn affected by the
		   spilled pseudo as there is only one such insn, the
		   current one.  */
		reg_renumber[regno] = -1;
	      lra_assert (bitmap_single_bit_set_p
			  (&lra_reg_info[REGNO (op)].insn_bitmap));
	    }
	  /* We can do an optional reload.  If the pseudo got a hard
	     reg, we might improve the code through inheritance.  If
	     it does not get a hard register we coalesce memory/memory
	     moves later.  Ignore move insns to avoid cycling.  */
	  if (! lra_simple_p
	      && lra_undo_inheritance_iter < LRA_MAX_INHERITANCE_PASSES
	      && goal_alt[i] != NO_REGS && REG_P (op)
	      && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
	      && regno < new_regno_start
	      && ! ira_former_scratch_p (regno)
	      && reg_renumber[regno] < 0
	      /* Check that the optional reload pseudo will be able to
		 hold given mode value.  */
	      && ! (prohibited_class_reg_set_mode_p
		    (goal_alt[i], reg_class_contents[goal_alt[i]],
		     PSEUDO_REGNO_MODE (regno)))
	      && (curr_insn_set == NULL_RTX
		  || !((REG_P (SET_SRC (curr_insn_set))
			|| MEM_P (SET_SRC (curr_insn_set))
			|| GET_CODE (SET_SRC (curr_insn_set)) == SUBREG)
		       && (REG_P (SET_DEST (curr_insn_set))
			   || MEM_P (SET_DEST (curr_insn_set))
			   || GET_CODE (SET_DEST (curr_insn_set)) == SUBREG))))
	    optional_p = true;
	  else if (goal_alt_matched[i][0] != -1
		   && curr_static_id->operand[i].type == OP_OUT
		   && (curr_static_id->operand_alternative
		       [goal_alt_number * n_operands + i].earlyclobber)
		   && REG_P (op))
	    {
	      for (j = 0; goal_alt_matched[i][j] != -1; j++)
		{
		  rtx op2 = *curr_id->operand_loc[goal_alt_matched[i][j]];

		  if (REG_P (op2) && REGNO (op) != REGNO (op2))
		    break;
		}
	      if (goal_alt_matched[i][j] != -1)
		{
		  /* Generate reloads for different output and matched
		     input registers.  This is the easiest way to avoid
		     creation of non-existing register conflicts in
		     lra-lives.cc.  */
		  match_reload (i, goal_alt_matched[i], outputs, goal_alt[i],
				&goal_alt_exclude_start_hard_regs[i], &before,
				&after, TRUE);
		}
	      continue;
	    }
	  else
	    continue;
	}

      /* Operands that match previous ones have already been handled.  */
      if (goal_alt_matches[i] >= 0)
	continue;

      /* We should not have an operand with a non-offsettable address
	 appearing where an offsettable address will do.  It also may
	 be a case when the address should be special in other words
	 not a general one (e.g. it needs no index reg).  */
      if (goal_alt_matched[i][0] == -1 && goal_alt_offmemok[i] && MEM_P (op))
	{
	  enum reg_class rclass;
	  rtx *loc = &XEXP (op, 0);
	  enum rtx_code code = GET_CODE (*loc);

	  push_to_sequence (before);
	  rclass = base_reg_class (GET_MODE (op), MEM_ADDR_SPACE (op),
				   MEM, SCRATCH);
	  if (GET_RTX_CLASS (code) == RTX_AUTOINC)
	    new_reg = emit_inc (rclass, *loc, *loc,
				/* This value does not matter for MODIFY.  */
				GET_MODE_SIZE (GET_MODE (op)));
	  else if (get_reload_reg (OP_IN, Pmode, *loc, rclass,
				   NULL, FALSE,
				   "offsetable address", &new_reg))
	    {
	      rtx addr = *loc;
	      enum rtx_code code = GET_CODE (addr);
	      bool align_p = false;

	      if (code == AND && CONST_INT_P (XEXP (addr, 1)))
		{
		  /* (and ... (const_int -X)) is used to align to X bytes.  */
		  align_p = true;
		  addr = XEXP (*loc, 0);
		}
	      else
		addr = canonicalize_reload_addr (addr);

	      lra_emit_move (new_reg, addr);
	      if (align_p)
		emit_move_insn (new_reg, gen_rtx_AND (GET_MODE (new_reg), new_reg, XEXP (*loc, 1)));
	    }
	  before = get_insns ();
	  end_sequence ();
	  *loc = new_reg;
	  lra_update_dup (curr_id, i);
	}
      else if (goal_alt_matched[i][0] == -1)
	{
	  machine_mode mode;
	  rtx reg, *loc;
	  int hard_regno;
	  enum op_type type = curr_static_id->operand[i].type;

	  loc = curr_id->operand_loc[i];
	  mode = curr_operand_mode[i];
	  if (GET_CODE (*loc) == SUBREG)
	    {
	      reg = SUBREG_REG (*loc);
	      poly_int64 byte = SUBREG_BYTE (*loc);
	      if (REG_P (reg)
		  /* Strict_low_part requires reloading the register and not
		     just the subreg.  Likewise for a strict subreg no wider
		     than a word for WORD_REGISTER_OPERATIONS targets.  */
		  && (curr_static_id->operand[i].strict_low
		      || (!paradoxical_subreg_p (mode, GET_MODE (reg))
			  && (hard_regno
			      = get_try_hard_regno (REGNO (reg))) >= 0
			  && (simplify_subreg_regno
			      (hard_regno,
			       GET_MODE (reg), byte, mode) < 0)
			  && (goal_alt[i] == NO_REGS
			      || (simplify_subreg_regno
				  (ira_class_hard_regs[goal_alt[i]][0],
				   GET_MODE (reg), byte, mode) >= 0)))
		      || (partial_subreg_p (mode, GET_MODE (reg))
			  && known_le (GET_MODE_SIZE (GET_MODE (reg)),
				       UNITS_PER_WORD)
			  && WORD_REGISTER_OPERATIONS)))
		{
		  /* An OP_INOUT is required when reloading a subreg of a
		     mode wider than a word to ensure that data beyond the
		     word being reloaded is preserved.  Also automatically
		     ensure that strict_low_part reloads are made into
		     OP_INOUT which should already be true from the backend
		     constraints.  */
		  if (type == OP_OUT
		      && (curr_static_id->operand[i].strict_low
			  || read_modify_subreg_p (*loc)))
		    type = OP_INOUT;
		  loc = &SUBREG_REG (*loc);
		  mode = GET_MODE (*loc);
		}
	    }
	  old = *loc;
	  if (get_reload_reg (type, mode, old, goal_alt[i],
			      &goal_alt_exclude_start_hard_regs[i],
			      loc != curr_id->operand_loc[i], "", &new_reg)
	      && type != OP_OUT)
	    {
	      /* Input (or inout) reload: copy the old value into the
		 reload register before the insn.  */
	      push_to_sequence (before);
	      lra_emit_move (new_reg, old);
	      before = get_insns ();
	      end_sequence ();
	    }
	  *loc = new_reg;
	  if (type != OP_IN
	      && find_reg_note (curr_insn, REG_UNUSED, old) == NULL_RTX)
	    {
	      /* Output (or inout) reload: store the reload register
		 back into the original location after the insn.  */
	      start_sequence ();
	      lra_emit_move (type == OP_INOUT ? copy_rtx (old) : old, new_reg);
	      emit_insn (after);
	      after = get_insns ();
	      end_sequence ();
	      *loc = new_reg;
	    }
	  for (j = 0; j < goal_alt_dont_inherit_ops_num; j++)
	    if (goal_alt_dont_inherit_ops[j] == i)
	      {
		lra_set_regno_unique_value (REGNO (new_reg));
		break;
	      }
	  lra_update_dup (curr_id, i);
	}
      else if (curr_static_id->operand[i].type == OP_IN
	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
		   == OP_OUT
		   || (curr_static_id->operand[goal_alt_matched[i][0]].type
		       == OP_INOUT
		       && (operands_match_p
			   (*curr_id->operand_loc[i],
			    *curr_id->operand_loc[goal_alt_matched[i][0]],
			    -1)))))
	{
	  /* generate reloads for input and matched outputs.  */
	  match_inputs[0] = i;
	  match_inputs[1] = -1;
	  match_reload (goal_alt_matched[i][0], match_inputs, outputs,
			goal_alt[i], &goal_alt_exclude_start_hard_regs[i],
			&before, &after,
			curr_static_id->operand_alternative
			[goal_alt_number * n_operands + goal_alt_matched[i][0]]
			.earlyclobber);
	}
      else if ((curr_static_id->operand[i].type == OP_OUT
		|| (curr_static_id->operand[i].type == OP_INOUT
		    && (operands_match_p
			(*curr_id->operand_loc[i],
			 *curr_id->operand_loc[goal_alt_matched[i][0]],
			 -1))))
	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
		    == OP_IN))
	/* Generate reloads for output and matched inputs.  */
	match_reload (i, goal_alt_matched[i], outputs, goal_alt[i],
		      &goal_alt_exclude_start_hard_regs[i], &before, &after,
		      curr_static_id->operand_alternative
		      [goal_alt_number * n_operands + i].earlyclobber);
      else if (curr_static_id->operand[i].type == OP_IN
	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
		   == OP_IN))
	{
	  /* Generate reloads for matched inputs.  */
	  match_inputs[0] = i;
	  for (j = 0; (k = goal_alt_matched[i][j]) >= 0; j++)
	    match_inputs[j + 1] = k;
	  match_inputs[j + 1] = -1;
	  match_reload (-1, match_inputs, outputs, goal_alt[i],
			&goal_alt_exclude_start_hard_regs[i],
			&before, &after, false);
	}
      else
	/* We must generate code in any case when function
	   process_alt_operands decides that it is possible.  */
	gcc_unreachable ();

      if (optional_p)
	{
	  rtx reg = op;

	  lra_assert (REG_P (reg));
	  regno = REGNO (reg);
	  op = *curr_id->operand_loc[i]; /* Substitution.  */
	  if (GET_CODE (op) == SUBREG)
	    op = SUBREG_REG (op);
	  gcc_assert (REG_P (op) && (int) REGNO (op) >= new_regno_start);
	  bitmap_set_bit (&lra_optional_reload_pseudos, REGNO (op));
	  lra_reg_info[REGNO (op)].restore_rtx = reg;
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file,
		     "      Making reload reg %d for reg %d optional\n",
		     REGNO (op), regno);
	}
    }
  if (before != NULL_RTX || after != NULL_RTX
      || max_regno_before != max_reg_num ())
    change_p = true;
  if (change_p)
    {
      lra_update_operator_dups (curr_id);
      /* Something changes -- process the insn.	 */
      lra_update_insn_regno_info (curr_insn);
    }
  /* Emit the accumulated input reloads before and output reloads after
     the insn.  */
  lra_process_new_insns (curr_insn, before, after, "Inserting insn reload");
  return change_p;
}
   4715 
   4716 /* Return true if INSN satisfies all constraints.  In other words, no
   4717    reload insns are needed.  */
   4718 bool
   4719 lra_constrain_insn (rtx_insn *insn)
   4720 {
   4721   int saved_new_regno_start = new_regno_start;
   4722   int saved_new_insn_uid_start = new_insn_uid_start;
   4723   bool change_p;
   4724 
   4725   curr_insn = insn;
   4726   curr_id = lra_get_insn_recog_data (curr_insn);
   4727   curr_static_id = curr_id->insn_static_data;
   4728   new_insn_uid_start = get_max_uid ();
   4729   new_regno_start = max_reg_num ();
   4730   change_p = curr_insn_transform (true);
   4731   new_regno_start = saved_new_regno_start;
   4732   new_insn_uid_start = saved_new_insn_uid_start;
   4733   return ! change_p;
   4734 }
   4735 
   4736 /* Return true if X is in LIST.	 */
   4737 static bool
   4738 in_list_p (rtx x, rtx list)
   4739 {
   4740   for (; list != NULL_RTX; list = XEXP (list, 1))
   4741     if (XEXP (list, 0) == x)
   4742       return true;
   4743   return false;
   4744 }
   4745 
   4746 /* Return true if X contains an allocatable hard register (if
   4747    HARD_REG_P) or a (spilled if SPILLED_P) pseudo.  */
   4748 static bool
   4749 contains_reg_p (rtx x, bool hard_reg_p, bool spilled_p)
   4750 {
   4751   int i, j;
   4752   const char *fmt;
   4753   enum rtx_code code;
   4754 
   4755   code = GET_CODE (x);
   4756   if (REG_P (x))
   4757     {
   4758       int regno = REGNO (x);
   4759       HARD_REG_SET alloc_regs;
   4760 
   4761       if (hard_reg_p)
   4762 	{
   4763 	  if (regno >= FIRST_PSEUDO_REGISTER)
   4764 	    regno = lra_get_regno_hard_regno (regno);
   4765 	  if (regno < 0)
   4766 	    return false;
   4767 	  alloc_regs = ~lra_no_alloc_regs;
   4768 	  return overlaps_hard_reg_set_p (alloc_regs, GET_MODE (x), regno);
   4769 	}
   4770       else
   4771 	{
   4772 	  if (regno < FIRST_PSEUDO_REGISTER)
   4773 	    return false;
   4774 	  if (! spilled_p)
   4775 	    return true;
   4776 	  return lra_get_regno_hard_regno (regno) < 0;
   4777 	}
   4778     }
   4779   fmt = GET_RTX_FORMAT (code);
   4780   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   4781     {
   4782       if (fmt[i] == 'e')
   4783 	{
   4784 	  if (contains_reg_p (XEXP (x, i), hard_reg_p, spilled_p))
   4785 	    return true;
   4786 	}
   4787       else if (fmt[i] == 'E')
   4788 	{
   4789 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
   4790 	    if (contains_reg_p (XVECEXP (x, i, j), hard_reg_p, spilled_p))
   4791 	      return true;
   4792 	}
   4793     }
   4794   return false;
   4795 }
   4796 
/* Process all regs in location *LOC and change them on equivalent
   substitution.  Return true if any change was done.  */
static bool
loc_equivalence_change_p (rtx *loc)
{
  rtx subst, reg, x = *loc;
  bool result = false;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  if (code == SUBREG)
    {
      reg = SUBREG_REG (x);
      /* Only a VOIDmode equivalence (a constant) is substituted here,
	 so the subreg can be folded away while the inner mode is
	 still known.  */
      if ((subst = get_equiv_with_elimination (reg, curr_insn)) != reg
	  && GET_MODE (subst) == VOIDmode)
	{
	  /* We cannot reload debug location.  Simplify subreg here
	     while we know the inner mode.  */
	  *loc = simplify_gen_subreg (GET_MODE (x), subst,
				      GET_MODE (reg), SUBREG_BYTE (x));
	  return true;
	}
    }
  if (code == REG && (subst = get_equiv_with_elimination (x, curr_insn)) != x)
    {
      *loc = subst;
      return true;
    }

  /* Scan all the operand sub-expressions.  NB: the recursive call is
     deliberately on the left of || so it is evaluated (and performs
     its substitutions) even when RESULT is already true.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	result = loc_equivalence_change_p (&XEXP (x, i)) || result;
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  result
	    = loc_equivalence_change_p (&XVECEXP (x, i, j)) || result;
    }
  return result;
}
   4840 
   4841 /* Similar to loc_equivalence_change_p, but for use as
   4842    simplify_replace_fn_rtx callback.  DATA is insn for which the
   4843    elimination is done.  If it null we don't do the elimination.  */
   4844 static rtx
   4845 loc_equivalence_callback (rtx loc, const_rtx, void *data)
   4846 {
   4847   if (!REG_P (loc))
   4848     return NULL_RTX;
   4849 
   4850   rtx subst = (data == NULL
   4851 	       ? get_equiv (loc) : get_equiv_with_elimination (loc, (rtx_insn *) data));
   4852   if (subst != loc)
   4853     return subst;
   4854 
   4855   return NULL_RTX;
   4856 }
   4857 
   4858 /* Maximum number of generated reload insns per an insn.  It is for
   4859    preventing this pass cycling in a bug case.	*/
   4860 #define MAX_RELOAD_INSNS_NUMBER LRA_MAX_INSN_RELOADS
   4861 
   4862 /* The current iteration number of this LRA pass.  */
   4863 int lra_constraint_iter;
   4864 
   4865 /* True if we should during assignment sub-pass check assignment
   4866    correctness for all pseudos and spill some of them to correct
   4867    conflicts.  It can be necessary when we substitute equiv which
   4868    needs checking register allocation correctness because the
   4869    equivalent value contains allocatable hard registers, or when we
   4870    restore multi-register pseudo, or when we change the insn code and
   4871    its operand became INOUT operand when it was IN one before.  */
   4872 bool check_and_force_assignment_correctness_p;
   4873 
   4874 /* Return true if REGNO is referenced in more than one block.  */
   4875 static bool
   4876 multi_block_pseudo_p (int regno)
   4877 {
   4878   basic_block bb = NULL;
   4879   unsigned int uid;
   4880   bitmap_iterator bi;
   4881 
   4882   if (regno < FIRST_PSEUDO_REGISTER)
   4883     return false;
   4884 
   4885   EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
   4886     if (bb == NULL)
   4887       bb = BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn);
   4888     else if (BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn) != bb)
   4889       return true;
   4890   return false;
   4891 }
   4892 
   4893 /* Return true if LIST contains a deleted insn.  */
   4894 static bool
   4895 contains_deleted_insn_p (rtx_insn_list *list)
   4896 {
   4897   for (; list != NULL_RTX; list = list->next ())
   4898     if (NOTE_P (list->insn ())
   4899 	&& NOTE_KIND (list->insn ()) == NOTE_INSN_DELETED)
   4900       return true;
   4901   return false;
   4902 }
   4903 
   4904 /* Return true if X contains a pseudo dying in INSN.  */
   4905 static bool
   4906 dead_pseudo_p (rtx x, rtx_insn *insn)
   4907 {
   4908   int i, j;
   4909   const char *fmt;
   4910   enum rtx_code code;
   4911 
   4912   if (REG_P (x))
   4913     return (insn != NULL_RTX
   4914 	    && find_regno_note (insn, REG_DEAD, REGNO (x)) != NULL_RTX);
   4915   code = GET_CODE (x);
   4916   fmt = GET_RTX_FORMAT (code);
   4917   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   4918     {
   4919       if (fmt[i] == 'e')
   4920 	{
   4921 	  if (dead_pseudo_p (XEXP (x, i), insn))
   4922 	    return true;
   4923 	}
   4924       else if (fmt[i] == 'E')
   4925 	{
   4926 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
   4927 	    if (dead_pseudo_p (XVECEXP (x, i, j), insn))
   4928 	      return true;
   4929 	}
   4930     }
   4931   return false;
   4932 }
   4933 
/* Return true if the right hand side of INSN contains a pseudo dying
   in INSN.  INSN must be a single set (asserted below).  */
static bool
insn_rhs_dead_pseudo_p (rtx_insn *insn)
{
  rtx set = single_set (insn);

  gcc_assert (set != NULL);
  return dead_pseudo_p (SET_SRC (set), insn);
}
   4944 
   4945 /* Return true if any init insn of REGNO contains a dying pseudo in
   4946    insn right hand side.  */
   4947 static bool
   4948 init_insn_rhs_dead_pseudo_p (int regno)
   4949 {
   4950   rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
   4951 
   4952   if (insns == NULL)
   4953     return false;
   4954   for (; insns != NULL_RTX; insns = insns->next ())
   4955     if (insn_rhs_dead_pseudo_p (insns->insn ()))
   4956       return true;
   4957   return false;
   4958 }
   4959 
   4960 /* Return TRUE if REGNO has a reverse equivalence.  The equivalence is
   4961    reverse only if we have one init insn with given REGNO as a
   4962    source.  */
   4963 static bool
   4964 reverse_equiv_p (int regno)
   4965 {
   4966   rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
   4967   rtx set;
   4968 
   4969   if (insns == NULL)
   4970     return false;
   4971   if (! INSN_P (insns->insn ())
   4972       || insns->next () != NULL)
   4973     return false;
   4974   if ((set = single_set (insns->insn ())) == NULL_RTX)
   4975     return false;
   4976   return REG_P (SET_SRC (set)) && (int) REGNO (SET_SRC (set)) == regno;
   4977 }
   4978 
   4979 /* Return TRUE if REGNO was reloaded in an equivalence init insn.  We
   4980    call this function only for non-reverse equivalence.  */
   4981 static bool
   4982 contains_reloaded_insn_p (int regno)
   4983 {
   4984   rtx set;
   4985   rtx_insn_list *list = ira_reg_equiv[regno].init_insns;
   4986 
   4987   for (; list != NULL; list = list->next ())
   4988     if ((set = single_set (list->insn ())) == NULL_RTX
   4989 	|| ! REG_P (SET_DEST (set))
   4990 	|| (int) REGNO (SET_DEST (set)) != regno)
   4991       return true;
   4992   return false;
   4993 }
   4994 
/* Entry function of LRA constraint pass.  Return true if the
   constraint pass did change the code.  FIRST_P is true on the first
   iteration of the pass for the current function.  */
bool
lra_constraints (bool first_p)
{
  bool changed_p;
  int i, hard_regno, new_insns_num;
  unsigned int min_len, new_min_len, uid;
  rtx set, x, reg, dest_reg;
  basic_block last_bb;
  bitmap_iterator bi;

  lra_constraint_iter++;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "\n********** Local #%d: **********\n\n",
	     lra_constraint_iter);
  changed_p = false;
  if (pic_offset_table_rtx
      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
    check_and_force_assignment_correctness_p = true;
  else if (first_p)
    /* On the first iteration we should check IRA assignment
       correctness.  In rare cases, the assignments can be wrong as
       early clobbers operands are ignored in IRA or usages of
       paradoxical sub-registers are not taken into account by
       IRA.  */
    check_and_force_assignment_correctness_p = true;
  new_insn_uid_start = get_max_uid ();
  new_regno_start = first_p ? lra_constraint_new_regno_start : max_reg_num ();
  /* Mark used hard regs for target stack size calculations.  */
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    if (lra_reg_info[i].nrefs != 0
	&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
      {
	int j, nregs;

	nregs = hard_regno_nregs (hard_regno, lra_reg_info[i].biggest_mode);
	for (j = 0; j < nregs; j++)
	  df_set_regs_ever_live (hard_regno + j, true);
      }
  /* Do elimination before the equivalence processing as we can spill
     some pseudos during elimination.  */
  lra_eliminate (false, first_p);
  auto_bitmap equiv_insn_bitmap (&reg_obstack);
  /* Decide for each spilled pseudo whether its equivalence may still
     be used, and collect the insns mentioning pseudos whose
     equivalences survive.  */
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    if (lra_reg_info[i].nrefs != 0)
      {
	ira_reg_equiv[i].profitable_p = true;
	reg = regno_reg_rtx[i];
	if (lra_get_regno_hard_regno (i) < 0 && (x = get_equiv (reg)) != reg)
	  {
	    bool pseudo_p = contains_reg_p (x, false, false);

	    /* After RTL transformation, we cannot guarantee that
	       pseudo in the substitution was not reloaded which might
	       make equivalence invalid.  For example, in reverse
	       equiv of p0

	       p0 <- ...
	       ...
	       equiv_mem <- p0

	       the memory address register was reloaded before the 2nd
	       insn.  */
	    if ((! first_p && pseudo_p)
		/* We don't use DF for compilation speed sake.  So it
		   is problematic to update live info when we use an
		   equivalence containing pseudos in more than one
		   BB.  */
		|| (pseudo_p && multi_block_pseudo_p (i))
		/* If an init insn was deleted for some reason, cancel
		   the equiv.  We could update the equiv insns after
		   transformations including an equiv insn deletion
		   but it is not worthy as such cases are extremely
		   rare.  */
		|| contains_deleted_insn_p (ira_reg_equiv[i].init_insns)
		/* If it is not a reverse equivalence, we check that a
		   pseudo in rhs of the init insn is not dying in the
		   insn.  Otherwise, the live info at the beginning of
		   the corresponding BB might be wrong after we
		   removed the insn.  When the equiv can be a
		   constant, the right hand side of the init insn can
		   be a pseudo.  */
		|| (! reverse_equiv_p (i)
		    && (init_insn_rhs_dead_pseudo_p (i)
			/* If we reloaded the pseudo in an equivalence
			   init insn, we cannot remove the equiv init
			   insns and the init insns might write into
			   const memory in this case.  */
			|| contains_reloaded_insn_p (i)))
		/* Prevent access beyond equivalent memory for
		   paradoxical subregs.  */
		|| (MEM_P (x)
		    && maybe_gt (GET_MODE_SIZE (lra_reg_info[i].biggest_mode),
				 GET_MODE_SIZE (GET_MODE (x))))
		|| (pic_offset_table_rtx
		    && ((CONST_POOL_OK_P (PSEUDO_REGNO_MODE (i), x)
			 && (targetm.preferred_reload_class
			     (x, lra_get_allocno_class (i)) == NO_REGS))
			|| contains_symbol_ref_p (x))))
	      ira_reg_equiv[i].defined_p = false;
	    if (contains_reg_p (x, false, true))
	      ira_reg_equiv[i].profitable_p = false;
	    if (get_equiv (reg) != reg)
	      bitmap_ior_into (equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
	  }
      }
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    update_equiv (i);
  /* We should add all insns containing pseudos which should be
     substituted by their equivalences.  */
  EXECUTE_IF_SET_IN_BITMAP (equiv_insn_bitmap, 0, uid, bi)
    lra_push_insn_by_uid (uid);
  /* MIN_LEN and NEW_INSNS_NUM implement the cycling guard below:
     NEW_INSNS_NUM counts insns processed since the stack last reached
     a new minimum length; exceeding MAX_RELOAD_INSNS_NUMBER means the
     pass is looping.  */
  min_len = lra_insn_stack_length ();
  new_insns_num = 0;
  last_bb = NULL;
  changed_p = false;
  /* Main loop: pop insns from the LRA stack and transform each one
     until the stack is empty.  */
  while ((new_min_len = lra_insn_stack_length ()) != 0)
    {
      curr_insn = lra_pop_insn ();
      --new_min_len;
      curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (curr_bb != last_bb)
	{
	  last_bb = curr_bb;
	  bb_reload_num = lra_curr_reload_num;
	}
      if (min_len > new_min_len)
	{
	  min_len = new_min_len;
	  new_insns_num = 0;
	}
      if (new_insns_num > MAX_RELOAD_INSNS_NUMBER)
	internal_error
	  ("maximum number of generated reload insns per insn achieved (%d)",
	   MAX_RELOAD_INSNS_NUMBER);
      new_insns_num++;
      if (DEBUG_INSN_P (curr_insn))
	{
	  /* We need to check equivalence in debug insn and change
	     pseudo to the equivalent value if necessary.  */
	  curr_id = lra_get_insn_recog_data (curr_insn);
	  if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn)))
	    {
	      rtx old = *curr_id->operand_loc[0];
	      *curr_id->operand_loc[0]
		= simplify_replace_fn_rtx (old, NULL_RTX,
					   loc_equivalence_callback, curr_insn);
	      if (old != *curr_id->operand_loc[0])
		{
		  /* If we substitute pseudo by shared equivalence, we can fail
		     to update LRA reg info and this can result in many
		     unexpected consequences.  So keep rtl unshared:  */
		  *curr_id->operand_loc[0]
		    = copy_rtx (*curr_id->operand_loc[0]);
		  lra_update_insn_regno_info (curr_insn);
		  changed_p = true;
		}
	    }
	}
      else if (INSN_P (curr_insn))
	{
	  if ((set = single_set (curr_insn)) != NULL_RTX)
	    {
	      dest_reg = SET_DEST (set);
	      /* The equivalence pseudo could be set up as SUBREG in a
		 case when it is a call restore insn in a mode
		 different from the pseudo mode.  */
	      if (GET_CODE (dest_reg) == SUBREG)
		dest_reg = SUBREG_REG (dest_reg);
	      if ((REG_P (dest_reg)
		   && (x = get_equiv (dest_reg)) != dest_reg
		   /* Remove insns which set up a pseudo whose value
		      cannot be changed.  Such insns might be not in
		      init_insns because we don't update equiv data
		      during insn transformations.

		      As an example, let suppose that a pseudo got
		      hard register and on the 1st pass was not
		      changed to equivalent constant.  We generate an
		      additional insn setting up the pseudo because of
		      secondary memory movement.  Then the pseudo is
		      spilled and we use the equiv constant.  In this
		      case we should remove the additional insn and
		      this insn is not init_insns list.  */
		   && (! MEM_P (x) || MEM_READONLY_P (x)
		       /* Check that this is actually an insn setting
			  up the equivalence.  */
		       || in_list_p (curr_insn,
				     ira_reg_equiv
				     [REGNO (dest_reg)].init_insns)))
		  || (((x = get_equiv (SET_SRC (set))) != SET_SRC (set))
		      && in_list_p (curr_insn,
				    ira_reg_equiv
				    [REGNO (SET_SRC (set))].init_insns)))
		{
		  /* This is equiv init insn of pseudo which did not get a
		     hard register -- remove the insn.	*/
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file,
			       "      Removing equiv init insn %i (freq=%d)\n",
			       INSN_UID (curr_insn),
			       REG_FREQ_FROM_BB (BLOCK_FOR_INSN (curr_insn)));
		      dump_insn_slim (lra_dump_file, curr_insn);
		    }
		  if (contains_reg_p (x, true, false))
		    check_and_force_assignment_correctness_p = true;
		  lra_set_insn_deleted (curr_insn);
		  continue;
		}
	    }
	  curr_id = lra_get_insn_recog_data (curr_insn);
	  curr_static_id = curr_id->insn_static_data;
	  init_curr_insn_input_reloads ();
	  init_curr_operand_mode ();
	  if (curr_insn_transform (false))
	    changed_p = true;
	  /* Check non-transformed insns too for equiv change as USE
	     or CLOBBER don't need reloads but can contain pseudos
	     being changed on their equivalences.  */
	  else if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn))
		   && loc_equivalence_change_p (&PATTERN (curr_insn)))
	    {
	      lra_update_insn_regno_info (curr_insn);
	      changed_p = true;
	    }
	}
    }

  /* If we used a new hard regno, changed_p should be true because the
     hard reg is assigned to a new pseudo.  */
  if (flag_checking && !changed_p)
    {
      for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
	if (lra_reg_info[i].nrefs != 0
	    && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
	  {
	    int j, nregs = hard_regno_nregs (hard_regno,
					     PSEUDO_REGNO_MODE (i));

	    for (j = 0; j < nregs; j++)
	      lra_assert (df_regs_ever_live_p (hard_regno + j));
	  }
    }
  return changed_p;
}
   5242 
   5243 static void initiate_invariants (void);
   5244 static void finish_invariants (void);
   5245 
/* Initiate the LRA constraint pass.  It is done once per function and
   sets up the inheritance invariant table.  */
void
lra_constraints_init (void)
{
  initiate_invariants ();
}
   5253 
/* Finalize the LRA constraint pass.  It is done once per function and
   releases the inheritance invariant table.  */
void
lra_constraints_finish (void)
{
  finish_invariants ();
}
   5261 
   5262 
   5263 
/* Structure describes invariants for inheritance.  */
struct lra_invariant
{
  /* The order number of the invariant.  */
  int num;
  /* The invariant RTX.  */
  rtx invariant_rtx;
  /* The origin insn of the invariant.  */
  rtx_insn *insn;
};
   5275 
   5276 typedef lra_invariant invariant_t;
   5277 typedef invariant_t *invariant_ptr_t;
   5278 typedef const invariant_t *const_invariant_ptr_t;
   5279 
   5280 /* Pointer to the inheritance invariants.  */
   5281 static vec<invariant_ptr_t> invariants;
   5282 
   5283 /* Allocation pool for the invariants.  */
   5284 static object_allocator<lra_invariant> *invariants_pool;
   5285 
   5286 /* Hash table for the invariants.  */
   5287 static htab_t invariant_table;
   5288 
   5289 /* Hash function for INVARIANT.  */
   5290 static hashval_t
   5291 invariant_hash (const void *invariant)
   5292 {
   5293   rtx inv = ((const_invariant_ptr_t) invariant)->invariant_rtx;
   5294   return lra_rtx_hash (inv);
   5295 }
   5296 
   5297 /* Equal function for invariants INVARIANT1 and INVARIANT2.  */
   5298 static int
   5299 invariant_eq_p (const void *invariant1, const void *invariant2)
   5300 {
   5301   rtx inv1 = ((const_invariant_ptr_t) invariant1)->invariant_rtx;
   5302   rtx inv2 = ((const_invariant_ptr_t) invariant2)->invariant_rtx;
   5303 
   5304   return rtx_equal_p (inv1, inv2);
   5305 }
   5306 
   5307 /* Insert INVARIANT_RTX into the table if it is not there yet.  Return
   5308    invariant which is in the table.  */
   5309 static invariant_ptr_t
   5310 insert_invariant (rtx invariant_rtx)
   5311 {
   5312   void **entry_ptr;
   5313   invariant_t invariant;
   5314   invariant_ptr_t invariant_ptr;
   5315 
   5316   invariant.invariant_rtx = invariant_rtx;
   5317   entry_ptr = htab_find_slot (invariant_table, &invariant, INSERT);
   5318   if (*entry_ptr == NULL)
   5319     {
   5320       invariant_ptr = invariants_pool->allocate ();
   5321       invariant_ptr->invariant_rtx = invariant_rtx;
   5322       invariant_ptr->insn = NULL;
   5323       invariants.safe_push (invariant_ptr);
   5324       *entry_ptr = (void *) invariant_ptr;
   5325     }
   5326   return (invariant_ptr_t) *entry_ptr;
   5327 }
   5328 
   5329 /* Initiate the invariant table.  */
   5330 static void
   5331 initiate_invariants (void)
   5332 {
   5333   invariants.create (100);
   5334   invariants_pool
   5335     = new object_allocator<lra_invariant> ("Inheritance invariants");
   5336   invariant_table = htab_create (100, invariant_hash, invariant_eq_p, NULL);
   5337 }
   5338 
   5339 /* Finish the invariant table.  */
   5340 static void
   5341 finish_invariants (void)
   5342 {
   5343   htab_delete (invariant_table);
   5344   delete invariants_pool;
   5345   invariants.release ();
   5346 }
   5347 
   5348 /* Make the invariant table empty.  */
   5349 static void
   5350 clear_invariants (void)
   5351 {
   5352   htab_empty (invariant_table);
   5353   invariants_pool->release ();
   5354   invariants.truncate (0);
   5355 }
   5356 
   5357 
   5358 
   5360 /* This page contains code to do inheritance/split
   5361    transformations.  */
   5362 
   5363 /* Number of reloads passed so far in current EBB.  */
   5364 static int reloads_num;
   5365 
   5366 /* Number of calls passed so far in current EBB.  */
   5367 static int calls_num;
   5368 
   5369 /* Index ID is the CALLS_NUM associated the last call we saw with
   5370    ABI identifier ID.  */
   5371 static int last_call_for_abi[NUM_ABI_IDS];
   5372 
   5373 /* Which registers have been fully or partially clobbered by a call
   5374    since they were last used.  */
   5375 static HARD_REG_SET full_and_partial_call_clobbers;
   5376 
   5377 /* Current reload pseudo check for validity of elements in
   5378    USAGE_INSNS.	 */
   5379 static int curr_usage_insns_check;
   5380 
/* Info about last usage of registers in EBB to do inheritance/split
   transformation.  Inheritance transformation is done from a spilled
   pseudo and split transformations from a hard register or a pseudo
   assigned to a hard register.	 */
struct usage_insns
{
  /* If the value is equal to CURR_USAGE_INSNS_CHECK, then the member
     value INSNS is valid.  The insns is chain of optional debug insns
     and a finishing non-debug insn using the corresponding reg.  The
     value is also used to mark the registers which are set up in the
     current insn.  The negated insn uid is used for this.  */
  int check;
  /* Value of global reloads_num at the last insn in INSNS.  */
  int reloads_num;
  /* Value of global calls_num at the last insn in INSNS.  */
  int calls_num;
  /* It can be true only for splitting.	 And it means that the restore
     insn should be put after insn given by the following member.  */
  bool after_p;
  /* Next insns in the current EBB which use the original reg and the
     original reg value is not changed between the current insn and
     the next insns.  In other words, e.g. for inheritance, if we need
     to use the original reg value again in the next insns we can try
     to use the value in a hard register from a reload insn of the
     current insn.  */
  rtx insns;
};
   5408 
   5409 /* Map: regno -> corresponding pseudo usage insns.  */
   5410 static struct usage_insns *usage_insns;
   5411 
   5412 static void
   5413 setup_next_usage_insn (int regno, rtx insn, int reloads_num, bool after_p)
   5414 {
   5415   usage_insns[regno].check = curr_usage_insns_check;
   5416   usage_insns[regno].insns = insn;
   5417   usage_insns[regno].reloads_num = reloads_num;
   5418   usage_insns[regno].calls_num = calls_num;
   5419   usage_insns[regno].after_p = after_p;
   5420   if (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0)
   5421     remove_from_hard_reg_set (&full_and_partial_call_clobbers,
   5422 			      PSEUDO_REGNO_MODE (regno),
   5423 			      reg_renumber[regno]);
   5424 }
   5425 
   5426 /* The function is used to form list REGNO usages which consists of
   5427    optional debug insns finished by a non-debug insn using REGNO.
   5428    RELOADS_NUM is current number of reload insns processed so far.  */
   5429 static void
   5430 add_next_usage_insn (int regno, rtx_insn *insn, int reloads_num)
   5431 {
   5432   rtx next_usage_insns;
   5433 
   5434   if (usage_insns[regno].check == curr_usage_insns_check
   5435       && (next_usage_insns = usage_insns[regno].insns) != NULL_RTX
   5436       && DEBUG_INSN_P (insn))
   5437     {
   5438       /* Check that we did not add the debug insn yet.	*/
   5439       if (next_usage_insns != insn
   5440 	  && (GET_CODE (next_usage_insns) != INSN_LIST
   5441 	      || XEXP (next_usage_insns, 0) != insn))
   5442 	usage_insns[regno].insns = gen_rtx_INSN_LIST (VOIDmode, insn,
   5443 						      next_usage_insns);
   5444     }
   5445   else if (NONDEBUG_INSN_P (insn))
   5446     setup_next_usage_insn (regno, insn, reloads_num, false);
   5447   else
   5448     usage_insns[regno].check = 0;
   5449 }
   5450 
   5451 /* Return first non-debug insn in list USAGE_INSNS.  */
   5452 static rtx_insn *
   5453 skip_usage_debug_insns (rtx usage_insns)
   5454 {
   5455   rtx insn;
   5456 
   5457   /* Skip debug insns.  */
   5458   for (insn = usage_insns;
   5459        insn != NULL_RTX && GET_CODE (insn) == INSN_LIST;
   5460        insn = XEXP (insn, 1))
   5461     ;
   5462   return safe_as_a <rtx_insn *> (insn);
   5463 }
   5464 
   5465 /* Return true if we need secondary memory moves for insn in
   5466    USAGE_INSNS after inserting inherited pseudo of class INHER_CL
   5467    into the insn.  */
   5468 static bool
   5469 check_secondary_memory_needed_p (enum reg_class inher_cl ATTRIBUTE_UNUSED,
   5470 				 rtx usage_insns ATTRIBUTE_UNUSED)
   5471 {
   5472   rtx_insn *insn;
   5473   rtx set, dest;
   5474   enum reg_class cl;
   5475 
   5476   if (inher_cl == ALL_REGS
   5477       || (insn = skip_usage_debug_insns (usage_insns)) == NULL_RTX)
   5478     return false;
   5479   lra_assert (INSN_P (insn));
   5480   if ((set = single_set (insn)) == NULL_RTX || ! REG_P (SET_DEST (set)))
   5481     return false;
   5482   dest = SET_DEST (set);
   5483   if (! REG_P (dest))
   5484     return false;
   5485   lra_assert (inher_cl != NO_REGS);
   5486   cl = get_reg_class (REGNO (dest));
   5487   return (cl != NO_REGS && cl != ALL_REGS
   5488 	  && targetm.secondary_memory_needed (GET_MODE (dest), inher_cl, cl));
   5489 }
   5490 
   5491 /* Registers involved in inheritance/split in the current EBB
   5492    (inheritance/split pseudos and original registers).	*/
   5493 static bitmap_head check_only_regs;
   5494 
/* Reload pseudos cannot be involved in invariant inheritance in the
   current EBB.  */
   5497 static bitmap_head invalid_invariant_regs;
   5498 
   5499 /* Do inheritance transformations for insn INSN, which defines (if
   5500    DEF_P) or uses ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which
   5501    instruction in the EBB next uses ORIGINAL_REGNO; it has the same
   5502    form as the "insns" field of usage_insns.  Return true if we
   5503    succeed in such transformation.
   5504 
   5505    The transformations look like:
   5506 
   5507      p <- ...		  i <- ...
   5508      ...		  p <- i    (new insn)
   5509      ...	     =>
   5510      <- ... p ...	  <- ... i ...
   5511    or
   5512      ...		  i <- p    (new insn)
   5513      <- ... p ...	  <- ... i ...
   5514      ...	     =>
   5515      <- ... p ...	  <- ... i ...
   5516    where p is a spilled original pseudo and i is a new inheritance pseudo.
   5517 
   5518 
   5519    The inheritance pseudo has the smallest class of two classes CL and
   5520    class of ORIGINAL REGNO.  */
static bool
inherit_reload_reg (bool def_p, int original_regno,
		    enum reg_class cl, rtx_insn *insn, rtx next_usage_insns)
{
  /* Inheritance adds move insns, so it only pays off when optimizing
     for speed.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  enum reg_class rclass = lra_get_allocno_class (original_regno);
  rtx original_reg = regno_reg_rtx[original_regno];
  rtx new_reg, usage_insn;
  rtx_insn *new_insns;

  lra_assert (! usage_insns[original_regno].after_p);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
  /* If CL and the original pseudo's allocno class share no hard
     registers, no single inheritance pseudo can serve both ends.  */
  if (! ira_reg_classes_intersect_p[cl][rclass])
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for %d "
		   "because of disjoint classes %s and %s\n",
		   original_regno, reg_class_names[cl],
		   reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  if ((ira_class_subset_p[cl][rclass] && cl != rclass)
      /* We don't use a subset of two classes because it can be
	 NO_REGS.  This transformation is still profitable in most
	 cases even if the classes are not intersected as register
	 move is probably cheaper than a memory load.  */
      || ira_class_hard_regs_num[cl] < ira_class_hard_regs_num[rclass])
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "    Use smallest class of %s and %s\n",
		 reg_class_names[cl], reg_class_names[rclass]);

      rclass = cl;
    }
  if (check_secondary_memory_needed_p (rclass, next_usage_insns))
    {
      /* Reject inheritance resulting in secondary memory moves.
	 Otherwise, there is a danger in LRA cycling.  Also such
	 transformation will be unprofitable.  */
      if (lra_dump_file != NULL)
	{
	  rtx_insn *insn = skip_usage_debug_insns (next_usage_insns);
	  rtx set = single_set (insn);

	  lra_assert (set != NULL_RTX);

	  rtx dest = SET_DEST (set);

	  lra_assert (REG_P (dest));
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for insn %d(%s)<-%d(%s) "
		   "as secondary mem is needed\n",
		   REGNO (dest), reg_class_names[get_reg_class (REGNO (dest))],
		   original_regno, reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
				rclass, NULL, "inheritance");
  start_sequence ();
  /* For a definition, copy the inheritance pseudo back to the
     original after INSN; for a use, load the inheritance pseudo from
     the original before INSN.  */
  if (def_p)
    lra_emit_move (original_reg, new_reg);
  else
    lra_emit_move (new_reg, original_reg);
  new_insns = get_insns ();
  end_sequence ();
  /* The inheritance move must expand to exactly one insn; more would
     defeat the purpose of the transformation.  */
  if (NEXT_INSN (new_insns) != NULL_RTX)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance %d->%d "
		   "as it results in 2 or more insns:\n",
		   original_regno, REGNO (new_reg));
	  dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  lra_substitute_pseudo_within_insn (insn, original_regno, new_reg, false);
  lra_update_insn_regno_info (insn);
  if (! def_p)
    /* We now have a new usage insn for original regno.  */
    setup_next_usage_insn (original_regno, new_insns, reloads_num, false);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "    Original reg change %d->%d (bb%d):\n",
	     original_regno, REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
  /* Record the undo information and mark both pseudos for live-info
     updating in this EBB.  */
  lra_reg_info[REGNO (new_reg)].restore_rtx = regno_reg_rtx[original_regno];
  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
  bitmap_set_bit (&check_only_regs, original_regno);
  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
  if (def_p)
    lra_process_new_insns (insn, NULL, new_insns,
			   "Add original<-inheritance");
  else
    lra_process_new_insns (insn, new_insns, NULL,
			   "Add inheritance<-original");
  /* Rewrite the recorded next uses -- a chain of debug insns possibly
     ending in one non-debug insn -- to use the inheritance pseudo.  */
  while (next_usage_insns != NULL_RTX)
    {
      if (GET_CODE (next_usage_insns) != INSN_LIST)
	{
	  usage_insn = next_usage_insns;
	  lra_assert (NONDEBUG_INSN_P (usage_insn));
	  next_usage_insns = NULL;
	}
      else
	{
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	}
      lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
			     DEBUG_INSN_P (usage_insn));
      lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
      if (lra_dump_file != NULL)
	{
	  basic_block bb = BLOCK_FOR_INSN (usage_insn);
	  fprintf (lra_dump_file,
		   "    Inheritance reuse change %d->%d (bb%d):\n",
		   original_regno, REGNO (new_reg),
		   bb ? bb->index : -1);
	  dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
	}
    }
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
  return true;
}
   5662 
   5663 /* Return true if we need a caller save/restore for pseudo REGNO which
   5664    was assigned to a hard register.  */
   5665 static inline bool
   5666 need_for_call_save_p (int regno)
   5667 {
   5668   lra_assert (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0);
   5669   if (usage_insns[regno].calls_num < calls_num)
   5670     {
   5671       unsigned int abis = 0;
   5672       for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
   5673 	if (last_call_for_abi[i] > usage_insns[regno].calls_num)
   5674 	  abis |= 1 << i;
   5675       gcc_assert (abis);
   5676       if (call_clobbered_in_region_p (abis, full_and_partial_call_clobbers,
   5677 				      PSEUDO_REGNO_MODE (regno),
   5678 				      reg_renumber[regno]))
   5679 	return true;
   5680     }
   5681   return false;
   5682 }
   5683 
   5684 /* Global registers occurring in the current EBB.  */
   5685 static bitmap_head ebb_global_regs;
   5686 
   5687 /* Return true if we need a split for hard register REGNO or pseudo
   5688    REGNO which was assigned to a hard register.
   5689    POTENTIAL_RELOAD_HARD_REGS contains hard registers which might be
   5690    used for reloads since the EBB end.	It is an approximation of the
   5691    used hard registers in the split range.  The exact value would
   5692    require expensive calculations.  If we were aggressive with
   5693    splitting because of the approximation, the split pseudo will save
   5694    the same hard register assignment and will be removed in the undo
   5695    pass.  We still need the approximation because too aggressive
   5696    splitting would result in too inaccurate cost calculation in the
   5697    assignment pass because of too many generated moves which will be
   5698    probably removed in the undo pass.  */
static inline bool
need_for_split_p (HARD_REG_SET potential_reload_hard_regs, int regno)
{
  /* For a pseudo, examine the hard register it was assigned to.  */
  int hard_regno = regno < FIRST_PSEUDO_REGISTER ? regno : reg_renumber[regno];

  lra_assert (hard_regno >= 0);
  return ((TEST_HARD_REG_BIT (potential_reload_hard_regs, hard_regno)
	   /* Don't split eliminable hard registers, otherwise we can
	      split hard registers like hard frame pointer, which
	      lives on BB start/end according to DF-infrastructure,
	      when there is a pseudo assigned to the register and
	      living in the same BB.  */
	   && (regno >= FIRST_PSEUDO_REGISTER
	       || ! TEST_HARD_REG_BIT (eliminable_regset, hard_regno))
	   && ! TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno)
	   /* Don't split call clobbered hard regs living through
	      calls, otherwise we might have a check problem in the
	      assign sub-pass as in the most cases (exception is a
	      situation when check_and_force_assignment_correctness_p value is
	      true) the assign pass assumes that all pseudos living
	      through calls are assigned to call saved hard regs.  */
	   && (regno >= FIRST_PSEUDO_REGISTER
	       || !TEST_HARD_REG_BIT (full_and_partial_call_clobbers, regno))
	   /* We need at least 2 reloads to make pseudo splitting
	      profitable.  We should provide hard regno splitting in
	      any case to solve 1st insn scheduling problem when
	      moving hard register definition up might result in
	      impossibility to find hard register for reload pseudo of
	      small register class.  */
	   && (usage_insns[regno].reloads_num
	       + (regno < FIRST_PSEUDO_REGISTER ? 0 : 3) < reloads_num)
	   && (regno < FIRST_PSEUDO_REGISTER
	       /* For short living pseudos, spilling + inheritance can
		  be considered a substitution for splitting.
		  Therefore we do not split local pseudos.  It
		  decreases also aggressiveness of splitting.  The
		  minimal number of references is chosen taking into
		  account that for 2 references splitting has no sense
		  as we can just spill the pseudo.  */
	       || (regno >= FIRST_PSEUDO_REGISTER
		   && lra_reg_info[regno].nrefs > 3
		   && bitmap_bit_p (&ebb_global_regs, regno))))
	  /* A pseudo living through calls needs the split (a caller
	     save) regardless of the heuristics above.  */
	  || (regno >= FIRST_PSEUDO_REGISTER && need_for_call_save_p (regno)));
}
   5743 
   5744 /* Return class for the split pseudo created from original pseudo with
   5745    ALLOCNO_CLASS and MODE which got a hard register HARD_REGNO.	 We
   5746    choose subclass of ALLOCNO_CLASS which contains HARD_REGNO and
   5747    results in no secondary memory movements.  */
   5748 static enum reg_class
   5749 choose_split_class (enum reg_class allocno_class,
   5750 		    int hard_regno ATTRIBUTE_UNUSED,
   5751 		    machine_mode mode ATTRIBUTE_UNUSED)
   5752 {
   5753   int i;
   5754   enum reg_class cl, best_cl = NO_REGS;
   5755   enum reg_class hard_reg_class ATTRIBUTE_UNUSED
   5756     = REGNO_REG_CLASS (hard_regno);
   5757 
   5758   if (! targetm.secondary_memory_needed (mode, allocno_class, allocno_class)
   5759       && TEST_HARD_REG_BIT (reg_class_contents[allocno_class], hard_regno))
   5760     return allocno_class;
   5761   for (i = 0;
   5762        (cl = reg_class_subclasses[allocno_class][i]) != LIM_REG_CLASSES;
   5763        i++)
   5764     if (! targetm.secondary_memory_needed (mode, cl, hard_reg_class)
   5765 	&& ! targetm.secondary_memory_needed (mode, hard_reg_class, cl)
   5766 	&& TEST_HARD_REG_BIT (reg_class_contents[cl], hard_regno)
   5767 	&& (best_cl == NO_REGS
   5768 	    || ira_class_hard_regs_num[best_cl] < ira_class_hard_regs_num[cl]))
   5769       best_cl = cl;
   5770   return best_cl;
   5771 }
   5772 
   5773 /* Copy any equivalence information from ORIGINAL_REGNO to NEW_REGNO.
   5774    It only makes sense to call this function if NEW_REGNO is always
   5775    equal to ORIGINAL_REGNO.  */
   5776 
   5777 static void
   5778 lra_copy_reg_equiv (unsigned int new_regno, unsigned int original_regno)
   5779 {
   5780   if (!ira_reg_equiv[original_regno].defined_p)
   5781     return;
   5782 
   5783   ira_expand_reg_equiv ();
   5784   ira_reg_equiv[new_regno].defined_p = true;
   5785   if (ira_reg_equiv[original_regno].memory)
   5786     ira_reg_equiv[new_regno].memory
   5787       = copy_rtx (ira_reg_equiv[original_regno].memory);
   5788   if (ira_reg_equiv[original_regno].constant)
   5789     ira_reg_equiv[new_regno].constant
   5790       = copy_rtx (ira_reg_equiv[original_regno].constant);
   5791   if (ira_reg_equiv[original_regno].invariant)
   5792     ira_reg_equiv[new_regno].invariant
   5793       = copy_rtx (ira_reg_equiv[original_regno].invariant);
   5794 }
   5795 
   5796 /* Do split transformations for insn INSN, which defines or uses
   5797    ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which instruction in
   5798    the EBB next uses ORIGINAL_REGNO; it has the same form as the
   5799    "insns" field of usage_insns.  If TO is not NULL, we don't use
   5800    usage_insns, we put restore insns after TO insn.  It is a case when
   5801    we call it from lra_split_hard_reg_for, outside the inheritance
   5802    pass.
   5803 
   5804    The transformations look like:
   5805 
   5806      p <- ...		  p <- ...
   5807      ...		  s <- p    (new insn -- save)
   5808      ...	     =>
   5809      ...		  p <- s    (new insn -- restore)
   5810      <- ... p ...	  <- ... p ...
   5811    or
   5812      <- ... p ...	  <- ... p ...
   5813      ...		  s <- p    (new insn -- save)
   5814      ...	     =>
   5815      ...		  p <- s    (new insn -- restore)
   5816      <- ... p ...	  <- ... p ...
   5817 
   5818    where p is an original pseudo got a hard register or a hard
   5819    register and s is a new split pseudo.  The save is put before INSN
   5820    if BEFORE_P is true.	 Return true if we succeed in such
   5821    transformation.  */
static bool
split_reg (bool before_p, int original_regno, rtx_insn *insn,
	   rtx next_usage_insns, rtx_insn *to)
{
  enum reg_class rclass;
  rtx original_reg;
  int hard_regno, nregs;
  rtx new_reg, usage_insn;
  rtx_insn *restore, *save;
  bool after_p;
  bool call_save_p;
  machine_mode mode;

  if (original_regno < FIRST_PSEUDO_REGISTER)
    {
      /* Splitting a hard register itself.  */
      rclass = ira_allocno_class_translate[REGNO_REG_CLASS (original_regno)];
      hard_regno = original_regno;
      call_save_p = false;
      nregs = 1;
      mode = lra_reg_info[hard_regno].biggest_mode;
      machine_mode reg_rtx_mode = GET_MODE (regno_reg_rtx[hard_regno]);
      /* A reg can have a biggest_mode of VOIDmode if it was only ever seen as
	 part of a multi-word register.  In that case, just use the reg_rtx
	 mode.  Do the same also if the biggest mode was larger than a register
	 or we can not compare the modes.  Otherwise, limit the size to that of
	 the biggest access in the function or to the natural mode at least.  */
      if (mode == VOIDmode
	  || !ordered_p (GET_MODE_PRECISION (mode),
			 GET_MODE_PRECISION (reg_rtx_mode))
	  || paradoxical_subreg_p (mode, reg_rtx_mode)
	  || maybe_gt (GET_MODE_PRECISION (reg_rtx_mode), GET_MODE_PRECISION (mode)))
	{
	  original_reg = regno_reg_rtx[hard_regno];
	  mode = reg_rtx_mode;
	}
      else
	original_reg = gen_rtx_REG (mode, hard_regno);
    }
  else
    {
      /* Splitting a pseudo that was assigned a hard register.  */
      mode = PSEUDO_REGNO_MODE (original_regno);
      hard_regno = reg_renumber[original_regno];
      nregs = hard_regno_nregs (hard_regno, mode);
      rclass = lra_get_allocno_class (original_regno);
      original_reg = regno_reg_rtx[original_regno];
      call_save_p = need_for_call_save_p (original_regno);
    }
  lra_assert (hard_regno >= 0);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  ((((((((((((((((((((((((((((((((((((((((((((((((\n");

  if (call_save_p)
    {
      /* For a save across calls use the target's preferred
	 caller-save mode; the save pseudo gets NO_REGS so it is
	 spilled to memory if it cannot be kept in a register.  */
      mode = HARD_REGNO_CALLER_SAVE_MODE (hard_regno,
					  hard_regno_nregs (hard_regno, mode),
					  mode);
      new_reg = lra_create_new_reg (mode, NULL_RTX, NO_REGS, NULL, "save");
    }
  else
    {
      rclass = choose_split_class (rclass, hard_regno, mode);
      if (rclass == NO_REGS)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Rejecting split of %d(%s): "
		       "no good reg class for %d(%s)\n",
		       original_regno,
		       reg_class_names[lra_get_allocno_class (original_regno)],
		       hard_regno,
		       reg_class_names[REGNO_REG_CLASS (hard_regno)]);
	      fprintf
		(lra_dump_file,
		 "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
	    }
	  return false;
	}
      /* Split_if_necessary can split hard registers used as part of a
	 multi-register mode but splits each register individually.  The
	 mode used for each independent register may not be supported
	 so reject the split.  Splitting the wider mode should theoretically
	 be possible but is not implemented.  */
      if (!targetm.hard_regno_mode_ok (hard_regno, mode))
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Rejecting split of %d(%s): unsuitable mode %s\n",
		       original_regno,
		       reg_class_names[lra_get_allocno_class (original_regno)],
		       GET_MODE_NAME (mode));
	      fprintf
		(lra_dump_file,
		 "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
	    }
	  return false;
	}
      new_reg = lra_create_new_reg (mode, original_reg, rclass, NULL, "split");
      /* The split pseudo keeps the same hard register assignment.  */
      reg_renumber[REGNO (new_reg)] = hard_regno;
    }
  int new_regno = REGNO (new_reg);
  /* The save and the restore must each be a single insn.  */
  save = emit_spill_move (true, new_reg, original_reg);
  if (NEXT_INSN (save) != NULL_RTX && !call_save_p)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf
	    (lra_dump_file,
	     "	  Rejecting split %d->%d resulting in > 2 save insns:\n",
	     original_regno, new_regno);
	  dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	))))))))))))))))))))))))))))))))))))))))))))))))\n");
	}
      return false;
    }
  restore = emit_spill_move (false, new_reg, original_reg);
  if (NEXT_INSN (restore) != NULL_RTX && !call_save_p)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "	Rejecting split %d->%d "
		   "resulting in > 2 restore insns:\n",
		   original_regno, new_regno);
	  dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	))))))))))))))))))))))))))))))))))))))))))))))))\n");
	}
      return false;
    }
  /* Transfer equivalence information to the spill register, so that
     if we fail to allocate the spill register, we have the option of
     rematerializing the original value instead of spilling to the stack.  */
  if (!HARD_REGISTER_NUM_P (original_regno)
      && mode == PSEUDO_REGNO_MODE (original_regno))
    lra_copy_reg_equiv (new_regno, original_regno);
  lra_reg_info[new_regno].restore_rtx = regno_reg_rtx[original_regno];
  bitmap_set_bit (&lra_split_regs, new_regno);
  if (to != NULL)
    {
      /* Called from lra_split_hard_reg_for: put the restore after TO.  */
      lra_assert (next_usage_insns == NULL);
      usage_insn = to;
      after_p = TRUE;
    }
  else
    {
      /* We need check_only_regs only inside the inheritance pass.  */
      bitmap_set_bit (&check_only_regs, new_regno);
      bitmap_set_bit (&check_only_regs, original_regno);
      after_p = usage_insns[original_regno].after_p;
      /* Walk the usage chain: rewrite any debug insns to use the split
	 pseudo; the final (non-list) element is where the restore
	 insn is placed.  */
      for (;;)
	{
	  if (GET_CODE (next_usage_insns) != INSN_LIST)
	    {
	      usage_insn = next_usage_insns;
	      break;
	    }
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	  lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
				 true);
	  lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "    Split reuse change %d->%d:\n",
		       original_regno, new_regno);
	      dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
	    }
	}
    }
  lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
  lra_assert (usage_insn != insn || (after_p && before_p));
  /* Emit the restore at the last use and the save at INSN.  */
  lra_process_new_insns (as_a <rtx_insn *> (usage_insn),
			 after_p ? NULL : restore,
			 after_p ? restore : NULL,
			 call_save_p
			 ?  "Add reg<-save" : "Add reg<-split");
  lra_process_new_insns (insn, before_p ? save : NULL,
			 before_p ? NULL : save,
			 call_save_p
			 ?  "Add save<-reg" : "Add split<-reg");
  if (nregs > 1 || original_regno < FIRST_PSEUDO_REGISTER)
    /* If we are trying to split multi-register.  We should check
       conflicts on the next assignment sub-pass.  IRA can allocate on
       sub-register levels, LRA do this on pseudos level right now and
       this discrepancy may create allocation conflicts after
       splitting.

       If we are trying to split hard register we should also check conflicts
       as such splitting can create artificial conflict of the hard register
       with another pseudo because of simplified conflict calculation in
       LRA.  */
    check_and_force_assignment_correctness_p = true;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  ))))))))))))))))))))))))))))))))))))))))))))))))\n");
  return true;
}
   6024 
   6025 /* Split a hard reg for reload pseudo REGNO having RCLASS and living
   6026    in the range [FROM, TO].  Return true if did a split.  Otherwise,
   6027    return false.  */
bool
spill_hard_reg_in_range (int regno, enum reg_class rclass, rtx_insn *from, rtx_insn *to)
{
  int i, hard_regno;
  int rclass_size;
  rtx_insn *insn;
  unsigned int uid;
  bitmap_iterator bi;
  HARD_REG_SET ignore;

  lra_assert (from != NULL && to != NULL);
  /* Collect every hard register mentioned, explicitly or implicitly,
     by any insn referring to REGNO; those cannot be split for it.  */
  ignore = lra_no_alloc_regs;
  EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
    {
      lra_insn_recog_data_t id = lra_insn_recog_data[uid];
      struct lra_static_insn_data *static_id = id->insn_static_data;
      struct lra_insn_reg *reg;

      for (reg = id->regs; reg != NULL; reg = reg->next)
	if (reg->regno < FIRST_PSEUDO_REGISTER)
	  SET_HARD_REG_BIT (ignore, reg->regno);
      for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
	SET_HARD_REG_BIT (ignore, reg->regno);
    }
  /* Try each hard register of RCLASS in allocation order.  */
  rclass_size = ira_class_hard_regs_num[rclass];
  for (i = 0; i < rclass_size; i++)
    {
      hard_regno = ira_class_hard_regs[rclass][i];
      /* The candidate must conflict with REGNO (i.e. be live across
	 its range) and must not be in the ignore set.  */
      if (! TEST_HARD_REG_BIT (lra_reg_info[regno].conflict_hard_regs, hard_regno)
	  || TEST_HARD_REG_BIT (ignore, hard_regno))
	continue;
      /* Verify no insn inside [FROM, TO] references the candidate;
	 the scan exits early (by break) at the first reference.  */
      for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
	{
	  struct lra_static_insn_data *static_id;
	  struct lra_insn_reg *reg;

	  if (!INSN_P (insn))
	      continue;
	  if (bitmap_bit_p (&lra_reg_info[hard_regno].insn_bitmap,
			    INSN_UID (insn)))
	    break;
	  static_id = lra_get_insn_recog_data (insn)->insn_static_data;
	  for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
	    if (reg->regno == hard_regno)
	      break;
	  if (reg != NULL)
	    break;
	}
      /* INSN reaching past TO means the whole range was clean.  */
      if (insn != NEXT_INSN (to))
	continue;
      if (split_reg (TRUE, hard_regno, from, NULL, to))
	return true;
    }
  return false;
}
   6083 
   6084 /* Recognize that we need a split transformation for insn INSN, which
   6085    defines or uses REGNO in its insn biggest MODE (we use it only if
   6086    REGNO is a hard register).  POTENTIAL_RELOAD_HARD_REGS contains
   6087    hard registers which might be used for reloads since the EBB end.
   Put the save before INSN if BEFORE_P is true.  MAX_UID is the
   maximal uid before starting INSN processing.  Return true if we
   succeed in such transformation.  */
   6091 static bool
   6092 split_if_necessary (int regno, machine_mode mode,
   6093 		    HARD_REG_SET potential_reload_hard_regs,
   6094 		    bool before_p, rtx_insn *insn, int max_uid)
   6095 {
   6096   bool res = false;
   6097   int i, nregs = 1;
   6098   rtx next_usage_insns;
   6099 
   6100   if (regno < FIRST_PSEUDO_REGISTER)
   6101     nregs = hard_regno_nregs (regno, mode);
   6102   for (i = 0; i < nregs; i++)
   6103     if (usage_insns[regno + i].check == curr_usage_insns_check
   6104 	&& (next_usage_insns = usage_insns[regno + i].insns) != NULL_RTX
   6105 	/* To avoid processing the register twice or more.  */
   6106 	&& ((GET_CODE (next_usage_insns) != INSN_LIST
   6107 	     && INSN_UID (next_usage_insns) < max_uid)
   6108 	    || (GET_CODE (next_usage_insns) == INSN_LIST
   6109 		&& (INSN_UID (XEXP (next_usage_insns, 0)) < max_uid)))
   6110 	&& need_for_split_p (potential_reload_hard_regs, regno + i)
   6111 	&& split_reg (before_p, regno + i, insn, next_usage_insns, NULL))
   6112     res = true;
   6113   return res;
   6114 }
   6115 
   6116 /* Return TRUE if rtx X is considered as an invariant for
   6117    inheritance.  */
   6118 static bool
   6119 invariant_p (const_rtx x)
   6120 {
   6121   machine_mode mode;
   6122   const char *fmt;
   6123   enum rtx_code code;
   6124   int i, j;
   6125 
   6126   if (side_effects_p (x))
   6127     return false;
   6128 
   6129   code = GET_CODE (x);
   6130   mode = GET_MODE (x);
   6131   if (code == SUBREG)
   6132     {
   6133       x = SUBREG_REG (x);
   6134       code = GET_CODE (x);
   6135       mode = wider_subreg_mode (mode, GET_MODE (x));
   6136     }
   6137 
   6138   if (MEM_P (x))
   6139     return false;
   6140 
   6141   if (REG_P (x))
   6142     {
   6143       int i, nregs, regno = REGNO (x);
   6144 
   6145       if (regno >= FIRST_PSEUDO_REGISTER || regno == STACK_POINTER_REGNUM
   6146 	  || TEST_HARD_REG_BIT (eliminable_regset, regno)
   6147 	  || GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
   6148 	return false;
   6149       nregs = hard_regno_nregs (regno, mode);
   6150       for (i = 0; i < nregs; i++)
   6151 	if (! fixed_regs[regno + i]
   6152 	    /* A hard register may be clobbered in the current insn
   6153 	       but we can ignore this case because if the hard
   6154 	       register is used it should be set somewhere after the
   6155 	       clobber.  */
   6156 	    || bitmap_bit_p (&invalid_invariant_regs, regno + i))
   6157 	  return false;
   6158     }
   6159   fmt = GET_RTX_FORMAT (code);
   6160   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   6161     {
   6162       if (fmt[i] == 'e')
   6163 	{
   6164 	  if (! invariant_p (XEXP (x, i)))
   6165 	    return false;
   6166 	}
   6167       else if (fmt[i] == 'E')
   6168 	{
   6169 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
   6170 	    if (! invariant_p (XVECEXP (x, i, j)))
   6171 	      return false;
   6172 	}
   6173     }
   6174   return true;
   6175 }
   6176 
/* We have 'dest_reg <- invariant'.  Let us try to make an invariant
   inheritance transformation (using dest_reg instead of the invariant
   in a subsequent insn).  */
static bool
process_invariant_for_inheritance (rtx dst_reg, rtx invariant_rtx)
{
  invariant_ptr_t invariant_ptr;
  rtx_insn *insn, *new_insns;
  rtx insn_set, insn_reg, new_reg;
  int insn_regno;
  bool succ_p = false;
  int dst_regno = REGNO (dst_reg);
  machine_mode dst_mode = GET_MODE (dst_reg);
  enum reg_class cl = lra_get_allocno_class (dst_regno), insn_reg_cl;

  /* Look the invariant up in (or add it to) the invariant table.  */
  invariant_ptr = insert_invariant (invariant_rtx);
  if ((insn = invariant_ptr->insn) != NULL_RTX)
    {
      /* We have a subsequent insn using the invariant.  */
      insn_set = single_set (insn);
      lra_assert (insn_set != NULL);
      insn_reg = SET_DEST (insn_set);
      lra_assert (REG_P (insn_reg));
      insn_regno = REGNO (insn_reg);
      insn_reg_cl = lra_get_allocno_class (insn_regno);

      if (dst_mode == GET_MODE (insn_reg)
	  /* We should consider only result move reg insns which are
	     cheap.  */
	  && targetm.register_move_cost (dst_mode, cl, insn_reg_cl) == 2
	  && targetm.register_move_cost (dst_mode, cl, cl) == 2)
	{
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file,
		     "    [[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[\n");
	  /* Create the inheritance pseudo.  Its restore_rtx is the
	     pattern of the later insn, so the undo pass can
	     recompute the invariant if inheritance fails.  */
	  new_reg = lra_create_new_reg (dst_mode, dst_reg, cl, NULL,
					"invariant inheritance");
	  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
	  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
	  lra_reg_info[REGNO (new_reg)].restore_rtx = PATTERN (insn);
	  /* Emit 'new_reg <- dst_reg' after the current insn.  */
	  start_sequence ();
	  lra_emit_move (new_reg, dst_reg);
	  new_insns = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (curr_insn, NULL, new_insns,
				 "Add invariant inheritance<-original");
	  /* Replace the later invariant computation by a cheap move
	     from new_reg and delete the original computation.  */
	  start_sequence ();
	  lra_emit_move (SET_DEST (insn_set), new_reg);
	  new_insns = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (insn, NULL, new_insns,
				 "Changing reload<-inheritance");
	  lra_set_insn_deleted (insn);
	  succ_p = true;
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Invariant inheritance reuse change %d (bb%d):\n",
		       REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
	      dump_insn_slim (lra_dump_file, insn);
	      fprintf (lra_dump_file,
		       "	  ]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]\n");
	    }
	}
    }
  /* Remember the current insn as the latest one computing this
     invariant, for possible reuse later in the EBB.  */
  invariant_ptr->insn = curr_insn;
  return succ_p;
}
   6245 
   6246 /* Check only registers living at the current program point in the
   6247    current EBB.	 */
   6248 static bitmap_head live_regs;
   6249 
/* Update live info in EBB given by its HEAD and TAIL insns after
   inheritance/split transformation.  The function removes dead moves
   too.

   Walks the insns backwards from TAIL to HEAD, maintaining LIVE_REGS
   (restricted to CHECK_ONLY_REGS) and propagating the result into the
   DF live-in/live-out sets of the blocks it crosses.  Uses the
   file-scope state curr_insn, curr_id and curr_static_id.  */
static void
update_ebb_live_info (rtx_insn *head, rtx_insn *tail)
{
  unsigned int j;
  int i, regno;
  bool live_p;
  rtx_insn *prev_insn;
  rtx set;
  bool remove_p;
  basic_block last_bb, prev_bb, curr_bb;
  bitmap_iterator bi;
  struct lra_insn_reg *reg;
  edge e;
  edge_iterator ei;

  last_bb = BLOCK_FOR_INSN (tail);
  prev_bb = NULL;
  /* Backward scan: insns are visited from TAIL down to HEAD.  */
  for (curr_insn = tail;
       curr_insn != PREV_INSN (head);
       curr_insn = prev_insn)
    {
      prev_insn = PREV_INSN (curr_insn);
      /* We need to process empty blocks too.  They contain
	 NOTE_INSN_BASIC_BLOCK referring for the basic block.  */
      if (NOTE_P (curr_insn) && NOTE_KIND (curr_insn) != NOTE_INSN_BASIC_BLOCK)
	continue;
      curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (curr_bb != prev_bb)
	{
	  /* We crossed a block boundary (moving backwards): commit the
	     accumulated liveness into the DF info of the neighboring
	     blocks before restarting LIVE_REGS for CURR_BB.  */
	  if (prev_bb != NULL)
	    {
	      /* Update df_get_live_in (prev_bb):  */
	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
		if (bitmap_bit_p (&live_regs, j))
		  bitmap_set_bit (df_get_live_in (prev_bb), j);
		else
		  bitmap_clear_bit (df_get_live_in (prev_bb), j);
	    }
	  if (curr_bb != last_bb)
	    {
	      /* Update df_get_live_out (curr_bb): a reg is live out if
		 it is live in the fall-through continuation (LIVE_REGS)
		 or live into any other successor.  */
	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
		{
		  live_p = bitmap_bit_p (&live_regs, j);
		  if (! live_p)
		    FOR_EACH_EDGE (e, ei, curr_bb->succs)
		      if (bitmap_bit_p (df_get_live_in (e->dest), j))
			{
			  live_p = true;
			  break;
			}
		  if (live_p)
		    bitmap_set_bit (df_get_live_out (curr_bb), j);
		  else
		    bitmap_clear_bit (df_get_live_out (curr_bb), j);
		}
	    }
	  prev_bb = curr_bb;
	  /* Re-seed LIVE_REGS from the tracked subset of CURR_BB's
	     live-out set.  */
	  bitmap_and (&live_regs, &check_only_regs, df_get_live_out (curr_bb));
	}
      if (! NONDEBUG_INSN_P (curr_insn))
	continue;
      curr_id = lra_get_insn_recog_data (curr_insn);
      curr_static_id = curr_id->insn_static_data;
      remove_p = false;
      /* A single set into a tracked pseudo which is dead at this point
	 is a dead store; mark it for removal below (after its uses have
	 been accounted for).  */
      if ((set = single_set (curr_insn)) != NULL_RTX
	  && REG_P (SET_DEST (set))
	  && (regno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
	  && SET_DEST (set) != pic_offset_table_rtx
	  && bitmap_bit_p (&check_only_regs, regno)
	  && ! bitmap_bit_p (&live_regs, regno))
	remove_p = true;
      /* See which defined values die here.  */
      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	if (reg->type == OP_OUT && ! reg->subreg_p)
	  bitmap_clear_bit (&live_regs, reg->regno);
      for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type == OP_OUT && ! reg->subreg_p)
	  bitmap_clear_bit (&live_regs, reg->regno);
      if (curr_id->arg_hard_regs != NULL)
	/* Make clobbered argument hard registers die.  */
	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    bitmap_clear_bit (&live_regs, regno - FIRST_PSEUDO_REGISTER);
      /* Mark each used value as live.  */
      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_OUT
	    && bitmap_bit_p (&check_only_regs, reg->regno))
	  bitmap_set_bit (&live_regs, reg->regno);
      for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_OUT
	    && bitmap_bit_p (&check_only_regs, reg->regno))
	  bitmap_set_bit (&live_regs, reg->regno);
      if (curr_id->arg_hard_regs != NULL)
	/* Make used argument hard registers live.  */
	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	  if (regno < FIRST_PSEUDO_REGISTER
	      && bitmap_bit_p (&check_only_regs, regno))
	    bitmap_set_bit (&live_regs, regno);
      /* It is quite important to remove dead move insns because it
	 means removing dead store.  We don't need to process them for
	 constraints.  */
      if (remove_p)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "	    Removing dead insn:\n ");
	      dump_insn_slim (lra_dump_file, curr_insn);
	    }
	  lra_set_insn_deleted (curr_insn);
	}
    }
}
   6366 
/* The structure describes info to do an inheritance for the current
   insn.  We need to collect such info first before doing the
   transformations because the transformations change the insn
   internal representation.  */
struct to_inherit
{
  /* Original regno.  */
  int regno;
  /* Subsequent insns which can inherit original reg value.  */
  rtx insns;
};

/* Array containing all info for doing inheritance from the current
   insn.  Filled by add_to_inherit and consumed (then reset) in
   inherit_in_ebb.  */
static struct to_inherit to_inherit[LRA_MAX_INSN_RELOADS];

/* Number of elements in the previous array.  */
static int to_inherit_num;
   6385 
   6386 /* Add inheritance info REGNO and INSNS. Their meaning is described in
   6387    structure to_inherit.  */
   6388 static void
   6389 add_to_inherit (int regno, rtx insns)
   6390 {
   6391   int i;
   6392 
   6393   for (i = 0; i < to_inherit_num; i++)
   6394     if (to_inherit[i].regno == regno)
   6395       return;
   6396   lra_assert (to_inherit_num < LRA_MAX_INSN_RELOADS);
   6397   to_inherit[to_inherit_num].regno = regno;
   6398   to_inherit[to_inherit_num++].insns = insns;
   6399 }
   6400 
/* Return the last non-debug insn in basic block BB, or the block begin
   note if none.  */
static rtx_insn *
get_last_insertion_point (basic_block bb)
{
  rtx_insn *insn;

  /* Scan backwards; the NOTE_INSN_BASIC_BLOCK note bounds the scan,
     so for a well-formed BB the loop always returns something.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (NONDEBUG_INSN_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
      return insn;
  gcc_unreachable ();
}
   6413 
/* Set up RES by registers living on edges FROM except the edge (FROM,
   TO) or by registers set up in a jump insn in BB FROM.  Clobbers the
   file-scope CURR_ID when FROM ends in a jump.  */
static void
get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
{
  rtx_insn *last;
  struct lra_insn_reg *reg;
  edge e;
  edge_iterator ei;

  lra_assert (to != NULL);
  bitmap_clear (res);
  /* Union the live-in sets of every successor except TO.  */
  FOR_EACH_EDGE (e, ei, from->succs)
    if (e->dest != to)
      bitmap_ior_into (res, df_get_live_in (e->dest));
  last = get_last_insertion_point (from);
  if (! JUMP_P (last))
    return;
  /* If FROM ends in a jump, also add every reg the jump sets.  */
  curr_id = lra_get_insn_recog_data (last);
  for (reg = curr_id->regs; reg != NULL; reg = reg->next)
    if (reg->type != OP_IN)
      bitmap_set_bit (res, reg->regno);
}
   6437 
/* Used as a temporary result of some bitmap calculations.  */
static bitmap_head temp_bitmap;

/* We split for reloads of small class of hard regs.  The following
   defines how many hard regs the class should have to be qualified as
   small.  The code is mostly oriented to x86/x86-64 architecture
   where some insns need to use only specific register or pair of
   registers and these register can live in RTL explicitly, e.g. for
   parameter passing.  */
static const int max_small_class_regs_num = 2;
   6448 
/* Do inheritance/split transformations in EBB starting with HEAD and
   finishing on TAIL.  We process EBB insns in the reverse order.
   Return true if we did any inheritance/split transformation in the
   EBB.

   We should avoid excessive splitting which results in worse code
   because of inaccurate cost calculations for spilling new split
   pseudos in such case.  To achieve this we do splitting only if
   register pressure is high in given basic block and there are reload
   pseudos requiring hard registers.  We could do more register
   pressure calculations at any given program point to avoid necessary
   splitting even more but it is too expensive and the current approach
   works well enough.

   Uses and updates a lot of file-scope state: curr_insn, curr_id,
   curr_static_id, usage_insns, reloads_num, calls_num,
   check_only_regs and invalid_invariant_regs.  */
static bool
inherit_in_ebb (rtx_insn *head, rtx_insn *tail)
{
  int i, src_regno, dst_regno, nregs;
  bool change_p, succ_p, update_reloads_num_p;
  rtx_insn *prev_insn, *last_insn;
  rtx next_usage_insns, curr_set;
  enum reg_class cl;
  struct lra_insn_reg *reg;
  basic_block last_processed_bb, curr_bb = NULL;
  HARD_REG_SET potential_reload_hard_regs, live_hard_regs;
  bitmap to_process;
  unsigned int j;
  bitmap_iterator bi;
  bool head_p, after_p;

  change_p = false;
  curr_usage_insns_check++;
  clear_invariants ();
  reloads_num = calls_num = 0;
  for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
    last_call_for_abi[i] = 0;
  CLEAR_HARD_REG_SET (full_and_partial_call_clobbers);
  bitmap_clear (&check_only_regs);
  bitmap_clear (&invalid_invariant_regs);
  last_processed_bb = NULL;
  CLEAR_HARD_REG_SET (potential_reload_hard_regs);
  live_hard_regs = eliminable_regset | lra_no_alloc_regs;
  /* We don't process new insns generated in the loop.	*/
  for (curr_insn = tail; curr_insn != PREV_INSN (head); curr_insn = prev_insn)
    {
      prev_insn = PREV_INSN (curr_insn);
      if (BLOCK_FOR_INSN (curr_insn) != NULL)
	curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (last_processed_bb != curr_bb)
	{
	  /* We are at the end of BB.  Add qualified living
	     pseudos for potential splitting.  */
	  to_process = df_get_live_out (curr_bb);
	  if (last_processed_bb != NULL)
	    {
	      /* We are somewhere in the middle of EBB.	 */
	      get_live_on_other_edges (curr_bb, last_processed_bb,
				       &temp_bitmap);
	      to_process = &temp_bitmap;
	    }
	  last_processed_bb = curr_bb;
	  last_insn = get_last_insertion_point (curr_bb);
	  after_p = (! JUMP_P (last_insn)
		     && (! CALL_P (last_insn)
			 || (find_reg_note (last_insn,
					   REG_NORETURN, NULL_RTX) == NULL_RTX
			     && ! SIBLING_CALL_P (last_insn))));
	  CLEAR_HARD_REG_SET (potential_reload_hard_regs);
	  EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
	    {
	      if ((int) j >= lra_constraint_new_regno_start)
		break;
	      if (j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
		{
		  if (j < FIRST_PSEUDO_REGISTER)
		    SET_HARD_REG_BIT (live_hard_regs, j);
		  else
		    add_to_hard_reg_set (&live_hard_regs,
					 PSEUDO_REGNO_MODE (j),
					 reg_renumber[j]);
		  setup_next_usage_insn (j, last_insn, reloads_num, after_p);
		}
	    }
	}
      /* Pick out the source/destination regnos when the insn is a
	 single set with reg operands.  */
      src_regno = dst_regno = -1;
      curr_set = single_set (curr_insn);
      if (curr_set != NULL_RTX && REG_P (SET_DEST (curr_set)))
	dst_regno = REGNO (SET_DEST (curr_set));
      if (curr_set != NULL_RTX && REG_P (SET_SRC (curr_set)))
	src_regno = REGNO (SET_SRC (curr_set));
      update_reloads_num_p = true;
      if (src_regno < lra_constraint_new_regno_start
	  && src_regno >= FIRST_PSEUDO_REGISTER
	  && reg_renumber[src_regno] < 0
	  && dst_regno >= lra_constraint_new_regno_start
	  && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS)
	{
	  /* 'reload_pseudo <- original_pseudo'.  */
	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
	    reloads_num++;
	  update_reloads_num_p = false;
	  succ_p = false;
	  if (usage_insns[src_regno].check == curr_usage_insns_check
	      && (next_usage_insns = usage_insns[src_regno].insns) != NULL_RTX)
	    succ_p = inherit_reload_reg (false, src_regno, cl,
					 curr_insn, next_usage_insns);
	  if (succ_p)
	    change_p = true;
	  else
	    setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
	    potential_reload_hard_regs |= reg_class_contents[cl];
	}
      else if (src_regno < 0
	       && dst_regno >= lra_constraint_new_regno_start
	       && invariant_p (SET_SRC (curr_set))
	       && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS
	       && ! bitmap_bit_p (&invalid_invariant_regs, dst_regno)
	       && ! bitmap_bit_p (&invalid_invariant_regs,
				  ORIGINAL_REGNO(regno_reg_rtx[dst_regno])))
	{
	  /* 'reload_pseudo <- invariant'.  */
	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
	    reloads_num++;
	  update_reloads_num_p = false;
	  if (process_invariant_for_inheritance (SET_DEST (curr_set), SET_SRC (curr_set)))
	    change_p = true;
	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
	    potential_reload_hard_regs |= reg_class_contents[cl];
	}
      else if (src_regno >= lra_constraint_new_regno_start
	       && dst_regno < lra_constraint_new_regno_start
	       && dst_regno >= FIRST_PSEUDO_REGISTER
	       && reg_renumber[dst_regno] < 0
	       && (cl = lra_get_allocno_class (src_regno)) != NO_REGS
	       && usage_insns[dst_regno].check == curr_usage_insns_check
	       && (next_usage_insns
		   = usage_insns[dst_regno].insns) != NULL_RTX)
	{
	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
	    reloads_num++;
	  update_reloads_num_p = false;
	  /* 'original_pseudo <- reload_pseudo'.  */
	  if (! JUMP_P (curr_insn)
	      && inherit_reload_reg (true, dst_regno, cl,
				     curr_insn, next_usage_insns))
	    change_p = true;
	  /* Invalidate.  */
	  usage_insns[dst_regno].check = 0;
	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
	    potential_reload_hard_regs |= reg_class_contents[cl];
	}
      else if (INSN_P (curr_insn))
	{
	  int iter;
	  int max_uid = get_max_uid ();

	  curr_id = lra_get_insn_recog_data (curr_insn);
	  curr_static_id = curr_id->insn_static_data;
	  to_inherit_num = 0;
	  /* Process insn definitions.	*/
	  for (iter = 0; iter < 2; iter++)
	    for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
		 reg != NULL;
		 reg = reg->next)
	      if (reg->type != OP_IN
		  && (dst_regno = reg->regno) < lra_constraint_new_regno_start)
		{
		  if (dst_regno >= FIRST_PSEUDO_REGISTER && reg->type == OP_OUT
		      && reg_renumber[dst_regno] < 0 && ! reg->subreg_p
		      && usage_insns[dst_regno].check == curr_usage_insns_check
		      && (next_usage_insns
			  = usage_insns[dst_regno].insns) != NULL_RTX)
		    {
		      struct lra_insn_reg *r;

		      for (r = curr_id->regs; r != NULL; r = r->next)
			if (r->type != OP_OUT && r->regno == dst_regno)
			  break;
		      /* Don't do inheritance if the pseudo is also
			 used in the insn.  */
		      if (r == NULL)
			/* We cannot do inheritance right now
			   because the current insn reg info (chain
			   regs) can change after that.  */
			add_to_inherit (dst_regno, next_usage_insns);
		    }
		  /* We cannot process one reg twice here because of
		     usage_insns invalidation.  */
		  if ((dst_regno < FIRST_PSEUDO_REGISTER
		       || reg_renumber[dst_regno] >= 0)
		      && ! reg->subreg_p && reg->type != OP_IN)
		    {
		      HARD_REG_SET s;

		      if (split_if_necessary (dst_regno, reg->biggest_mode,
					      potential_reload_hard_regs,
					      false, curr_insn, max_uid))
			change_p = true;
		      CLEAR_HARD_REG_SET (s);
		      if (dst_regno < FIRST_PSEUDO_REGISTER)
			add_to_hard_reg_set (&s, reg->biggest_mode, dst_regno);
		      else
			add_to_hard_reg_set (&s, PSEUDO_REGNO_MODE (dst_regno),
					     reg_renumber[dst_regno]);
		      live_hard_regs &= ~s;
		      potential_reload_hard_regs &= ~s;
		    }
		  /* We should invalidate potential inheritance or
		     splitting for the current insn usages to the next
		     usage insns (see code below) as the output pseudo
		     prevents this.  */
		  if ((dst_regno >= FIRST_PSEUDO_REGISTER
		       && reg_renumber[dst_regno] < 0)
		      || (reg->type == OP_OUT && ! reg->subreg_p
			  && (dst_regno < FIRST_PSEUDO_REGISTER
			      || reg_renumber[dst_regno] >= 0)))
		    {
		      /* Invalidate and mark definitions.  */
		      if (dst_regno >= FIRST_PSEUDO_REGISTER)
			usage_insns[dst_regno].check = -(int) INSN_UID (curr_insn);
		      else
			{
			  nregs = hard_regno_nregs (dst_regno,
						    reg->biggest_mode);
			  for (i = 0; i < nregs; i++)
			    usage_insns[dst_regno + i].check
			      = -(int) INSN_UID (curr_insn);
			}
		    }
		}
	  /* Process clobbered call regs.  */
	  if (curr_id->arg_hard_regs != NULL)
	    for (i = 0; (dst_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	      if (dst_regno >= FIRST_PSEUDO_REGISTER)
		usage_insns[dst_regno - FIRST_PSEUDO_REGISTER].check
		  = -(int) INSN_UID (curr_insn);
	  if (! JUMP_P (curr_insn))
	    for (i = 0; i < to_inherit_num; i++)
	      if (inherit_reload_reg (true, to_inherit[i].regno,
				      ALL_REGS, curr_insn,
				      to_inherit[i].insns))
	      change_p = true;
	  /* For a call, record its ABI clobbers and try to restore a
	     pseudo from the call result when a REG_RETURNED note says
	     the value is returned unchanged.  */
	  if (CALL_P (curr_insn))
	    {
	      rtx cheap, pat, dest;
	      rtx_insn *restore;
	      int regno, hard_regno;

	      calls_num++;
	      function_abi callee_abi = insn_callee_abi (curr_insn);
	      last_call_for_abi[callee_abi.id ()] = calls_num;
	      full_and_partial_call_clobbers
		|= callee_abi.full_and_partial_reg_clobbers ();
	      if ((cheap = find_reg_note (curr_insn,
					  REG_RETURNED, NULL_RTX)) != NULL_RTX
		  && ((cheap = XEXP (cheap, 0)), true)
		  && (regno = REGNO (cheap)) >= FIRST_PSEUDO_REGISTER
		  && (hard_regno = reg_renumber[regno]) >= 0
		  && usage_insns[regno].check == curr_usage_insns_check
		  /* If there are pending saves/restores, the
		     optimization is not worth.	 */
		  && usage_insns[regno].calls_num == calls_num - 1
		  && callee_abi.clobbers_reg_p (GET_MODE (cheap), hard_regno))
		{
		  /* Restore the pseudo from the call result as
		     REG_RETURNED note says that the pseudo value is
		     in the call result and the pseudo is an argument
		     of the call.  */
		  pat = PATTERN (curr_insn);
		  if (GET_CODE (pat) == PARALLEL)
		    pat = XVECEXP (pat, 0, 0);
		  dest = SET_DEST (pat);
		  /* For multiple return values dest is PARALLEL.
		     Currently we handle only single return value case.  */
		  if (REG_P (dest))
		    {
		      start_sequence ();
		      emit_move_insn (cheap, copy_rtx (dest));
		      restore = get_insns ();
		      end_sequence ();
		      lra_process_new_insns (curr_insn, NULL, restore,
					     "Inserting call parameter restore");
		      /* We don't need to save/restore of the pseudo from
			 this call.	 */
		      usage_insns[regno].calls_num = calls_num;
		      remove_from_hard_reg_set
			(&full_and_partial_call_clobbers,
			 GET_MODE (cheap), hard_regno);
		      bitmap_set_bit (&check_only_regs, regno);
		    }
		}
	    }
	  to_inherit_num = 0;
	  /* Process insn usages.  */
	  for (iter = 0; iter < 2; iter++)
	    for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
		 reg != NULL;
		 reg = reg->next)
	      if ((reg->type != OP_OUT
		   || (reg->type == OP_OUT && reg->subreg_p))
		  && (src_regno = reg->regno) < lra_constraint_new_regno_start)
		{
		  if (src_regno >= FIRST_PSEUDO_REGISTER
		      && reg_renumber[src_regno] < 0 && reg->type == OP_IN)
		    {
		      if (usage_insns[src_regno].check == curr_usage_insns_check
			  && (next_usage_insns
			      = usage_insns[src_regno].insns) != NULL_RTX
			  && NONDEBUG_INSN_P (curr_insn))
			add_to_inherit (src_regno, next_usage_insns);
		      else if (usage_insns[src_regno].check
			       != -(int) INSN_UID (curr_insn))
			/* Add usages but only if the reg is not set up
			   in the same insn.  */
			add_next_usage_insn (src_regno, curr_insn, reloads_num);
		    }
		  else if (src_regno < FIRST_PSEUDO_REGISTER
			   || reg_renumber[src_regno] >= 0)
		    {
		      bool before_p;
		      rtx_insn *use_insn = curr_insn;

		      before_p = (JUMP_P (curr_insn)
				  || (CALL_P (curr_insn) && reg->type == OP_IN));
		      if (NONDEBUG_INSN_P (curr_insn)
			  && (! JUMP_P (curr_insn) || reg->type == OP_IN)
			  && split_if_necessary (src_regno, reg->biggest_mode,
						 potential_reload_hard_regs,
						 before_p, curr_insn, max_uid))
			{
			  if (reg->subreg_p)
			    check_and_force_assignment_correctness_p = true;
			  change_p = true;
			  /* Invalidate. */
			  usage_insns[src_regno].check = 0;
			  if (before_p)
			    use_insn = PREV_INSN (curr_insn);
			}
		      if (NONDEBUG_INSN_P (curr_insn))
			{
			  if (src_regno < FIRST_PSEUDO_REGISTER)
			    add_to_hard_reg_set (&live_hard_regs,
						 reg->biggest_mode, src_regno);
			  else
			    add_to_hard_reg_set (&live_hard_regs,
						 PSEUDO_REGNO_MODE (src_regno),
						 reg_renumber[src_regno]);
			}
		      if (src_regno >= FIRST_PSEUDO_REGISTER)
			add_next_usage_insn (src_regno, use_insn, reloads_num);
		      else
			{
			  for (i = 0; i < hard_regno_nregs (src_regno, reg->biggest_mode); i++)
			    add_next_usage_insn (src_regno + i, use_insn, reloads_num);
			}
		    }
		}
	  /* Process used call regs.  */
	  if (curr_id->arg_hard_regs != NULL)
	    for (i = 0; (src_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	      if (src_regno < FIRST_PSEUDO_REGISTER)
		{
	           SET_HARD_REG_BIT (live_hard_regs, src_regno);
	           add_next_usage_insn (src_regno, curr_insn, reloads_num);
		}
	  for (i = 0; i < to_inherit_num; i++)
	    {
	      src_regno = to_inherit[i].regno;
	      if (inherit_reload_reg (false, src_regno, ALL_REGS,
				      curr_insn, to_inherit[i].insns))
		change_p = true;
	      else
		setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
	    }
	}
      /* A reload-pseudo move not handled above still contributes to
	 the reload pressure bookkeeping.  */
      if (update_reloads_num_p
	  && NONDEBUG_INSN_P (curr_insn) && curr_set != NULL_RTX)
	{
	  int regno = -1;
	  if ((REG_P (SET_DEST (curr_set))
	       && (regno = REGNO (SET_DEST (curr_set))) >= lra_constraint_new_regno_start
	       && reg_renumber[regno] < 0
	       && (cl = lra_get_allocno_class (regno)) != NO_REGS)
	      || (REG_P (SET_SRC (curr_set))
	          && (regno = REGNO (SET_SRC (curr_set))) >= lra_constraint_new_regno_start
	          && reg_renumber[regno] < 0
	          && (cl = lra_get_allocno_class (regno)) != NO_REGS))
	    {
	      if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
		reloads_num++;
	      if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
		potential_reload_hard_regs |= reg_class_contents[cl];
	    }
	}
      if (NONDEBUG_INSN_P (curr_insn))
	{
	  int regno;

	  /* Invalidate invariants with changed regs.  */
	  curr_id = lra_get_insn_recog_data (curr_insn);
	  for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	    if (reg->type != OP_IN)
	      {
		bitmap_set_bit (&invalid_invariant_regs, reg->regno);
		bitmap_set_bit (&invalid_invariant_regs,
				ORIGINAL_REGNO (regno_reg_rtx[reg->regno]));
	      }
	  curr_static_id = curr_id->insn_static_data;
	  for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	    if (reg->type != OP_IN)
	      bitmap_set_bit (&invalid_invariant_regs, reg->regno);
	  if (curr_id->arg_hard_regs != NULL)
	    for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	      if (regno >= FIRST_PSEUDO_REGISTER)
		bitmap_set_bit (&invalid_invariant_regs,
				regno - FIRST_PSEUDO_REGISTER);
	}
      /* We reached the start of the current basic block.  */
      if (prev_insn == NULL_RTX || prev_insn == PREV_INSN (head)
	  || BLOCK_FOR_INSN (prev_insn) != curr_bb)
	{
	  /* We reached the beginning of the current block -- do
	     rest of splitting in the current BB.  */
	  to_process = df_get_live_in (curr_bb);
	  if (BLOCK_FOR_INSN (head) != curr_bb)
	    {
	      /* We are somewhere in the middle of EBB.	 */
	      get_live_on_other_edges (EDGE_PRED (curr_bb, 0)->src,
				       curr_bb, &temp_bitmap);
	      to_process = &temp_bitmap;
	    }
	  head_p = true;
	  EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
	    {
	      if ((int) j >= lra_constraint_new_regno_start)
		break;
	      if (((int) j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
		  && usage_insns[j].check == curr_usage_insns_check
		  && (next_usage_insns = usage_insns[j].insns) != NULL_RTX)
		{
		  if (need_for_split_p (potential_reload_hard_regs, j))
		    {
		      if (lra_dump_file != NULL && head_p)
			{
			  fprintf (lra_dump_file,
				   "  ----------------------------------\n");
			  head_p = false;
			}
		      if (split_reg (false, j, bb_note (curr_bb),
				     next_usage_insns, NULL))
			change_p = true;
		    }
		  usage_insns[j].check = 0;
		}
	    }
	}
    }
  return change_p;
}
   6908 
/* This value affects EBB forming.  If probability of edge from EBB to
   a BB is not greater than the following value, we don't add the BB
   to EBB.  The cutoff is a percentage (see
   param_lra_inheritance_ebb_probability_cutoff) scaled to
   REG_BR_PROB_BASE units.  */
#define EBB_PROBABILITY_CUTOFF \
  ((REG_BR_PROB_BASE * param_lra_inheritance_ebb_probability_cutoff) / 100)

/* Current number of inheritance/split iteration.  */
int lra_inheritance_iter;
   6917 
/* Entry function for inheritance/split pass.  Forms EBBs along
   fall-through edges and runs inherit_in_ebb (and, when it changed
   anything, update_ebb_live_info) on each of them.  Gives up after
   LRA_MAX_INHERITANCE_PASSES iterations.  */
void
lra_inheritance (void)
{
  int i;
  basic_block bb, start_bb;
  edge e;

  lra_inheritance_iter++;
  if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
    return;
  timevar_push (TV_LRA_INHERITANCE);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "\n********** Inheritance #%d: **********\n\n",
	     lra_inheritance_iter);
  curr_usage_insns_check = 0;
  usage_insns = XNEWVEC (struct usage_insns, lra_constraint_new_regno_start);
  for (i = 0; i < lra_constraint_new_regno_start; i++)
    usage_insns[i].check = 0;
  bitmap_initialize (&check_only_regs, &reg_obstack);
  bitmap_initialize (&invalid_invariant_regs, &reg_obstack);
  bitmap_initialize (&live_regs, &reg_obstack);
  bitmap_initialize (&temp_bitmap, &reg_obstack);
  bitmap_initialize (&ebb_global_regs, &reg_obstack);
  FOR_EACH_BB_FN (bb, cfun)
    {
      start_bb = bb;
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "EBB");
      /* Form an EBB starting with BB.  */
      bitmap_clear (&ebb_global_regs);
      bitmap_ior_into (&ebb_global_regs, df_get_live_in (bb));
      for (;;)
	{
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, " %d", bb->index);
	  /* Stop extending the EBB at the exit block, at a label
	     (the next block has other predecessors), when there is
	     no fall-through edge, or when the fall-through edge is
	     too improbable.  */
	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	      || LABEL_P (BB_HEAD (bb->next_bb)))
	    break;
	  e = find_fallthru_edge (bb->succs);
	  if (! e)
	    break;
	  if (e->probability.initialized_p ()
	      && e->probability.to_reg_br_prob_base () < EBB_PROBABILITY_CUTOFF)
	    break;
	  bb = bb->next_bb;
	}
      bitmap_ior_into (&ebb_global_regs, df_get_live_out (bb));
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "\n");
      if (inherit_in_ebb (BB_HEAD (start_bb), BB_END (bb)))
	/* Remember that the EBB head and tail can change in
	   inherit_in_ebb.  */
	update_ebb_live_info (BB_HEAD (start_bb), BB_END (bb));
    }
  bitmap_release (&ebb_global_regs);
  bitmap_release (&temp_bitmap);
  bitmap_release (&live_regs);
  bitmap_release (&invalid_invariant_regs);
  bitmap_release (&check_only_regs);
  free (usage_insns);

  timevar_pop (TV_LRA_INHERITANCE);
}
   6982 
   6983 
   6984 
/* This page contains code to undo failed inheritance/split
   transformations.  */

/* Current number of iterations undoing inheritance/split.  */
int lra_undo_inheritance_iter;
   6991 
   6992 /* Fix BB live info LIVE after removing pseudos created on pass doing
   6993    inheritance/split which are REMOVED_PSEUDOS.	 */
   6994 static void
   6995 fix_bb_live_info (bitmap live, bitmap removed_pseudos)
   6996 {
   6997   unsigned int regno;
   6998   bitmap_iterator bi;
   6999 
   7000   EXECUTE_IF_SET_IN_BITMAP (removed_pseudos, 0, regno, bi)
   7001     if (bitmap_clear_bit (live, regno)
   7002 	&& REG_P (lra_reg_info[regno].restore_rtx))
   7003       bitmap_set_bit (live, REGNO (lra_reg_info[regno].restore_rtx));
   7004 }
   7005 
   7006 /* Return regno of the (subreg of) REG. Otherwise, return a negative
   7007    number.  */
   7008 static int
   7009 get_regno (rtx reg)
   7010 {
   7011   if (GET_CODE (reg) == SUBREG)
   7012     reg = SUBREG_REG (reg);
   7013   if (REG_P (reg))
   7014     return REGNO (reg);
   7015   return -1;
   7016 }
   7017 
   7018 /* Delete a move INSN with destination reg DREGNO and a previous
   7019    clobber insn with the same regno.  The inheritance/split code can
   7020    generate moves with preceding clobber and when we delete such moves
   7021    we should delete the clobber insn too to keep the correct life
   7022    info.  */
   7023 static void
   7024 delete_move_and_clobber (rtx_insn *insn, int dregno)
   7025 {
   7026   rtx_insn *prev_insn = PREV_INSN (insn);
   7027 
   7028   lra_set_insn_deleted (insn);
   7029   lra_assert (dregno >= 0);
   7030   if (prev_insn != NULL && NONDEBUG_INSN_P (prev_insn)
   7031       && GET_CODE (PATTERN (prev_insn)) == CLOBBER
   7032       && dregno == get_regno (XEXP (PATTERN (prev_insn), 0)))
   7033     lra_set_insn_deleted (prev_insn);
   7034 }
   7035 
   7036 /* Remove inheritance/split pseudos which are in REMOVE_PSEUDOS and
   7037    return true if we did any change.  The undo transformations for
   7038    inheritance looks like
   7039       i <- i2
   7040       p <- i	  =>   p <- i2
   7041    or removing
   7042       p <- i, i <- p, and i <- i3
   7043    where p is original pseudo from which inheritance pseudo i was
   7044    created, i and i3 are removed inheritance pseudos, i2 is another
   7045    not removed inheritance pseudo.  All split pseudos or other
   7046    occurrences of removed inheritance pseudos are changed on the
   7047    corresponding original pseudos.
   7048 
   7049    The function also schedules insns changed and created during
   7050    inheritance/split pass for processing by the subsequent constraint
   7051    pass.  */
static bool
remove_inheritance_pseudos (bitmap remove_pseudos)
{
  basic_block bb;
  int regno, sregno, prev_sregno, dregno;
  rtx restore_rtx;
  rtx set, prev_set;
  rtx_insn *prev_insn;
  bool change_p, done_p;

  change_p = ! bitmap_empty_p (remove_pseudos);
  /* We cannot finish the function right away if CHANGE_P is true
     because we need to mark insns affected by previous
     inheritance/split pass for processing by the subsequent
     constraint pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* The removed pseudos are no longer live at block boundaries;
	 their original registers become live instead where
	 applicable.  */
      fix_bb_live_info (df_get_live_in (bb), remove_pseudos);
      fix_bb_live_info (df_get_live_out (bb), remove_pseudos);
      /* CURR_INSN (and CURR_ID below) are working variables declared
	 elsewhere in this file.  Walk insns in reverse order.  */
      FOR_BB_INSNS_REVERSE (bb, curr_insn)
	{
	  if (! INSN_P (curr_insn))
	    continue;
	  done_p = false;
	  sregno = dregno = -1;
	  /* For a single-set insn, extract (subreg-stripped) regnos of
	     both sides; they stay -1 for anything else.  */
	  if (change_p && NONDEBUG_INSN_P (curr_insn)
	      && (set = single_set (curr_insn)) != NULL_RTX)
	    {
	      dregno = get_regno (SET_DEST (set));
	      sregno = get_regno (SET_SRC (set));
	    }

	  if (sregno >= 0 && dregno >= 0)
	    {
	      /* A non-REG restore rtx marks an invariant inheritance
		 pseudo (its value is an invariant expression, not a
		 register).  */
	      if (bitmap_bit_p (remove_pseudos, dregno)
		  && ! REG_P (lra_reg_info[dregno].restore_rtx))
		{
		  /* invariant inheritance pseudo <- original pseudo */
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "	   Removing invariant inheritance:\n");
		      dump_insn_slim (lra_dump_file, curr_insn);
		      fprintf (lra_dump_file, "\n");
		    }
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if (bitmap_bit_p (remove_pseudos, sregno)
		       && ! REG_P (lra_reg_info[sregno].restore_rtx))
		{
		  /* reload pseudo <- invariant inheritance pseudo */
		  start_sequence ();
		  /* We cannot just change the source.  It might be
		     an insn different from the move.  */
		  emit_insn (lra_reg_info[sregno].restore_rtx);
		  rtx_insn *new_insns = get_insns ();
		  end_sequence ();
		  lra_assert (single_set (new_insns) != NULL
			      && SET_DEST (set) == SET_DEST (single_set (new_insns)));
		  lra_process_new_insns (curr_insn, NULL, new_insns,
					 "Changing reload<-invariant inheritance");
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if ((bitmap_bit_p (remove_pseudos, sregno)
			&& (get_regno (lra_reg_info[sregno].restore_rtx) == dregno
			    || (bitmap_bit_p (remove_pseudos, dregno)
				&& get_regno (lra_reg_info[sregno].restore_rtx) >= 0
				&& (get_regno (lra_reg_info[sregno].restore_rtx)
				    == get_regno (lra_reg_info[dregno].restore_rtx)))))
		       || (bitmap_bit_p (remove_pseudos, dregno)
			   && get_regno (lra_reg_info[dregno].restore_rtx) == sregno))
		/* One of the following cases:
		     original <- removed inheritance pseudo
		     removed inherit pseudo <- another removed inherit pseudo
		     removed inherit pseudo <- original pseudo
		   Or
		     removed_split_pseudo <- original_reg
		     original_reg <- removed_split_pseudo
		   In all of them the move becomes redundant once the
		   removed pseudo is replaced by its original, so just
		   delete it.  */
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "	   Removing %s:\n",
			       bitmap_bit_p (&lra_split_regs, sregno)
			       || bitmap_bit_p (&lra_split_regs, dregno)
			       ? "split" : "inheritance");
		      dump_insn_slim (lra_dump_file, curr_insn);
		    }
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if (bitmap_bit_p (remove_pseudos, sregno)
		       && bitmap_bit_p (&lra_inheritance_pseudos, sregno))
		{
		  /* Search the following pattern:
		       inherit_or_split_pseudo1 <- inherit_or_split_pseudo2
		       original_pseudo <- inherit_or_split_pseudo1
		    where the 2nd insn is the current insn and
		    inherit_or_split_pseudo2 is not removed.  If it is found,
		    change the current insn onto:
		       original_pseudo <- inherit_or_split_pseudo2.  */
		  /* Skip over debug insns and notes to the previous
		     real insn.  */
		  for (prev_insn = PREV_INSN (curr_insn);
		       prev_insn != NULL_RTX && ! NONDEBUG_INSN_P (prev_insn);
		       prev_insn = PREV_INSN (prev_insn))
		    ;
		  if (prev_insn != NULL_RTX && BLOCK_FOR_INSN (prev_insn) == bb
		      && (prev_set = single_set (prev_insn)) != NULL_RTX
		      /* There should be no subregs in insn we are
			 searching because only the original reg might
			 be in subreg when we changed the mode of
			 load/store for splitting.  */
		      && REG_P (SET_DEST (prev_set))
		      && REG_P (SET_SRC (prev_set))
		      && (int) REGNO (SET_DEST (prev_set)) == sregno
		      && ((prev_sregno = REGNO (SET_SRC (prev_set)))
			  >= FIRST_PSEUDO_REGISTER)
		      && (lra_reg_info[prev_sregno].restore_rtx == NULL_RTX
			  ||
			  /* As we consider chain of inheritance or
			     splitting described in above comment we should
			     check that sregno and prev_sregno were
			     inheritance/split pseudos created from the
			     same original regno.  */
			  (get_regno (lra_reg_info[sregno].restore_rtx) >= 0
			   && (get_regno (lra_reg_info[sregno].restore_rtx)
			       == get_regno (lra_reg_info[prev_sregno].restore_rtx))))
		      && ! bitmap_bit_p (remove_pseudos, prev_sregno))
		    {
		      lra_assert (GET_MODE (SET_SRC (prev_set))
				  == GET_MODE (regno_reg_rtx[sregno]));
		      /* Although we have a single set, the insn can
			 contain more than one sregno register occurrence
			 as a source.  Change all occurrences.  */
		      lra_substitute_pseudo_within_insn (curr_insn, sregno,
							 SET_SRC (prev_set),
							 false);
		      /* As we are finishing with processing the insn
			 here, check the destination too as it might be
			 an inheritance pseudo for another pseudo.  */
		      if (bitmap_bit_p (remove_pseudos, dregno)
			  && bitmap_bit_p (&lra_inheritance_pseudos, dregno)
			  && (restore_rtx
			      = lra_reg_info[dregno].restore_rtx) != NULL_RTX)
			{
			  if (GET_CODE (SET_DEST (set)) == SUBREG)
			    SUBREG_REG (SET_DEST (set)) = restore_rtx;
			  else
			    SET_DEST (set) = restore_rtx;
			}
		      /* Reschedule the changed insn for the next
		         constraint pass with no alternative chosen.  */
		      lra_push_insn_and_update_insn_regno_info (curr_insn);
		      lra_set_used_insn_alternative_by_uid
			(INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
		      done_p = true;
		      if (lra_dump_file != NULL)
			{
			  fprintf (lra_dump_file, "    Change reload insn:\n");
			  dump_insn_slim (lra_dump_file, curr_insn);
			}
		    }
		}
	    }
	  if (! done_p)
	    {
	      /* The insn was not handled above: substitute any removed
		 pseudos it references back to their restore rtx, and
		 reschedule it for the constraint pass if it still
		 refers to surviving inheritance/split pseudos.  */
	      struct lra_insn_reg *reg;
	      bool restored_regs_p = false;
	      bool kept_regs_p = false;

	      curr_id = lra_get_insn_recog_data (curr_insn);
	      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
		{
		  regno = reg->regno;
		  restore_rtx = lra_reg_info[regno].restore_rtx;
		  if (restore_rtx != NULL_RTX)
		    {
		      if (change_p && bitmap_bit_p (remove_pseudos, regno))
			{
			  lra_substitute_pseudo_within_insn
			    (curr_insn, regno, restore_rtx, false);
			  restored_regs_p = true;
			}
		      else
			kept_regs_p = true;
		    }
		}
	      if (NONDEBUG_INSN_P (curr_insn) && kept_regs_p)
		{
		  /* The instruction has changed since the previous
		     constraints pass.  */
		  lra_push_insn_and_update_insn_regno_info (curr_insn);
		  lra_set_used_insn_alternative_by_uid
		    (INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
		}
	      else if (restored_regs_p)
		/* The instruction has been restored to the form that
		   it had during the previous constraints pass.  */
		lra_update_insn_regno_info (curr_insn);
	      if (restored_regs_p && lra_dump_file != NULL)
		{
		  fprintf (lra_dump_file, "   Insn after restoring regs:\n");
		  dump_insn_slim (lra_dump_file, curr_insn);
		}
	    }
	}
    }
  return change_p;
}
   7258 
   7259 /* If optional reload pseudos failed to get a hard register or was not
   7260    inherited, it is better to remove optional reloads.  We do this
   7261    transformation after undoing inheritance to figure out necessity to
   7262    remove optional reloads easier.  Return true if we do any
   7263    change.  */
   7264 static bool
   7265 undo_optional_reloads (void)
   7266 {
   7267   bool change_p, keep_p;
   7268   unsigned int regno, uid;
   7269   bitmap_iterator bi, bi2;
   7270   rtx_insn *insn;
   7271   rtx set, src, dest;
   7272   auto_bitmap removed_optional_reload_pseudos (&reg_obstack);
   7273 
   7274   bitmap_copy (removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
   7275   EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
   7276     {
   7277       keep_p = false;
   7278       /* Keep optional reloads from previous subpasses.  */
   7279       if (lra_reg_info[regno].restore_rtx == NULL_RTX
   7280 	  /* If the original pseudo changed its allocation, just
   7281 	     removing the optional pseudo is dangerous as the original
   7282 	     pseudo will have longer live range.  */
   7283 	  || reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] >= 0)
   7284 	keep_p = true;
   7285       else if (reg_renumber[regno] >= 0)
   7286 	EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi2)
   7287 	  {
   7288 	    insn = lra_insn_recog_data[uid]->insn;
   7289 	    if ((set = single_set (insn)) == NULL_RTX)
   7290 	      continue;
   7291 	    src = SET_SRC (set);
   7292 	    dest = SET_DEST (set);
   7293 	    if ((! REG_P (src) && ! SUBREG_P (src))
   7294 		|| (! REG_P (dest) && ! SUBREG_P (dest)))
   7295 	      continue;
   7296 	    if (get_regno (dest) == (int) regno
   7297 		/* Ignore insn for optional reloads itself.  */
   7298 		&& (get_regno (lra_reg_info[regno].restore_rtx)
   7299 		    != get_regno (src))
   7300 		/* Check only inheritance on last inheritance pass.  */
   7301 		&& get_regno (src) >= new_regno_start
   7302 		/* Check that the optional reload was inherited.  */
   7303 		&& bitmap_bit_p (&lra_inheritance_pseudos, get_regno (src)))
   7304 	      {
   7305 		keep_p = true;
   7306 		break;
   7307 	      }
   7308 	  }
   7309       if (keep_p)
   7310 	{
   7311 	  bitmap_clear_bit (removed_optional_reload_pseudos, regno);
   7312 	  if (lra_dump_file != NULL)
   7313 	    fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
   7314 	}
   7315     }
   7316   change_p = ! bitmap_empty_p (removed_optional_reload_pseudos);
   7317   auto_bitmap insn_bitmap (&reg_obstack);
   7318   EXECUTE_IF_SET_IN_BITMAP (removed_optional_reload_pseudos, 0, regno, bi)
   7319     {
   7320       if (lra_dump_file != NULL)
   7321 	fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
   7322       bitmap_copy (insn_bitmap, &lra_reg_info[regno].insn_bitmap);
   7323       EXECUTE_IF_SET_IN_BITMAP (insn_bitmap, 0, uid, bi2)
   7324 	{
   7325 	  /* We may have already removed a clobber.  */
   7326 	  if (!lra_insn_recog_data[uid])
   7327 	    continue;
   7328 	  insn = lra_insn_recog_data[uid]->insn;
   7329 	  if ((set = single_set (insn)) != NULL_RTX)
   7330 	    {
   7331 	      src = SET_SRC (set);
   7332 	      dest = SET_DEST (set);
   7333 	      if ((REG_P (src) || SUBREG_P (src))
   7334 		  && (REG_P (dest) || SUBREG_P (dest))
   7335 		  && ((get_regno (src) == (int) regno
   7336 		       && (get_regno (lra_reg_info[regno].restore_rtx)
   7337 			   == get_regno (dest)))
   7338 		      || (get_regno (dest) == (int) regno
   7339 			  && (get_regno (lra_reg_info[regno].restore_rtx)
   7340 			      == get_regno (src)))))
   7341 		{
   7342 		  if (lra_dump_file != NULL)
   7343 		    {
   7344 		      fprintf (lra_dump_file, "  Deleting move %u\n",
   7345 			       INSN_UID (insn));
   7346 		      dump_insn_slim (lra_dump_file, insn);
   7347 		    }
   7348 		  delete_move_and_clobber (insn, get_regno (dest));
   7349 		  continue;
   7350 		}
   7351 	      /* We should not worry about generation memory-memory
   7352 		 moves here as if the corresponding inheritance did
   7353 		 not work (inheritance pseudo did not get a hard reg),
   7354 		 we remove the inheritance pseudo and the optional
   7355 		 reload.  */
   7356 	    }
   7357 	  if (GET_CODE (PATTERN (insn)) == CLOBBER
   7358 	      && REG_P (SET_DEST (insn))
   7359 	      && get_regno (SET_DEST (insn)) == (int) regno)
   7360 	    /* Refuse to remap clobbers to preexisting pseudos.  */
   7361 	    gcc_unreachable ();
   7362 	  lra_substitute_pseudo_within_insn
   7363 	    (insn, regno, lra_reg_info[regno].restore_rtx, false);
   7364 	  lra_update_insn_regno_info (insn);
   7365 	  if (lra_dump_file != NULL)
   7366 	    {
   7367 	      fprintf (lra_dump_file,
   7368 		       "  Restoring original insn:\n");
   7369 	      dump_insn_slim (lra_dump_file, insn);
   7370 	    }
   7371 	}
   7372     }
   7373   /* Clear restore_regnos.  */
   7374   EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
   7375     lra_reg_info[regno].restore_rtx = NULL_RTX;
   7376   return change_p;
   7377 }
   7378 
   7379 /* Entry function for undoing inheritance/split transformation.	 Return true
   7380    if we did any RTL change in this pass.  */
   7381 bool
   7382 lra_undo_inheritance (void)
   7383 {
   7384   unsigned int regno;
   7385   int hard_regno;
   7386   int n_all_inherit, n_inherit, n_all_split, n_split;
   7387   rtx restore_rtx;
   7388   bitmap_iterator bi;
   7389   bool change_p;
   7390 
   7391   lra_undo_inheritance_iter++;
   7392   if (lra_undo_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
   7393     return false;
   7394   if (lra_dump_file != NULL)
   7395     fprintf (lra_dump_file,
   7396 	     "\n********** Undoing inheritance #%d: **********\n\n",
   7397 	     lra_undo_inheritance_iter);
   7398   auto_bitmap remove_pseudos (&reg_obstack);
   7399   n_inherit = n_all_inherit = 0;
   7400   EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
   7401     if (lra_reg_info[regno].restore_rtx != NULL_RTX)
   7402       {
   7403 	n_all_inherit++;
   7404 	if (reg_renumber[regno] < 0
   7405 	    /* If the original pseudo changed its allocation, just
   7406 	       removing inheritance is dangerous as for changing
   7407 	       allocation we used shorter live-ranges.  */
   7408 	    && (! REG_P (lra_reg_info[regno].restore_rtx)
   7409 		|| reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] < 0))
   7410 	  bitmap_set_bit (remove_pseudos, regno);
   7411 	else
   7412 	  n_inherit++;
   7413       }
   7414   if (lra_dump_file != NULL && n_all_inherit != 0)
   7415     fprintf (lra_dump_file, "Inherit %d out of %d (%.2f%%)\n",
   7416 	     n_inherit, n_all_inherit,
   7417 	     (double) n_inherit / n_all_inherit * 100);
   7418   n_split = n_all_split = 0;
   7419   EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
   7420     if ((restore_rtx = lra_reg_info[regno].restore_rtx) != NULL_RTX)
   7421       {
   7422 	int restore_regno = REGNO (restore_rtx);
   7423 
   7424 	n_all_split++;
   7425 	hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
   7426 		      ? reg_renumber[restore_regno] : restore_regno);
   7427 	if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
   7428 	  bitmap_set_bit (remove_pseudos, regno);
   7429 	else
   7430 	  {
   7431 	    n_split++;
   7432 	    if (lra_dump_file != NULL)
   7433 	      fprintf (lra_dump_file, "	     Keep split r%d (orig=r%d)\n",
   7434 		       regno, restore_regno);
   7435 	  }
   7436       }
   7437   if (lra_dump_file != NULL && n_all_split != 0)
   7438     fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
   7439 	     n_split, n_all_split,
   7440 	     (double) n_split / n_all_split * 100);
   7441   change_p = remove_inheritance_pseudos (remove_pseudos);
   7442   /* Clear restore_regnos.  */
   7443   EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
   7444     lra_reg_info[regno].restore_rtx = NULL_RTX;
   7445   EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
   7446     lra_reg_info[regno].restore_rtx = NULL_RTX;
   7447   change_p = undo_optional_reloads () || change_p;
   7448   return change_p;
   7449 }
   7450