Home | History | Annotate | Line # | Download | only in gcc
      1  1.1  mrg /* Code for RTL transformations to satisfy insn constraints.
      2  1.1  mrg    Copyright (C) 2010-2022 Free Software Foundation, Inc.
      3  1.1  mrg    Contributed by Vladimir Makarov <vmakarov (at) redhat.com>.
      4  1.1  mrg 
      5  1.1  mrg    This file is part of GCC.
      6  1.1  mrg 
      7  1.1  mrg    GCC is free software; you can redistribute it and/or modify it under
      8  1.1  mrg    the terms of the GNU General Public License as published by the Free
      9  1.1  mrg    Software Foundation; either version 3, or (at your option) any later
     10  1.1  mrg    version.
     11  1.1  mrg 
     12  1.1  mrg    GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     13  1.1  mrg    WARRANTY; without even the implied warranty of MERCHANTABILITY or
     14  1.1  mrg    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     15  1.1  mrg    for more details.
     16  1.1  mrg 
     17  1.1  mrg    You should have received a copy of the GNU General Public License
     18  1.1  mrg    along with GCC; see the file COPYING3.  If not see
     19  1.1  mrg    <http://www.gnu.org/licenses/>.  */
     20  1.1  mrg 
     21  1.1  mrg 
     22  1.1  mrg /* This file contains code for 3 passes: constraint pass,
     23  1.1  mrg    inheritance/split pass, and pass for undoing failed inheritance and
     24  1.1  mrg    split.
     25  1.1  mrg 
     26  1.1  mrg    The major goal of constraint pass is to transform RTL to satisfy
     27  1.1  mrg    insn and address constraints by:
     28  1.1  mrg      o choosing insn alternatives;
     29  1.1  mrg      o generating *reload insns* (or reloads in brief) and *reload
     30  1.1  mrg        pseudos* which will get necessary hard registers later;
     31  1.1  mrg      o substituting pseudos with equivalent values and removing the
     32  1.1  mrg        instructions that initialized those pseudos.
     33  1.1  mrg 
     34  1.1  mrg    The constraint pass has biggest and most complicated code in LRA.
     35  1.1  mrg    There are a lot of important details like:
     36  1.1  mrg      o reuse of input reload pseudos to simplify reload pseudo
     37  1.1  mrg        allocations;
     38  1.1  mrg      o some heuristics to choose insn alternative to improve the
     39  1.1  mrg        inheritance;
     40  1.1  mrg      o early clobbers etc.
     41  1.1  mrg 
     42  1.1  mrg    The pass is mimicking former reload pass in alternative choosing
     43  1.1  mrg    because the reload pass is oriented to current machine description
     44  1.1  mrg    model.  It might be changed if the machine description model is
     45  1.1  mrg    changed.
     46  1.1  mrg 
     47  1.1  mrg    There is special code for preventing all LRA and this pass cycling
     48  1.1  mrg    in case of bugs.
     49  1.1  mrg 
     50  1.1  mrg    On the first iteration of the pass we process every instruction and
     51  1.1  mrg    choose an alternative for each one.  On subsequent iterations we try
     52  1.1  mrg    to avoid reprocessing instructions if we can be sure that the old
     53  1.1  mrg    choice is still valid.
     54  1.1  mrg 
     55  1.1  mrg    The inheritance/split pass is to transform code to achieve
     56  1.1  mrg    inheritance and live range splitting.  It is done on backward
     57  1.1  mrg    traversal of EBBs.
     58  1.1  mrg 
     59  1.1  mrg    The inheritance optimization goal is to reuse values in hard
     60  1.1  mrg    registers. There is analogous optimization in old reload pass.  The
     61  1.1  mrg    inheritance is achieved by following transformation:
     62  1.1  mrg 
     63  1.1  mrg        reload_p1 <- p	     reload_p1 <- p
     64  1.1  mrg        ...		     new_p <- reload_p1
     65  1.1  mrg        ...		=>   ...
     66  1.1  mrg        reload_p2 <- p	     reload_p2 <- new_p
     67  1.1  mrg 
     68  1.1  mrg    where p is spilled and not changed between the insns.  Reload_p1 is
     69  1.1  mrg    also called *original pseudo* and new_p is called *inheritance
     70  1.1  mrg    pseudo*.
     71  1.1  mrg 
     72  1.1  mrg    The subsequent assignment pass will try to assign the same (or
     73  1.1  mrg    another if it is not possible) hard register to new_p as to
     74  1.1  mrg    reload_p1 or reload_p2.
     75  1.1  mrg 
     76  1.1  mrg    If the assignment pass fails to assign a hard register to new_p,
     77  1.1  mrg    this file will undo the inheritance and restore the original code.
     78  1.1  mrg    This is because implementing the above sequence with a spilled
     79  1.1  mrg    new_p would make the code much worse.  The inheritance is done in
     80  1.1  mrg    EBB scope.  The above is just a simplified example to get an idea
     81  1.1  mrg    of the inheritance as the inheritance is also done for non-reload
     82  1.1  mrg    insns.
     83  1.1  mrg 
     84  1.1  mrg    Splitting (transformation) is also done in EBB scope on the same
     85  1.1  mrg    pass as the inheritance:
     86  1.1  mrg 
     87  1.1  mrg        r <- ... or ... <- r		 r <- ... or ... <- r
     88  1.1  mrg        ...				 s <- r (new insn -- save)
     89  1.1  mrg        ...			  =>
     90  1.1  mrg        ...				 r <- s (new insn -- restore)
     91  1.1  mrg        ... <- r				 ... <- r
     92  1.1  mrg 
     93  1.1  mrg     The *split pseudo* s is assigned to the hard register of the
     94  1.1  mrg     original pseudo or hard register r.
     95  1.1  mrg 
     96  1.1  mrg     Splitting is done:
     97  1.1  mrg       o In EBBs with high register pressure for global pseudos (living
     98  1.1  mrg 	in at least 2 BBs) and assigned to hard registers when there
     99  1.1  mrg 	are more than one reload needing the hard registers;
    100  1.1  mrg       o for pseudos needing save/restore code around calls.
    101  1.1  mrg 
    102  1.1  mrg     If the split pseudo still has the same hard register as the
    103  1.1  mrg     original pseudo after the subsequent assignment pass or the
    104  1.1  mrg     original pseudo was split, the opposite transformation is done on
    105  1.1  mrg     the same pass for undoing inheritance.  */
    106  1.1  mrg 
    107  1.1  mrg #undef REG_OK_STRICT
    108  1.1  mrg 
    109  1.1  mrg #include "config.h"
    110  1.1  mrg #include "system.h"
    111  1.1  mrg #include "coretypes.h"
    112  1.1  mrg #include "backend.h"
    113  1.1  mrg #include "target.h"
    114  1.1  mrg #include "rtl.h"
    115  1.1  mrg #include "tree.h"
    116  1.1  mrg #include "predict.h"
    117  1.1  mrg #include "df.h"
    118  1.1  mrg #include "memmodel.h"
    119  1.1  mrg #include "tm_p.h"
    120  1.1  mrg #include "expmed.h"
    121  1.1  mrg #include "optabs.h"
    122  1.1  mrg #include "regs.h"
    123  1.1  mrg #include "ira.h"
    124  1.1  mrg #include "recog.h"
    125  1.1  mrg #include "output.h"
    126  1.1  mrg #include "addresses.h"
    127  1.1  mrg #include "expr.h"
    128  1.1  mrg #include "cfgrtl.h"
    129  1.1  mrg #include "rtl-error.h"
    130  1.1  mrg #include "lra.h"
    131  1.1  mrg #include "lra-int.h"
    132  1.1  mrg #include "print-rtl.h"
    133  1.1  mrg #include "function-abi.h"
    134  1.1  mrg #include "rtl-iter.h"
    135  1.1  mrg 
    136  1.1  mrg /* Value of LRA_CURR_RELOAD_NUM at the beginning of BB of the current
    137  1.1  mrg    insn.  Remember that LRA_CURR_RELOAD_NUM is the number of emitted
    138  1.1  mrg    reload insns.  */
    139  1.1  mrg static int bb_reload_num;
    140  1.1  mrg 
    141  1.1  mrg /* The current insn being processed and corresponding its single set
    142  1.1  mrg    (NULL otherwise), its data (basic block, the insn data, the insn
    143  1.1  mrg    static data, and the mode of each operand).  */
    144  1.1  mrg static rtx_insn *curr_insn;
    145  1.1  mrg static rtx curr_insn_set;
    146  1.1  mrg static basic_block curr_bb;
    147  1.1  mrg static lra_insn_recog_data_t curr_id;
    148  1.1  mrg static struct lra_static_insn_data *curr_static_id;
    149  1.1  mrg static machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
    150  1.1  mrg /* Mode of the register substituted by its equivalence with VOIDmode
    151  1.1  mrg    (e.g. constant) and whose subreg is given operand of the current
    152  1.1  mrg    insn.  VOIDmode in all other cases.  */
    153  1.1  mrg static machine_mode original_subreg_reg_mode[MAX_RECOG_OPERANDS];
    154  1.1  mrg 
    155  1.1  mrg 
    156  1.1  mrg 
    158  1.1  mrg /* Start numbers for new registers and insns at the current constraints
    159  1.1  mrg    pass start.	*/
    160  1.1  mrg static int new_regno_start;
    161  1.1  mrg static int new_insn_uid_start;
    162  1.1  mrg 
    163  1.1  mrg /* If LOC is nonnull, strip any outer subreg from it.  */
    164  1.1  mrg static inline rtx *
    165  1.1  mrg strip_subreg (rtx *loc)
    166  1.1  mrg {
    167  1.1  mrg   return loc && GET_CODE (*loc) == SUBREG ? &SUBREG_REG (*loc) : loc;
    168  1.1  mrg }
    169  1.1  mrg 
    170  1.1  mrg /* Return hard regno of REGNO or if it is was not assigned to a hard
    171  1.1  mrg    register, use a hard register from its allocno class.  */
    172  1.1  mrg static int
    173  1.1  mrg get_try_hard_regno (int regno)
    174  1.1  mrg {
    175  1.1  mrg   int hard_regno;
    176  1.1  mrg   enum reg_class rclass;
    177  1.1  mrg 
    178  1.1  mrg   if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
    179  1.1  mrg     hard_regno = lra_get_regno_hard_regno (regno);
    180  1.1  mrg   if (hard_regno >= 0)
    181  1.1  mrg     return hard_regno;
    182  1.1  mrg   rclass = lra_get_allocno_class (regno);
    183  1.1  mrg   if (rclass == NO_REGS)
    184  1.1  mrg     return -1;
    185  1.1  mrg   return ira_class_hard_regs[rclass][0];
    186  1.1  mrg }
    187  1.1  mrg 
    188  1.1  mrg /* Return the hard regno of X after removing its subreg.  If X is not
    189  1.1  mrg    a register or a subreg of a register, return -1.  If X is a pseudo,
    190  1.1  mrg    use its assignment.  If FINAL_P return the final hard regno which will
    191  1.1  mrg    be after elimination.  */
    192  1.1  mrg static int
    193  1.1  mrg get_hard_regno (rtx x, bool final_p)
    194  1.1  mrg {
    195  1.1  mrg   rtx reg;
    196  1.1  mrg   int hard_regno;
    197  1.1  mrg 
    198  1.1  mrg   reg = x;
    199  1.1  mrg   if (SUBREG_P (x))
    200  1.1  mrg     reg = SUBREG_REG (x);
    201  1.1  mrg   if (! REG_P (reg))
    202  1.1  mrg     return -1;
    203  1.1  mrg   if (! HARD_REGISTER_NUM_P (hard_regno = REGNO (reg)))
    204  1.1  mrg     hard_regno = lra_get_regno_hard_regno (hard_regno);
    205  1.1  mrg   if (hard_regno < 0)
    206  1.1  mrg     return -1;
    207  1.1  mrg   if (final_p)
    208  1.1  mrg     hard_regno = lra_get_elimination_hard_regno (hard_regno);
    209  1.1  mrg   if (SUBREG_P (x))
    210  1.1  mrg     hard_regno += subreg_regno_offset (hard_regno, GET_MODE (reg),
    211  1.1  mrg 				       SUBREG_BYTE (x),  GET_MODE (x));
    212  1.1  mrg   return hard_regno;
    213  1.1  mrg }
    214  1.1  mrg 
    215  1.1  mrg /* If REGNO is a hard register or has been allocated a hard register,
    216  1.1  mrg    return the class of that register.  If REGNO is a reload pseudo
    217  1.1  mrg    created by the current constraints pass, return its allocno class.
    218  1.1  mrg    Return NO_REGS otherwise.  */
    219  1.1  mrg static enum reg_class
    220  1.1  mrg get_reg_class (int regno)
    221  1.1  mrg {
    222  1.1  mrg   int hard_regno;
    223  1.1  mrg 
    224  1.1  mrg   if (! HARD_REGISTER_NUM_P (hard_regno = regno))
    225  1.1  mrg     hard_regno = lra_get_regno_hard_regno (regno);
    226  1.1  mrg   if (hard_regno >= 0)
    227  1.1  mrg     {
    228  1.1  mrg       hard_regno = lra_get_elimination_hard_regno (hard_regno);
    229  1.1  mrg       return REGNO_REG_CLASS (hard_regno);
    230  1.1  mrg     }
    231  1.1  mrg   if (regno >= new_regno_start)
    232  1.1  mrg     return lra_get_allocno_class (regno);
    233  1.1  mrg   return NO_REGS;
    234  1.1  mrg }
    235  1.1  mrg 
/* Return true if REG satisfies (or will satisfy) reg class constraint
   CL.  Use elimination first if REG is a hard register.  If REG is a
   reload pseudo created by this constraints pass, assume that it will
   be allocated a hard register from its allocno class, but allow that
   class to be narrowed to CL if it is currently a superset of CL and
   if either:

   - ALLOW_ALL_RELOAD_CLASS_CHANGES_P is true or
   - the instruction we're processing is not a reload move.

   If NEW_CLASS is nonnull, set *NEW_CLASS to the new allocno class of
   REGNO (reg), or NO_REGS if no change in its class was needed.  */
static bool
in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class,
	    bool allow_all_reload_class_changes_p = false)
{
  enum reg_class rclass, common_class;
  machine_mode reg_mode;
  rtx src;
  int class_size, hard_regno, nregs, i, j;
  int regno = REGNO (reg);

  if (new_class != NULL)
    *new_class = NO_REGS;
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* Hard register: test membership in CL after applying any
	 register elimination (e.g. frame -> stack pointer).  */
      rtx final_reg = reg;
      rtx *final_loc = &final_reg;

      lra_eliminate_reg_if_possible (final_loc);
      return TEST_HARD_REG_BIT (reg_class_contents[cl], REGNO (*final_loc));
    }
  reg_mode = GET_MODE (reg);
  rclass = get_reg_class (regno);
  /* SRC is used below to recognize reload moves (simple reg/mem
     copies emitted by this pass).  */
  src = curr_insn_set != NULL ? SET_SRC (curr_insn_set) : NULL;
  if (regno < new_regno_start
      /* Do not allow the constraints for reload instructions to
	 influence the classes of new pseudos.  These reloads are
	 typically moves that have many alternatives, and restricting
	 reload pseudos for one alternative may lead to situations
	 where other reload pseudos are no longer allocatable.  */
      || (!allow_all_reload_class_changes_p
	  && INSN_UID (curr_insn) >= new_insn_uid_start
	  && src != NULL
	  && ((REG_P (src) || MEM_P (src))
	      || (GET_CODE (src) == SUBREG
		  && (REG_P (SUBREG_REG (src)) || MEM_P (SUBREG_REG (src)))))))
    /* When we don't know what class will be used finally for reload
       pseudos, we use ALL_REGS.  */
    return ((regno >= new_regno_start && rclass == ALL_REGS)
	    || (rclass != NO_REGS && ira_class_subset_p[rclass][cl]
		&& ! hard_reg_set_subset_p (reg_class_contents[cl],
					    lra_no_alloc_regs)));
  else
    {
      /* Narrow the pseudo's class to the intersection with CL, and
	 report the narrowed class through NEW_CLASS.  */
      common_class = ira_reg_class_subset[rclass][cl];
      if (new_class != NULL)
	*new_class = common_class;
      if (hard_reg_set_subset_p (reg_class_contents[common_class],
				 lra_no_alloc_regs))
	return false;
      /* Check that there are enough allocatable regs.  */
      class_size = ira_class_hard_regs_num[common_class];
      for (i = 0; i < class_size; i++)
	{
	  hard_regno = ira_class_hard_regs[common_class][i];
	  nregs = hard_regno_nregs (hard_regno, reg_mode);
	  if (nregs == 1)
	    return true;
	  /* Multi-register value: every covered hard register must be
	     allocatable and inside COMMON_CLASS.  */
	  for (j = 0; j < nregs; j++)
	    if (TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno + j)
		|| ! TEST_HARD_REG_BIT (reg_class_contents[common_class],
					hard_regno + j))
	      break;
	  if (j >= nregs)
	    return true;
	}
      return false;
    }
}
    316  1.1  mrg 
    317  1.1  mrg /* Return true if REGNO satisfies a memory constraint.	*/
    318  1.1  mrg static bool
    319  1.1  mrg in_mem_p (int regno)
    320  1.1  mrg {
    321  1.1  mrg   return get_reg_class (regno) == NO_REGS;
    322  1.1  mrg }
    323  1.1  mrg 
/* Return 1 if ADDR is a valid memory address for mode MODE in address
   space AS, and check that each pseudo has the proper kind of hard
   reg.	 */
static int
valid_address_p (machine_mode mode ATTRIBUTE_UNUSED,
		 rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Legacy target macro path: the macro jumps to WIN on success.
     It only supports the generic address space.  */
  lra_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  /* Modern targets supply a hook instead of the macro.  */
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
    342  1.1  mrg 
namespace {
  /* Temporarily eliminates registers in an address (for the lifetime of
     the object).  The constructor rewrites the base/index terms of the
     decomposed address in place; the destructor restores them.  */
  class address_eliminator {
  public:
    address_eliminator (struct address_info *ad);
    ~address_eliminator ();

  private:
    /* The decomposed address being modified.  */
    struct address_info *m_ad;
    /* Location of the (subreg-stripped) base term, or NULL.  */
    rtx *m_base_loc;
    /* Original value at *m_base_loc, saved for restoration.  */
    rtx m_base_reg;
    /* Location of the (subreg-stripped) index term, or NULL.  */
    rtx *m_index_loc;
    /* Original value at *m_index_loc, saved for restoration.  */
    rtx m_index_reg;
  };
}
    359  1.1  mrg 
/* Save the base and index terms of AD and replace any eliminable
   registers in them.  A duplicate base term (base_term2) is kept in
   sync with the rewritten base term.  */
address_eliminator::address_eliminator (struct address_info *ad)
  : m_ad (ad),
    m_base_loc (strip_subreg (ad->base_term)),
    m_base_reg (NULL_RTX),
    m_index_loc (strip_subreg (ad->index_term)),
    m_index_reg (NULL_RTX)
{
  if (m_base_loc != NULL)
    {
      m_base_reg = *m_base_loc;
      /* If we have non-legitimate address which is decomposed not in
	 the way we expected, don't do elimination here.  In such case
	 the address will be reloaded and elimination will be done in
	 reload insn finally.  */
      if (REG_P (m_base_reg))
	lra_eliminate_reg_if_possible (m_base_loc);
      if (m_ad->base_term2 != NULL)
	*m_ad->base_term2 = *m_ad->base_term;
    }
  if (m_index_loc != NULL)
    {
      m_index_reg = *m_index_loc;
      if (REG_P (m_index_reg))
	lra_eliminate_reg_if_possible (m_index_loc);
    }
}
    386  1.1  mrg 
    387  1.1  mrg address_eliminator::~address_eliminator ()
    388  1.1  mrg {
    389  1.1  mrg   if (m_base_loc && *m_base_loc != m_base_reg)
    390  1.1  mrg     {
    391  1.1  mrg       *m_base_loc = m_base_reg;
    392  1.1  mrg       if (m_ad->base_term2 != NULL)
    393  1.1  mrg 	*m_ad->base_term2 = *m_ad->base_term;
    394  1.1  mrg     }
    395  1.1  mrg   if (m_index_loc && *m_index_loc != m_index_reg)
    396  1.1  mrg     *m_index_loc = m_index_reg;
    397  1.1  mrg }
    398  1.1  mrg 
    399  1.1  mrg /* Return true if the eliminated form of AD is a legitimate target address.
    400  1.1  mrg    If OP is a MEM, AD is the address within OP, otherwise OP should be
    401  1.1  mrg    ignored.  CONSTRAINT is one constraint that the operand may need
    402  1.1  mrg    to meet.  */
    403  1.1  mrg static bool
    404  1.1  mrg valid_address_p (rtx op, struct address_info *ad,
    405  1.1  mrg 		 enum constraint_num constraint)
    406  1.1  mrg {
    407  1.1  mrg   address_eliminator eliminator (ad);
    408  1.1  mrg 
    409  1.1  mrg   /* Allow a memory OP if it matches CONSTRAINT, even if CONSTRAINT is more
    410  1.1  mrg      forgiving than "m".
    411  1.1  mrg      Need to extract memory from op for special memory constraint,
    412  1.1  mrg      i.e. bcst_mem_operand in i386 backend.  */
    413  1.1  mrg   if (MEM_P (extract_mem_from_operand (op))
    414  1.1  mrg       && insn_extra_relaxed_memory_constraint (constraint)
    415  1.1  mrg       && constraint_satisfied_p (op, constraint))
    416  1.1  mrg     return true;
    417  1.1  mrg 
    418  1.1  mrg   return valid_address_p (ad->mode, *ad->outer, ad->as);
    419  1.1  mrg }
    420  1.1  mrg 
    421  1.1  mrg /* For special_memory_operand, it could be false for MEM_P (op),
    422  1.1  mrg    i.e. bcst_mem_operand in i386 backend.
    423  1.1  mrg    Extract and return real memory operand or op.  */
    424  1.1  mrg rtx
    425  1.1  mrg extract_mem_from_operand (rtx op)
    426  1.1  mrg {
    427  1.1  mrg   for (rtx x = op;; x = XEXP (x, 0))
    428  1.1  mrg     {
    429  1.1  mrg       if (MEM_P (x))
    430  1.1  mrg 	return x;
    431  1.1  mrg       if (GET_RTX_LENGTH (GET_CODE (x)) != 1
    432  1.1  mrg 	  || GET_RTX_FORMAT (GET_CODE (x))[0] != 'e')
    433  1.1  mrg 	break;
    434  1.1  mrg     }
    435  1.1  mrg   return op;
    436  1.1  mrg }
    437  1.1  mrg 
    438  1.1  mrg /* Return true if the eliminated form of memory reference OP satisfies
    439  1.1  mrg    extra (special) memory constraint CONSTRAINT.  */
    440  1.1  mrg static bool
    441  1.1  mrg satisfies_memory_constraint_p (rtx op, enum constraint_num constraint)
    442  1.1  mrg {
    443  1.1  mrg   struct address_info ad;
    444  1.1  mrg   rtx mem = extract_mem_from_operand (op);
    445  1.1  mrg   if (!MEM_P (mem))
    446  1.1  mrg     return false;
    447  1.1  mrg 
    448  1.1  mrg   decompose_mem_address (&ad, mem);
    449  1.1  mrg   address_eliminator eliminator (&ad);
    450  1.1  mrg   return constraint_satisfied_p (op, constraint);
    451  1.1  mrg }
    452  1.1  mrg 
    453  1.1  mrg /* Return true if the eliminated form of address AD satisfies extra
    454  1.1  mrg    address constraint CONSTRAINT.  */
    455  1.1  mrg static bool
    456  1.1  mrg satisfies_address_constraint_p (struct address_info *ad,
    457  1.1  mrg 				enum constraint_num constraint)
    458  1.1  mrg {
    459  1.1  mrg   address_eliminator eliminator (ad);
    460  1.1  mrg   return constraint_satisfied_p (*ad->outer, constraint);
    461  1.1  mrg }
    462  1.1  mrg 
    463  1.1  mrg /* Return true if the eliminated form of address OP satisfies extra
    464  1.1  mrg    address constraint CONSTRAINT.  */
    465  1.1  mrg static bool
    466  1.1  mrg satisfies_address_constraint_p (rtx op, enum constraint_num constraint)
    467  1.1  mrg {
    468  1.1  mrg   struct address_info ad;
    469  1.1  mrg 
    470  1.1  mrg   decompose_lea_address (&ad, &op);
    471  1.1  mrg   return satisfies_address_constraint_p (&ad, constraint);
    472  1.1  mrg }
    473  1.1  mrg 
    474  1.1  mrg /* Initiate equivalences for LRA.  As we keep original equivalences
    475  1.1  mrg    before any elimination, we need to make copies otherwise any change
    476  1.1  mrg    in insns might change the equivalences.  */
    477  1.1  mrg void
    478  1.1  mrg lra_init_equiv (void)
    479  1.1  mrg {
    480  1.1  mrg   ira_expand_reg_equiv ();
    481  1.1  mrg   for (int i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
    482  1.1  mrg     {
    483  1.1  mrg       rtx res;
    484  1.1  mrg 
    485  1.1  mrg       if ((res = ira_reg_equiv[i].memory) != NULL_RTX)
    486  1.1  mrg 	ira_reg_equiv[i].memory = copy_rtx (res);
    487  1.1  mrg       if ((res = ira_reg_equiv[i].invariant) != NULL_RTX)
    488  1.1  mrg 	ira_reg_equiv[i].invariant = copy_rtx (res);
    489  1.1  mrg     }
    490  1.1  mrg }
    491  1.1  mrg 
    492  1.1  mrg static rtx loc_equivalence_callback (rtx, const_rtx, void *);
    493  1.1  mrg 
    494  1.1  mrg /* Update equivalence for REGNO.  We need to this as the equivalence
    495  1.1  mrg    might contain other pseudos which are changed by their
    496  1.1  mrg    equivalences.  */
    497  1.1  mrg static void
    498  1.1  mrg update_equiv (int regno)
    499  1.1  mrg {
    500  1.1  mrg   rtx x;
    501  1.1  mrg 
    502  1.1  mrg   if ((x = ira_reg_equiv[regno].memory) != NULL_RTX)
    503  1.1  mrg     ira_reg_equiv[regno].memory
    504  1.1  mrg       = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
    505  1.1  mrg 				 NULL_RTX);
    506  1.1  mrg   if ((x = ira_reg_equiv[regno].invariant) != NULL_RTX)
    507  1.1  mrg     ira_reg_equiv[regno].invariant
    508  1.1  mrg       = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
    509  1.1  mrg 				 NULL_RTX);
    510  1.1  mrg }
    511  1.1  mrg 
    512  1.1  mrg /* If we have decided to substitute X with another value, return that
    513  1.1  mrg    value, otherwise return X.  */
    514  1.1  mrg static rtx
    515  1.1  mrg get_equiv (rtx x)
    516  1.1  mrg {
    517  1.1  mrg   int regno;
    518  1.1  mrg   rtx res;
    519  1.1  mrg 
    520  1.1  mrg   if (! REG_P (x) || (regno = REGNO (x)) < FIRST_PSEUDO_REGISTER
    521  1.1  mrg       || ! ira_reg_equiv[regno].defined_p
    522  1.1  mrg       || ! ira_reg_equiv[regno].profitable_p
    523  1.1  mrg       || lra_get_regno_hard_regno (regno) >= 0)
    524  1.1  mrg     return x;
    525  1.1  mrg   if ((res = ira_reg_equiv[regno].memory) != NULL_RTX)
    526  1.1  mrg     {
    527  1.1  mrg       if (targetm.cannot_substitute_mem_equiv_p (res))
    528  1.1  mrg 	return x;
    529  1.1  mrg       return res;
    530  1.1  mrg     }
    531  1.1  mrg   if ((res = ira_reg_equiv[regno].constant) != NULL_RTX)
    532  1.1  mrg     return res;
    533  1.1  mrg   if ((res = ira_reg_equiv[regno].invariant) != NULL_RTX)
    534  1.1  mrg     return res;
    535  1.1  mrg   gcc_unreachable ();
    536  1.1  mrg }
    537  1.1  mrg 
    538  1.1  mrg /* If we have decided to substitute X with the equivalent value,
    539  1.1  mrg    return that value after elimination for INSN, otherwise return
    540  1.1  mrg    X.  */
    541  1.1  mrg static rtx
    542  1.1  mrg get_equiv_with_elimination (rtx x, rtx_insn *insn)
    543  1.1  mrg {
    544  1.1  mrg   rtx res = get_equiv (x);
    545  1.1  mrg 
    546  1.1  mrg   if (x == res || CONSTANT_P (res))
    547  1.1  mrg     return res;
    548  1.1  mrg   return lra_eliminate_regs_1 (insn, res, GET_MODE (res),
    549  1.1  mrg 			       false, false, 0, true);
    550  1.1  mrg }
    551  1.1  mrg 
    552  1.1  mrg /* Set up curr_operand_mode.  */
    553  1.1  mrg static void
    554  1.1  mrg init_curr_operand_mode (void)
    555  1.1  mrg {
    556  1.1  mrg   int nop = curr_static_id->n_operands;
    557  1.1  mrg   for (int i = 0; i < nop; i++)
    558  1.1  mrg     {
    559  1.1  mrg       machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
    560  1.1  mrg       if (mode == VOIDmode)
    561  1.1  mrg 	{
    562  1.1  mrg 	  /* The .md mode for address operands is the mode of the
    563  1.1  mrg 	     addressed value rather than the mode of the address itself.  */
    564  1.1  mrg 	  if (curr_id->icode >= 0 && curr_static_id->operand[i].is_address)
    565  1.1  mrg 	    mode = Pmode;
    566  1.1  mrg 	  else
    567  1.1  mrg 	    mode = curr_static_id->operand[i].mode;
    568  1.1  mrg 	}
    569  1.1  mrg       curr_operand_mode[i] = mode;
    570  1.1  mrg     }
    571  1.1  mrg }
    572  1.1  mrg 
    573  1.1  mrg 
    574  1.1  mrg 
    576  1.1  mrg /* The page contains code to reuse input reloads.  */
    577  1.1  mrg 
/* Structure describes input reload of the current insns.  Used to
   detect duplicate input reloads so their pseudo can be reused.  */
struct input_reload
{
  /* True for input reload of matched operands.  */
  bool match_p;
  /* Reloaded value.  */
  rtx input;
  /* Reload pseudo used.  */
  rtx reg;
};
    588  1.1  mrg 
    589  1.1  mrg /* The number of elements in the following array.  */
    590  1.1  mrg static int curr_insn_input_reloads_num;
    591  1.1  mrg /* Array containing info about input reloads.  It is used to find the
    592  1.1  mrg    same input reload and reuse the reload pseudo in this case.	*/
    593  1.1  mrg static struct input_reload curr_insn_input_reloads[LRA_MAX_INSN_RELOADS];
    594  1.1  mrg 
/* Initiate data concerning reuse of input reloads for the current
   insn.  Simply empties the curr_insn_input_reloads array.  */
static void
init_curr_insn_input_reloads (void)
{
  curr_insn_input_reloads_num = 0;
}
    602  1.1  mrg 
    603  1.1  mrg /* The canonical form of an rtx inside a MEM is not necessarily the same as the
    604  1.1  mrg    canonical form of the rtx outside the MEM.  Fix this up in the case that
    605  1.1  mrg    we're reloading an address (and therefore pulling it outside a MEM).  */
    606  1.1  mrg static rtx
    607  1.1  mrg canonicalize_reload_addr (rtx addr)
    608  1.1  mrg {
    609  1.1  mrg   subrtx_var_iterator::array_type array;
    610  1.1  mrg   FOR_EACH_SUBRTX_VAR (iter, array, addr, NONCONST)
    611  1.1  mrg     {
    612  1.1  mrg       rtx x = *iter;
    613  1.1  mrg       if (GET_CODE (x) == MULT && CONST_INT_P (XEXP (x, 1)))
    614  1.1  mrg 	{
    615  1.1  mrg 	  const HOST_WIDE_INT ci = INTVAL (XEXP (x, 1));
    616  1.1  mrg 	  const int pwr2 = exact_log2 (ci);
    617  1.1  mrg 	  if (pwr2 > 0)
    618  1.1  mrg 	    {
    619  1.1  mrg 	      /* Rewrite this to use a shift instead, which is canonical when
    620  1.1  mrg 		 outside of a MEM.  */
    621  1.1  mrg 	      PUT_CODE (x, ASHIFT);
    622  1.1  mrg 	      XEXP (x, 1) = GEN_INT (pwr2);
    623  1.1  mrg 	    }
    624  1.1  mrg 	}
    625  1.1  mrg     }
    626  1.1  mrg 
    627  1.1  mrg   return addr;
    628  1.1  mrg }
    629  1.1  mrg 
/* Create a new pseudo using MODE, RCLASS, EXCLUDE_START_HARD_REGS, ORIGINAL or
   reuse an existing reload pseudo.  Don't reuse an existing reload pseudo if
   IN_SUBREG_P is true and the reused pseudo should be wrapped up in a SUBREG.
   The result pseudo is returned through RESULT_REG.  Return TRUE if we created
   a new pseudo, FALSE if we reused an existing reload pseudo.  Use TITLE to
   describe new registers for debug purposes.  */
static bool
get_reload_reg (enum op_type type, machine_mode mode, rtx original,
		enum reg_class rclass, HARD_REG_SET *exclude_start_hard_regs,
		bool in_subreg_p, const char *title, rtx *result_reg)
{
  int i, regno;
  enum reg_class new_class;
  bool unique_p = false;

  if (type == OP_OUT)
    {
      /* Output reload registers tend to start out with a conservative
	 choice of register class.  Usually this is ALL_REGS, although
	 a target might narrow it (for performance reasons) through
	 targetm.preferred_reload_class.  It's therefore quite common
	 for a reload instruction to require a more restrictive class
	 than the class that was originally assigned to the reload register.

	 In these situations, it's more efficient to refine the choice
	 of register class rather than create a second reload register.
	 This also helps to avoid cycling for registers that are only
	 used by reload instructions.  */
      if (REG_P (original)
	  && (int) REGNO (original) >= new_regno_start
	  && INSN_UID (curr_insn) >= new_insn_uid_start
	  && in_class_p (original, rclass, &new_class, true))
	{
	  /* ORIGINAL is itself a reload pseudo used by a reload insn:
	     narrow its class in place and reuse it rather than creating
	     yet another pseudo.  */
	  unsigned int regno = REGNO (original);
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "	 Reuse r%d for output ", regno);
	      dump_value_slim (lra_dump_file, original, 1);
	    }
	  if (new_class != lra_get_allocno_class (regno))
	    lra_change_class (regno, new_class, ", change to", false);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "\n");
	  *result_reg = original;
	  return false;
	}
      *result_reg
	= lra_create_new_reg_with_unique_value (mode, original, rclass,
						exclude_start_hard_regs, title);
      return true;
    }
  /* Prevent reuse value of expression with side effects,
     e.g. volatile memory.  */
  if (! side_effects_p (original))
    for (i = 0; i < curr_insn_input_reloads_num; i++)
      {
	if (! curr_insn_input_reloads[i].match_p
	    && rtx_equal_p (curr_insn_input_reloads[i].input, original)
	    && in_class_p (curr_insn_input_reloads[i].reg, rclass, &new_class))
	  {
	    rtx reg = curr_insn_input_reloads[i].reg;
	    regno = REGNO (reg);
	    /* If input is equal to original and both are VOIDmode,
	       GET_MODE (reg) might be still different from mode.
	       Ensure we don't return *result_reg with wrong mode.  */
	    if (GET_MODE (reg) != mode)
	      {
		if (in_subreg_p)
		  continue;
		if (maybe_lt (GET_MODE_SIZE (GET_MODE (reg)),
			      GET_MODE_SIZE (mode)))
		  continue;
		reg = lowpart_subreg (mode, reg, GET_MODE (reg));
		if (reg == NULL_RTX || GET_CODE (reg) != SUBREG)
		  continue;
	      }
	    *result_reg = reg;
	    if (lra_dump_file != NULL)
	      {
		fprintf (lra_dump_file, "	 Reuse r%d for reload ", regno);
		dump_value_slim (lra_dump_file, original, 1);
	      }
	    if (new_class != lra_get_allocno_class (regno))
	      lra_change_class (regno, new_class, ", change to", false);
	    if (lra_dump_file != NULL)
	      fprintf (lra_dump_file, "\n");
	    return false;
	  }
	/* If we have an input reload with a different mode, make sure it
	   will get a different hard reg.  */
	else if (REG_P (original)
		 && REG_P (curr_insn_input_reloads[i].input)
		 && REGNO (original) == REGNO (curr_insn_input_reloads[i].input)
		 && (GET_MODE (original)
		     != GET_MODE (curr_insn_input_reloads[i].input)))
	  unique_p = true;
      }
  /* No reusable input reload was found: create a fresh pseudo and record
     it so that later input reloads of the same value can share it.  */
  *result_reg = (unique_p
		 ? lra_create_new_reg_with_unique_value
		 : lra_create_new_reg) (mode, original, rclass,
					exclude_start_hard_regs, title);
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = original;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = false;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = *result_reg;
  return true;
}
    737  1.1  mrg 
    738  1.1  mrg 
    739  1.1  mrg /* The page contains major code to choose the current insn alternative
    741  1.1  mrg    and generate reloads for it.	 */
    742  1.1  mrg 
    743  1.1  mrg /* Return the offset from REGNO of the least significant register
    744  1.1  mrg    in (reg:MODE REGNO).
    745  1.1  mrg 
    746  1.1  mrg    This function is used to tell whether two registers satisfy
    747  1.1  mrg    a matching constraint.  (reg:MODE1 REGNO1) matches (reg:MODE2 REGNO2) if:
    748  1.1  mrg 
    749  1.1  mrg          REGNO1 + lra_constraint_offset (REGNO1, MODE1)
    750  1.1  mrg 	 == REGNO2 + lra_constraint_offset (REGNO2, MODE2)  */
    751  1.1  mrg int
    752  1.1  mrg lra_constraint_offset (int regno, machine_mode mode)
    753  1.1  mrg {
    754  1.1  mrg   lra_assert (regno < FIRST_PSEUDO_REGISTER);
    755  1.1  mrg 
    756  1.1  mrg   scalar_int_mode int_mode;
    757  1.1  mrg   if (WORDS_BIG_ENDIAN
    758  1.1  mrg       && is_a <scalar_int_mode> (mode, &int_mode)
    759  1.1  mrg       && GET_MODE_SIZE (int_mode) > UNITS_PER_WORD)
    760  1.1  mrg     return hard_regno_nregs (regno, mode) - 1;
    761  1.1  mrg   return 0;
    762  1.1  mrg }
    763  1.1  mrg 
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   auto-increment and auto-decrement.  This is specifically intended for
   process_alt_operands to use in determining whether two operands
   match.  X is the operand whose number is the lower of the two.

   It is supposed that X is the output operand and Y is the input
   operand.  Y_HARD_REGNO is the final hard regno of register Y or
   register in subreg Y as we know it now.  Otherwise, it is a
   negative value.  */
static bool
operands_match_p (rtx x, rtx y, int y_hard_regno)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return true;
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y)))))
    {
      /* Both sides are (possibly subregs of) registers: compare the
	 hard regnos, adjusted for multi-word big-endian layout.  If
	 either side has no known hard regno yet, fall back to the
	 structural comparison below.  */
      int j;

      i = get_hard_regno (x, false);
      if (i < 0)
	goto slow;

      if ((j = y_hard_regno) < 0)
	goto slow;

      i += lra_constraint_offset (i, GET_MODE (x));
      j += lra_constraint_offset (j, GET_MODE (y));

      return i == j;
    }

  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two post-increments are invalid
     because the assembler insn would increment only once.  On the
     other hand, a post-increment matches ordinary indexing if the
     post-increment is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y, y_hard_regno);

  /* Two pre-increments are invalid because the assembler insn would
     increment only once.  On the other hand, a pre-increment matches
     ordinary indexing if the pre-increment is the input operand.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0), -1);

 slow:
  /* Structural comparison, analogous to rtx_equal_p but still allowing
     a pseudo REG to match a SUBREG of the same pseudo.  */

  if (code == REG && REG_P (y))
    return REGNO (x) == REGNO (y);

  if (code == REG && GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y))
      && x == SUBREG_REG (y))
    return true;
  if (GET_CODE (y) == REG && code == SUBREG && REG_P (SUBREG_REG (x))
      && SUBREG_REG (x) == y)
    return true;

  /* Now we have disposed of all the cases in which different rtx
     codes can match.  */
  if (code != GET_CODE (y))
    return false;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return false;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants are shared, so pointer inequality (checked at
	 the top) already proved these different.  */
      return false;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      break;

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements fail
     to match, return false for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return false;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return false;
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return false;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i), -1);
	  if (val == 0)
	    return false;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return false;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j), -1);
	      if (val == 0)
		return false;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's, except for
	     within LABEL_REFs and SYMBOL_REFs.	 */
	default:
	  gcc_unreachable ();
	}
    }
  return true;
}
    909  1.1  mrg 
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && GET_MODE_SIZE (MODE).is_constant ()	\
   && !targetm.cannot_force_const_mem (MODE, X))

/* True if C is a non-empty register class that has too few registers
   to be safely used as a reload target class.  A class counts as small
   either when it has exactly one hard register or when the target hook
   says it is likely to be spilled (the >= 1 check merely excludes
   empty classes from the hook's verdict).  */
#define SMALL_REGISTER_CLASS_P(C)		\
  (ira_class_hard_regs_num [(C)] == 1		\
   || (ira_class_hard_regs_num [(C)] >= 1	\
       && targetm.class_likely_spilled_p (C)))
    925  1.1  mrg 
    926  1.1  mrg /* If REG is a reload pseudo, try to make its class satisfying CL.  */
    927  1.1  mrg static void
    928  1.1  mrg narrow_reload_pseudo_class (rtx reg, enum reg_class cl)
    929  1.1  mrg {
    930  1.1  mrg   enum reg_class rclass;
    931  1.1  mrg 
    932  1.1  mrg   /* Do not make more accurate class from reloads generated.  They are
    933  1.1  mrg      mostly moves with a lot of constraints.  Making more accurate
    934  1.1  mrg      class may results in very narrow class and impossibility of find
    935  1.1  mrg      registers for several reloads of one insn.	 */
    936  1.1  mrg   if (INSN_UID (curr_insn) >= new_insn_uid_start)
    937  1.1  mrg     return;
    938  1.1  mrg   if (GET_CODE (reg) == SUBREG)
    939  1.1  mrg     reg = SUBREG_REG (reg);
    940  1.1  mrg   if (! REG_P (reg) || (int) REGNO (reg) < new_regno_start)
    941  1.1  mrg     return;
    942  1.1  mrg   if (in_class_p (reg, cl, &rclass) && rclass != cl)
    943  1.1  mrg     lra_change_class (REGNO (reg), rclass, "      Change to", true);
    944  1.1  mrg }
    945  1.1  mrg 
    946  1.1  mrg /* Searches X for any reference to a reg with the same value as REGNO,
    947  1.1  mrg    returning the rtx of the reference found if any.  Otherwise,
    948  1.1  mrg    returns NULL_RTX.  */
    949  1.1  mrg static rtx
    950  1.1  mrg regno_val_use_in (unsigned int regno, rtx x)
    951  1.1  mrg {
    952  1.1  mrg   const char *fmt;
    953  1.1  mrg   int i, j;
    954  1.1  mrg   rtx tem;
    955  1.1  mrg 
    956  1.1  mrg   if (REG_P (x) && lra_reg_info[REGNO (x)].val == lra_reg_info[regno].val)
    957  1.1  mrg     return x;
    958  1.1  mrg 
    959  1.1  mrg   fmt = GET_RTX_FORMAT (GET_CODE (x));
    960  1.1  mrg   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    961  1.1  mrg     {
    962  1.1  mrg       if (fmt[i] == 'e')
    963  1.1  mrg 	{
    964  1.1  mrg 	  if ((tem = regno_val_use_in (regno, XEXP (x, i))))
    965  1.1  mrg 	    return tem;
    966  1.1  mrg 	}
    967  1.1  mrg       else if (fmt[i] == 'E')
    968  1.1  mrg 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
    969  1.1  mrg 	  if ((tem = regno_val_use_in (regno , XVECEXP (x, i, j))))
    970  1.1  mrg 	    return tem;
    971  1.1  mrg     }
    972  1.1  mrg 
    973  1.1  mrg   return NULL_RTX;
    974  1.1  mrg }
    975  1.1  mrg 
    976  1.1  mrg /* Return true if all current insn non-output operands except INS (it
    977  1.1  mrg    has a negaitve end marker) do not use pseudos with the same value
    978  1.1  mrg    as REGNO.  */
    979  1.1  mrg static bool
    980  1.1  mrg check_conflict_input_operands (int regno, signed char *ins)
    981  1.1  mrg {
    982  1.1  mrg   int in;
    983  1.1  mrg   int n_operands = curr_static_id->n_operands;
    984  1.1  mrg 
    985  1.1  mrg   for (int nop = 0; nop < n_operands; nop++)
    986  1.1  mrg     if (! curr_static_id->operand[nop].is_operator
    987  1.1  mrg 	&& curr_static_id->operand[nop].type != OP_OUT)
    988  1.1  mrg       {
    989  1.1  mrg 	for (int i = 0; (in = ins[i]) >= 0; i++)
    990  1.1  mrg 	  if (in == nop)
    991  1.1  mrg 	    break;
    992  1.1  mrg 	if (in < 0
    993  1.1  mrg 	    && regno_val_use_in (regno, *curr_id->operand_loc[nop]) != NULL_RTX)
    994  1.1  mrg 	  return false;
    995  1.1  mrg       }
    996  1.1  mrg   return true;
    997  1.1  mrg }
    998  1.1  mrg 
/* Generate reloads for matching OUT and INS (array of input operand numbers
   with end marker -1) with reg class GOAL_CLASS and EXCLUDE_START_HARD_REGS,
   considering output operands OUTS (similar array to INS) needing to be in
   different registers.  Add input and output reloads correspondingly to the
   lists *BEFORE and *AFTER.  OUT might be negative.  In this case we generate
   input reloads for matched input operands INS.  EARLY_CLOBBER_P is a flag
   that the output operand is early clobbered for chosen alternative.  */
static void
match_reload (signed char out, signed char *ins, signed char *outs,
	      enum reg_class goal_class, HARD_REG_SET *exclude_start_hard_regs,
	      rtx_insn **before, rtx_insn **after, bool early_clobber_p)
{
  bool out_conflict;
  int i, in;
  rtx new_in_reg, new_out_reg, reg;
  machine_mode inmode, outmode;
  rtx in_rtx = *curr_id->operand_loc[ins[0]];
  rtx out_rtx = out < 0 ? in_rtx : *curr_id->operand_loc[out];

  inmode = curr_operand_mode[ins[0]];
  outmode = out < 0 ? inmode : curr_operand_mode[out];
  push_to_sequence (*before);
  if (inmode != outmode)
    {
      /* The matched operands have different modes: one pseudo serves
	 both roles, with the smaller-mode side accessed via a lowpart
	 SUBREG of it.  process_alt_operands has already checked that
	 the mode sizes are ordered.  */
      if (partial_subreg_p (outmode, inmode))
	{
	  /* Input mode is wider: allocate the pseudo in INMODE and take
	     a subreg of it for the output.  */
	  reg = new_in_reg
	    = lra_create_new_reg_with_unique_value (inmode, in_rtx, goal_class,
						    exclude_start_hard_regs,
						    "");
	  new_out_reg = gen_lowpart_SUBREG (outmode, reg);
	  LRA_SUBREG_P (new_out_reg) = 1;
	  /* If the input reg is dying here, we can use the same hard
	     register for REG and IN_RTX.  We do it only for original
	     pseudos as reload pseudos can die although original
	     pseudos still live where reload pseudos dies.  */
	  if (REG_P (in_rtx) && (int) REGNO (in_rtx) < lra_new_regno_start
	      && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	      && (!early_clobber_p
		  || check_conflict_input_operands(REGNO (in_rtx), ins)))
	    lra_assign_reg_val (REGNO (in_rtx), REGNO (reg));
	}
      else
	{
	  /* Output mode is wider: allocate the pseudo in OUTMODE and
	     take a subreg of it for the input.  */
	  reg = new_out_reg
	    = lra_create_new_reg_with_unique_value (outmode, out_rtx,
						    goal_class,
						    exclude_start_hard_regs,
						    "");
	  new_in_reg = gen_lowpart_SUBREG (inmode, reg);
	  /* NEW_IN_REG is non-paradoxical subreg.  We don't want
	     NEW_OUT_REG living above.  We add clobber clause for
	     this.  This is just a temporary clobber.  We can remove
	     it at the end of LRA work.  */
	  rtx_insn *clobber = emit_clobber (new_out_reg);
	  LRA_TEMP_CLOBBER_P (PATTERN (clobber)) = 1;
	  LRA_SUBREG_P (new_in_reg) = 1;
	  if (GET_CODE (in_rtx) == SUBREG)
	    {
	      rtx subreg_reg = SUBREG_REG (in_rtx);

	      /* If SUBREG_REG is dying here and sub-registers IN_RTX
		 and NEW_IN_REG are similar, we can use the same hard
		 register for REG and SUBREG_REG.  */
	      if (REG_P (subreg_reg)
		  && (int) REGNO (subreg_reg) < lra_new_regno_start
		  && GET_MODE (subreg_reg) == outmode
		  && known_eq (SUBREG_BYTE (in_rtx), SUBREG_BYTE (new_in_reg))
		  && find_regno_note (curr_insn, REG_DEAD, REGNO (subreg_reg))
		  && (! early_clobber_p
		      || check_conflict_input_operands (REGNO (subreg_reg),
							ins)))
		lra_assign_reg_val (REGNO (subreg_reg), REGNO (reg));
	    }
	}
    }
  else
    {
      /* Pseudos have values -- see comments for lra_reg_info.
	 Different pseudos with the same value do not conflict even if
	 they live in the same place.  When we create a pseudo we
	 assign value of original pseudo (if any) from which we
	 created the new pseudo.  If we create the pseudo from the
	 input pseudo, the new pseudo will have no conflict with the
	 input pseudo which is wrong when the input pseudo lives after
	 the insn and as the new pseudo value is changed by the insn
	 output.  Therefore we create the new pseudo from the output
	 except the case when we have single matched dying input
	 pseudo.

	 We cannot reuse the current output register because we might
	 have a situation like "a <- a op b", where the constraints
	 force the second input operand ("b") to match the output
	 operand ("a").  "b" must then be copied into a new register
	 so that it doesn't clobber the current value of "a".

	 We cannot use the same value if the output pseudo is
	 early clobbered or the input pseudo is mentioned in the
	 output, e.g. as an address part in memory, because
	 output reload will actually extend the pseudo liveness.
	 We don't care about eliminable hard regs here as we are
	 interesting only in pseudos.  */

      /* Matching input's register value is the same as one of the other
	 output operand.  Output operands in a parallel insn must be in
	 different registers.  */
      out_conflict = false;
      if (REG_P (in_rtx))
	{
	  for (i = 0; outs[i] >= 0; i++)
	    {
	      rtx other_out_rtx = *curr_id->operand_loc[outs[i]];
	      if (outs[i] != out && REG_P (other_out_rtx)
		  && (regno_val_use_in (REGNO (in_rtx), other_out_rtx)
		      != NULL_RTX))
		{
		  out_conflict = true;
		  break;
		}
	    }
	}

      new_in_reg = new_out_reg
	= (! early_clobber_p && ins[1] < 0 && REG_P (in_rtx)
	   && (int) REGNO (in_rtx) < lra_new_regno_start
	   && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	   && (! early_clobber_p
	       || check_conflict_input_operands (REGNO (in_rtx), ins))
	   && (out < 0
	       || regno_val_use_in (REGNO (in_rtx), out_rtx) == NULL_RTX)
	   && !out_conflict
	   ? lra_create_new_reg (inmode, in_rtx, goal_class,
				 exclude_start_hard_regs, "")
	   : lra_create_new_reg_with_unique_value (outmode, out_rtx, goal_class,
						   exclude_start_hard_regs,
						   ""));
    }
  /* In operand can be got from transformations before processing insn
     constraints.  One example of such transformations is subreg
     reloading (see function simplify_operand_subreg).  The new
     pseudos created by the transformations might have inaccurate
     class (ALL_REGS) and we should make their classes more
     accurate.  */
  narrow_reload_pseudo_class (in_rtx, goal_class);
  lra_emit_move (copy_rtx (new_in_reg), in_rtx);
  *before = get_insns ();
  end_sequence ();
  /* Add the new pseudo to consider values of subsequent input reload
     pseudos.  */
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = in_rtx;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = true;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = new_in_reg;
  /* Substitute the reload pseudo into every matched input operand,
     using NEW_OUT_REG where the operand's mode requires it.  */
  for (i = 0; (in = ins[i]) >= 0; i++)
    if (GET_MODE (*curr_id->operand_loc[in]) == VOIDmode
	|| GET_MODE (new_in_reg) == GET_MODE (*curr_id->operand_loc[in]))
      *curr_id->operand_loc[in] = new_in_reg;
    else
      {
	lra_assert
	  (GET_MODE (new_out_reg) == GET_MODE (*curr_id->operand_loc[in]));
	*curr_id->operand_loc[in] = new_out_reg;
      }
  lra_update_dups (curr_id, ins);
  if (out < 0)
    return;
  /* See a comment for the input operand above.  */
  narrow_reload_pseudo_class (out_rtx, goal_class);
  if (find_reg_note (curr_insn, REG_UNUSED, out_rtx) == NULL_RTX)
    {
      reg = SUBREG_P (out_rtx) ? SUBREG_REG (out_rtx) : out_rtx;
      start_sequence ();
      /* If we had strict_low_part, use it also in reload to keep other
	 parts unchanged but do it only for regs as strict_low_part
	 has no sense for memory and probably there is no insn pattern
	 to match the reload insn in memory case.  */
      if (out >= 0 && curr_static_id->operand[out].strict_low && REG_P (reg))
	out_rtx = gen_rtx_STRICT_LOW_PART (VOIDmode, out_rtx);
      lra_emit_move (out_rtx, copy_rtx (new_out_reg));
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  *curr_id->operand_loc[out] = new_out_reg;
  lra_update_dup (curr_id, out);
}
   1187  1.1  mrg 
   1188  1.1  mrg /* Return register class which is union of all reg classes in insn
   1189  1.1  mrg    constraint alternative string starting with P.  */
   1190  1.1  mrg static enum reg_class
   1191  1.1  mrg reg_class_from_constraints (const char *p)
   1192  1.1  mrg {
   1193  1.1  mrg   int c, len;
   1194  1.1  mrg   enum reg_class op_class = NO_REGS;
   1195  1.1  mrg 
   1196  1.1  mrg   do
   1197  1.1  mrg     switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
   1198  1.1  mrg       {
   1199  1.1  mrg       case '#':
   1200  1.1  mrg       case ',':
   1201  1.1  mrg 	return op_class;
   1202  1.1  mrg 
   1203  1.1  mrg       case 'g':
   1204  1.1  mrg 	op_class = reg_class_subunion[op_class][GENERAL_REGS];
   1205  1.1  mrg 	break;
   1206  1.1  mrg 
   1207  1.1  mrg       default:
   1208  1.1  mrg 	enum constraint_num cn = lookup_constraint (p);
   1209  1.1  mrg 	enum reg_class cl = reg_class_for_constraint (cn);
   1210  1.1  mrg 	if (cl == NO_REGS)
   1211  1.1  mrg 	  {
   1212  1.1  mrg 	    if (insn_extra_address_constraint (cn))
   1213  1.1  mrg 	      op_class
   1214  1.1  mrg 		= (reg_class_subunion
   1215  1.1  mrg 		   [op_class][base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
   1216  1.1  mrg 					      ADDRESS, SCRATCH)]);
   1217  1.1  mrg 	    break;
   1218  1.1  mrg 	  }
   1219  1.1  mrg 
   1220  1.1  mrg 	op_class = reg_class_subunion[op_class][cl];
   1221  1.1  mrg  	break;
   1222  1.1  mrg       }
   1223  1.1  mrg   while ((p += len), c);
   1224  1.1  mrg   return op_class;
   1225  1.1  mrg }
   1226  1.1  mrg 
   1227  1.1  mrg /* If OP is a register, return the class of the register as per
   1228  1.1  mrg    get_reg_class, otherwise return NO_REGS.  */
   1229  1.1  mrg static inline enum reg_class
   1230  1.1  mrg get_op_class (rtx op)
   1231  1.1  mrg {
   1232  1.1  mrg   return REG_P (op) ? get_reg_class (REGNO (op)) : NO_REGS;
   1233  1.1  mrg }
   1234  1.1  mrg 
   1235  1.1  mrg /* Return generated insn mem_pseudo:=val if TO_P or val:=mem_pseudo
   1236  1.1  mrg    otherwise.  If modes of MEM_PSEUDO and VAL are different, use
   1237  1.1  mrg    SUBREG for VAL to make them equal.  */
   1238  1.1  mrg static rtx_insn *
   1239  1.1  mrg emit_spill_move (bool to_p, rtx mem_pseudo, rtx val)
   1240  1.1  mrg {
   1241  1.1  mrg   if (GET_MODE (mem_pseudo) != GET_MODE (val))
   1242  1.1  mrg     {
   1243  1.1  mrg       /* Usually size of mem_pseudo is greater than val size but in
   1244  1.1  mrg 	 rare cases it can be less as it can be defined by target
   1245  1.1  mrg 	 dependent macro HARD_REGNO_CALLER_SAVE_MODE.  */
   1246  1.1  mrg       if (! MEM_P (val))
   1247  1.1  mrg 	{
   1248  1.1  mrg 	  val = gen_lowpart_SUBREG (GET_MODE (mem_pseudo),
   1249  1.1  mrg 				    GET_CODE (val) == SUBREG
   1250  1.1  mrg 				    ? SUBREG_REG (val) : val);
   1251  1.1  mrg 	  LRA_SUBREG_P (val) = 1;
   1252  1.1  mrg 	}
   1253  1.1  mrg       else
   1254  1.1  mrg 	{
   1255  1.1  mrg 	  mem_pseudo = gen_lowpart_SUBREG (GET_MODE (val), mem_pseudo);
   1256  1.1  mrg 	  LRA_SUBREG_P (mem_pseudo) = 1;
   1257  1.1  mrg 	}
   1258  1.1  mrg     }
   1259  1.1  mrg   return to_p ? gen_move_insn (mem_pseudo, val)
   1260  1.1  mrg 	      : gen_move_insn (val, mem_pseudo);
   1261  1.1  mrg }
   1262  1.1  mrg 
/* Process a special case insn (register move), return true if we
   don't need to process it anymore.  INSN should be a single set
   insn.  Set up that RTL was changed through CHANGE_P and that hook
   TARGET_SECONDARY_MEMORY_NEEDED says to use secondary memory through
   SEC_MEM_P.  */
static bool
check_and_process_move (bool *change_p, bool *sec_mem_p ATTRIBUTE_UNUSED)
{
  int sregno, dregno;
  rtx dest, src, dreg, sreg, new_reg, scratch_reg;
  rtx_insn *before;
  enum reg_class dclass, sclass, secondary_class;
  secondary_reload_info sri;

  lra_assert (curr_insn_set != NULL_RTX);
  dreg = dest = SET_DEST (curr_insn_set);
  sreg = src = SET_SRC (curr_insn_set);
  /* Look through SUBREGs to get at the underlying REG or MEM.  */
  if (GET_CODE (dest) == SUBREG)
    dreg = SUBREG_REG (dest);
  if (GET_CODE (src) == SUBREG)
    sreg = SUBREG_REG (src);
  /* This fast path only handles reg/mem <- reg/mem moves.  */
  if (! (REG_P (dreg) || MEM_P (dreg)) || ! (REG_P (sreg) || MEM_P (sreg)))
    return false;
  sclass = dclass = NO_REGS;
  if (REG_P (dreg))
    dclass = get_reg_class (REGNO (dreg));
  gcc_assert (dclass < LIM_REG_CLASSES && dclass >= NO_REGS);
  if (dclass == ALL_REGS)
    /* ALL_REGS is used for new pseudos created by transformations
       like reload of SUBREG_REG (see function
       simplify_operand_subreg).  We don't know their class yet.  We
       should figure out the class from processing the insn
       constraints not in this fast path function.  Even if ALL_REGS
       were a right class for the pseudo, secondary_... hooks usually
       are not define for ALL_REGS.  */
    return false;
  if (REG_P (sreg))
    sclass = get_reg_class (REGNO (sreg));
  gcc_assert (sclass < LIM_REG_CLASSES && sclass >= NO_REGS);
  if (sclass == ALL_REGS)
    /* See comments above.  */
    return false;
  if (sclass == NO_REGS && dclass == NO_REGS)
    return false;
  /* Let the target request a move through secondary memory; the
     caller handles that case via *SEC_MEM_P.  */
  if (targetm.secondary_memory_needed (GET_MODE (src), sclass, dclass)
      && ((sclass != NO_REGS && dclass != NO_REGS)
	  || (GET_MODE (src)
	      != targetm.secondary_memory_needed_mode (GET_MODE (src)))))
    {
      *sec_mem_p = true;
      return false;
    }
  if (! REG_P (dreg) || ! REG_P (sreg))
    return false;
  sri.prev_sri = NULL;
  sri.icode = CODE_FOR_nothing;
  sri.extra_cost = 0;
  secondary_class = NO_REGS;
  /* Set up hard register for a reload pseudo for hook
     secondary_reload because some targets just ignore unassigned
     pseudos in the hook.  */
  if (dclass != NO_REGS && lra_get_regno_hard_regno (REGNO (dreg)) < 0)
    {
      dregno = REGNO (dreg);
      reg_renumber[dregno] = ira_class_hard_regs[dclass][0];
    }
  else
    dregno = -1;
  if (sclass != NO_REGS && lra_get_regno_hard_regno (REGNO (sreg)) < 0)
    {
      sregno = REGNO (sreg);
      reg_renumber[sregno] = ira_class_hard_regs[sclass][0];
    }
  else
    sregno = -1;
  /* Ask about an input secondary reload for the destination side
     first, then about an output reload for the source side; the two
     answers are cross-checked below.  */
  if (sclass != NO_REGS)
    secondary_class
      = (enum reg_class) targetm.secondary_reload (false, dest,
						   (reg_class_t) sclass,
						   GET_MODE (src), &sri);
  if (sclass == NO_REGS
      || ((secondary_class != NO_REGS || sri.icode != CODE_FOR_nothing)
	  && dclass != NO_REGS))
    {
      enum reg_class old_sclass = secondary_class;
      secondary_reload_info old_sri = sri;

      sri.prev_sri = NULL;
      sri.icode = CODE_FOR_nothing;
      sri.extra_cost = 0;
      secondary_class
	= (enum reg_class) targetm.secondary_reload (true, src,
						     (reg_class_t) dclass,
						     GET_MODE (src), &sri);
      /* Check the target hook consistency.  */
      lra_assert
	((secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	 || (old_sclass == NO_REGS && old_sri.icode == CODE_FOR_nothing)
	 || (secondary_class == old_sclass && sri.icode == old_sri.icode));
    }
  /* Undo the temporary hard register assignments made above.  */
  if (sregno >= 0)
    reg_renumber [sregno] = -1;
  if (dregno >= 0)
    reg_renumber [dregno] = -1;
  if (secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
    return false;
  *change_p = true;
  new_reg = NULL_RTX;
  if (secondary_class != NO_REGS)
    new_reg = lra_create_new_reg_with_unique_value (GET_MODE (src), NULL_RTX,
						    secondary_class, NULL,
						    "secondary");
  start_sequence ();
  if (sri.icode == CODE_FOR_nothing)
    lra_emit_move (new_reg, src);
  else
    {
      /* Use the insn pattern named by the hook; its operand 2 is the
	 required scratch register.  */
      enum reg_class scratch_class;

      scratch_class = (reg_class_from_constraints
		       (insn_data[sri.icode].operand[2].constraint));
      scratch_reg = (lra_create_new_reg_with_unique_value
		     (insn_data[sri.icode].operand[2].mode, NULL_RTX,
		      scratch_class, NULL, "scratch"));
      emit_insn (GEN_FCN (sri.icode) (new_reg != NULL_RTX ? new_reg : dest,
				      src, scratch_reg));
    }
  before = get_insns ();
  end_sequence ();
  lra_process_new_insns (curr_insn, before, NULL, "Inserting the move");
  if (new_reg != NULL_RTX)
    SET_SRC (curr_insn_set) = new_reg;
  else
    {
      /* The emitted insns did the whole move; the original insn is
	 now redundant.  */
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, "Deleting move %u\n", INSN_UID (curr_insn));
	  dump_insn_slim (lra_dump_file, curr_insn);
	}
      lra_set_insn_deleted (curr_insn);
      return true;
    }
  return false;
}
   1407  1.1  mrg 
/* The following data describe the result of process_alt_operands.
   The data are used in curr_insn_transform to generate reloads.  */

/* The chosen reg classes which should be used for the corresponding
   operands.  */
static enum reg_class goal_alt[MAX_RECOG_OPERANDS];
/* Hard registers which cannot be a start hard register for the corresponding
   operands.  */
static HARD_REG_SET goal_alt_exclude_start_hard_regs[MAX_RECOG_OPERANDS];
/* True if the operand should be the same as another operand and that
   other operand does not need a reload.  */
static bool goal_alt_match_win[MAX_RECOG_OPERANDS];
/* True if the operand does not need a reload.	*/
static bool goal_alt_win[MAX_RECOG_OPERANDS];
/* True if the operand can be offsetable memory.  */
static bool goal_alt_offmemok[MAX_RECOG_OPERANDS];
/* The number of the operand to which the given operand can be
   matched.  */
static int goal_alt_matches[MAX_RECOG_OPERANDS];
/* The number of elements in the following array.  */
static int goal_alt_dont_inherit_ops_num;
/* Numbers of operands whose reload pseudos should not be inherited.  */
static int goal_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
/* True if the insn commutative operands should be swapped.  */
static bool goal_alt_swapped;
/* The chosen insn alternative.	 */
static int goal_alt_number;

/* True if the corresponding operand is the result of an equivalence
   substitution.  (NOTE: the identifier misspells "substitution"; the
   spelling is kept for compatibility with its uses elsewhere in this
   file.)  */
static bool equiv_substition_p[MAX_RECOG_OPERANDS];

/* The following five variables are used to choose the best insn
   alternative.	 They reflect final characteristics of the best
   alternative.	 */

/* Number of necessary reloads and overall cost reflecting the
   previous value and other unpleasantness of the best alternative.  */
static int best_losers, best_overall;
/* Overall number of hard registers used for reloads.  For example, on
   some targets we need 2 general registers to reload DFmode and only
   one floating point register.	 */
static int best_reload_nregs;
/* Overall number reflecting distances of previous reloadings of the
   same value.  The distances are counted from the current BB start.
   It is used to improve inheritance chances.  */
static int best_reload_sum;

/* True if the current insn should have no corresponding input or
   output reloads.  */
static bool no_input_reloads_p, no_output_reloads_p;

/* True if we swapped the commutative operands in the current
   insn.  */
static int curr_swapped;
   1462  1.1  mrg 
/* if CHECK_ONLY_P is false, arrange for address element *LOC to be a
   register of class CL.  Add any input reloads to list BEFORE.  AFTER
   is nonnull if *LOC is an automodified value; handle that case by
   adding the required output reloads to list AFTER.  Return true if
   the RTL was changed.

   if CHECK_ONLY_P is true, check that the *LOC is a correct address
   register.  Return false if the address register is correct.  */
static bool
process_addr_reg (rtx *loc, bool check_only_p, rtx_insn **before, rtx_insn **after,
		  enum reg_class cl)
{
  int regno;
  enum reg_class rclass, new_class;
  rtx reg;
  rtx new_reg;
  machine_mode mode;
  bool subreg_p, before_p = false;

  subreg_p = GET_CODE (*loc) == SUBREG;
  if (subreg_p)
    {
      reg = SUBREG_REG (*loc);
      mode = GET_MODE (reg);

      /* For mode with size bigger than ptr_mode, there unlikely to be "mov"
	 between two registers with different classes, but there normally will
	 be "mov" which transfers element of vector register into the general
	 register, and this normally will be a subreg which should be reloaded
	 as a whole.  This is particularly likely to be triggered when
	 -fno-split-wide-types specified.  */
      if (!REG_P (reg)
	  || in_class_p (reg, cl, &new_class)
	  || known_le (GET_MODE_SIZE (mode), GET_MODE_SIZE (ptr_mode)))
       loc = &SUBREG_REG (*loc);
    }

  /* From here on work on the (possibly inner) address element.  */
  reg = *loc;
  mode = GET_MODE (reg);
  if (! REG_P (reg))
    {
      if (check_only_p)
	return true;
      /* Always reload memory in an address even if the target supports
	 such addresses.  */
      new_reg = lra_create_new_reg_with_unique_value (mode, reg, cl, NULL,
						      "address");
      before_p = true;
    }
  else
    {
      regno = REGNO (reg);
      rclass = get_reg_class (regno);
      /* Substitute an equivalent value for the pseudo if there is
	 one; dump the substitution and work on a copy of it.  */
      if (! check_only_p
	  && (*loc = get_equiv_with_elimination (reg, curr_insn)) != reg)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "Changing pseudo %d in address of insn %u on equiv ",
		       REGNO (reg), INSN_UID (curr_insn));
	      dump_value_slim (lra_dump_file, *loc, 1);
	      fprintf (lra_dump_file, "\n");
	    }
	  *loc = copy_rtx (*loc);
	}
      if (*loc != reg || ! in_class_p (reg, cl, &new_class))
	{
	  /* The element was substituted or is in the wrong class:
	     reload it into a fresh register of class CL.  */
	  if (check_only_p)
	    return true;
	  reg = *loc;
	  if (get_reload_reg (after == NULL ? OP_IN : OP_INOUT,
			      mode, reg, cl, NULL,
			      subreg_p, "address", &new_reg))
	    before_p = true;
	}
      else if (new_class != NO_REGS && rclass != new_class)
	{
	  /* The pseudo itself is usable; just narrow its class.  */
	  if (check_only_p)
	    return true;
	  lra_change_class (regno, new_class, "	   Change to", true);
	  return false;
	}
      else
	return false;
    }
  if (before_p)
    {
      /* Append the input move reg -> new_reg to the BEFORE list.  */
      push_to_sequence (*before);
      lra_emit_move (new_reg, reg);
      *before = get_insns ();
      end_sequence ();
    }
  *loc = new_reg;
  if (after != NULL)
    {
      /* Automodified value: also copy new_reg back after the insn.  */
      start_sequence ();
      lra_emit_move (before_p ? copy_rtx (reg) : reg, new_reg);
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  return true;
}
   1567  1.1  mrg 
   1568  1.1  mrg /* Insert move insn in simplify_operand_subreg. BEFORE returns
   1569  1.1  mrg    the insn to be inserted before curr insn. AFTER returns the
   1570  1.1  mrg    the insn to be inserted after curr insn.  ORIGREG and NEWREG
   1571  1.1  mrg    are the original reg and new reg for reload.  */
   1572  1.1  mrg static void
   1573  1.1  mrg insert_move_for_subreg (rtx_insn **before, rtx_insn **after, rtx origreg,
   1574  1.1  mrg 			rtx newreg)
   1575  1.1  mrg {
   1576  1.1  mrg   if (before)
   1577  1.1  mrg     {
   1578  1.1  mrg       push_to_sequence (*before);
   1579  1.1  mrg       lra_emit_move (newreg, origreg);
   1580  1.1  mrg       *before = get_insns ();
   1581  1.1  mrg       end_sequence ();
   1582  1.1  mrg     }
   1583  1.1  mrg   if (after)
   1584  1.1  mrg     {
   1585  1.1  mrg       start_sequence ();
   1586  1.1  mrg       lra_emit_move (origreg, newreg);
   1587  1.1  mrg       emit_insn (*after);
   1588  1.1  mrg       *after = get_insns ();
   1589  1.1  mrg       end_sequence ();
   1590  1.1  mrg     }
   1591  1.1  mrg }
   1592  1.1  mrg 
   1593  1.1  mrg static int valid_address_p (machine_mode mode, rtx addr, addr_space_t as);
   1594  1.1  mrg static bool process_address (int, bool, rtx_insn **, rtx_insn **);
   1595  1.1  mrg 
   1596  1.1  mrg /* Make reloads for subreg in operand NOP with internal subreg mode
   1597  1.1  mrg    REG_MODE, add new reloads for further processing.  Return true if
   1598  1.1  mrg    any change was done.  */
   1599  1.1  mrg static bool
   1600  1.1  mrg simplify_operand_subreg (int nop, machine_mode reg_mode)
   1601  1.1  mrg {
   1602  1.1  mrg   int hard_regno, inner_hard_regno;
   1603  1.1  mrg   rtx_insn *before, *after;
   1604  1.1  mrg   machine_mode mode, innermode;
   1605  1.1  mrg   rtx reg, new_reg;
   1606  1.1  mrg   rtx operand = *curr_id->operand_loc[nop];
   1607  1.1  mrg   enum reg_class regclass;
   1608  1.1  mrg   enum op_type type;
   1609  1.1  mrg 
   1610  1.1  mrg   before = after = NULL;
   1611  1.1  mrg 
   1612  1.1  mrg   if (GET_CODE (operand) != SUBREG)
   1613  1.1  mrg     return false;
   1614  1.1  mrg 
   1615  1.1  mrg   mode = GET_MODE (operand);
   1616  1.1  mrg   reg = SUBREG_REG (operand);
   1617  1.1  mrg   innermode = GET_MODE (reg);
   1618  1.1  mrg   type = curr_static_id->operand[nop].type;
   1619  1.1  mrg   if (MEM_P (reg))
   1620  1.1  mrg     {
   1621  1.1  mrg       const bool addr_was_valid
   1622  1.1  mrg 	= valid_address_p (innermode, XEXP (reg, 0), MEM_ADDR_SPACE (reg));
   1623  1.1  mrg       alter_subreg (curr_id->operand_loc[nop], false);
   1624  1.1  mrg       rtx subst = *curr_id->operand_loc[nop];
   1625  1.1  mrg       lra_assert (MEM_P (subst));
   1626  1.1  mrg       const bool addr_is_valid = valid_address_p (GET_MODE (subst),
   1627  1.1  mrg 						  XEXP (subst, 0),
   1628  1.1  mrg 						  MEM_ADDR_SPACE (subst));
   1629  1.1  mrg       if (!addr_was_valid
   1630  1.1  mrg 	  || addr_is_valid
   1631  1.1  mrg 	  || ((get_constraint_type (lookup_constraint
   1632  1.1  mrg 				    (curr_static_id->operand[nop].constraint))
   1633  1.1  mrg 	       != CT_SPECIAL_MEMORY)
   1634  1.1  mrg 	      /* We still can reload address and if the address is
   1635  1.1  mrg 		 valid, we can remove subreg without reloading its
   1636  1.1  mrg 		 inner memory.  */
   1637  1.1  mrg 	      && valid_address_p (GET_MODE (subst),
   1638  1.1  mrg 				  regno_reg_rtx
   1639  1.1  mrg 				  [ira_class_hard_regs
   1640  1.1  mrg 				   [base_reg_class (GET_MODE (subst),
   1641  1.1  mrg 						    MEM_ADDR_SPACE (subst),
   1642  1.1  mrg 						    ADDRESS, SCRATCH)][0]],
   1643  1.1  mrg 				  MEM_ADDR_SPACE (subst))))
   1644  1.1  mrg 	{
   1645  1.1  mrg 	  /* If we change the address for a paradoxical subreg of memory, the
   1646  1.1  mrg 	     new address might violate the necessary alignment or the access
   1647  1.1  mrg 	     might be slow; take this into consideration.  We need not worry
   1648  1.1  mrg 	     about accesses beyond allocated memory for paradoxical memory
   1649  1.1  mrg 	     subregs as we don't substitute such equiv memory (see processing
   1650  1.1  mrg 	     equivalences in function lra_constraints) and because for spilled
   1651  1.1  mrg 	     pseudos we allocate stack memory enough for the biggest
   1652  1.1  mrg 	     corresponding paradoxical subreg.
   1653  1.1  mrg 
   1654  1.1  mrg 	     However, do not blindly simplify a (subreg (mem ...)) for
   1655  1.1  mrg 	     WORD_REGISTER_OPERATIONS targets as this may lead to loading junk
   1656  1.1  mrg 	     data into a register when the inner is narrower than outer or
   1657  1.1  mrg 	     missing important data from memory when the inner is wider than
   1658  1.1  mrg 	     outer.  This rule only applies to modes that are no wider than
   1659  1.1  mrg 	     a word.
   1660  1.1  mrg 
   1661  1.1  mrg 	     If valid memory becomes invalid after subreg elimination
   1662  1.1  mrg 	     and address might be different we still have to reload
   1663  1.1  mrg 	     memory.
   1664  1.1  mrg 	  */
   1665  1.1  mrg 	  if ((! addr_was_valid
   1666  1.1  mrg 	       || addr_is_valid
   1667  1.1  mrg 	       || known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (innermode)))
   1668  1.1  mrg 	      && !(maybe_ne (GET_MODE_PRECISION (mode),
   1669  1.1  mrg 			     GET_MODE_PRECISION (innermode))
   1670  1.1  mrg 		   && known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD)
   1671  1.1  mrg 		   && known_le (GET_MODE_SIZE (innermode), UNITS_PER_WORD)
   1672  1.1  mrg 		   && WORD_REGISTER_OPERATIONS)
   1673  1.1  mrg 	      && (!(MEM_ALIGN (subst) < GET_MODE_ALIGNMENT (mode)
   1674  1.1  mrg 		    && targetm.slow_unaligned_access (mode, MEM_ALIGN (subst)))
   1675  1.1  mrg 		  || (MEM_ALIGN (reg) < GET_MODE_ALIGNMENT (innermode)
   1676  1.1  mrg 		      && targetm.slow_unaligned_access (innermode,
   1677  1.1  mrg 							MEM_ALIGN (reg)))))
   1678  1.1  mrg 	    return true;
   1679  1.1  mrg 
   1680  1.1  mrg 	  *curr_id->operand_loc[nop] = operand;
   1681  1.1  mrg 
   1682  1.1  mrg 	  /* But if the address was not valid, we cannot reload the MEM without
   1683  1.1  mrg 	     reloading the address first.  */
   1684  1.1  mrg 	  if (!addr_was_valid)
   1685  1.1  mrg 	    process_address (nop, false, &before, &after);
   1686  1.1  mrg 
   1687  1.1  mrg 	  /* INNERMODE is fast, MODE slow.  Reload the mem in INNERMODE.  */
   1688  1.1  mrg 	  enum reg_class rclass
   1689  1.1  mrg 	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
   1690  1.1  mrg 	  if (get_reload_reg (curr_static_id->operand[nop].type, innermode,
   1691  1.1  mrg 			      reg, rclass, NULL,
   1692  1.1  mrg 			      TRUE, "slow/invalid mem", &new_reg))
   1693  1.1  mrg 	    {
   1694  1.1  mrg 	      bool insert_before, insert_after;
   1695  1.1  mrg 	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
   1696  1.1  mrg 
   1697  1.1  mrg 	      insert_before = (type != OP_OUT
   1698  1.1  mrg 			       || partial_subreg_p (mode, innermode));
   1699  1.1  mrg 	      insert_after = type != OP_IN;
   1700  1.1  mrg 	      insert_move_for_subreg (insert_before ? &before : NULL,
   1701  1.1  mrg 				      insert_after ? &after : NULL,
   1702  1.1  mrg 				      reg, new_reg);
   1703  1.1  mrg 	    }
   1704  1.1  mrg 	  SUBREG_REG (operand) = new_reg;
   1705  1.1  mrg 
   1706  1.1  mrg 	  /* Convert to MODE.  */
   1707  1.1  mrg 	  reg = operand;
   1708  1.1  mrg 	  rclass
   1709  1.1  mrg 	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
   1710  1.1  mrg 	  if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
   1711  1.1  mrg 			      rclass, NULL,
   1712  1.1  mrg 			      TRUE, "slow/invalid mem", &new_reg))
   1713  1.1  mrg 	    {
   1714  1.1  mrg 	      bool insert_before, insert_after;
   1715  1.1  mrg 	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
   1716  1.1  mrg 
   1717  1.1  mrg 	      insert_before = type != OP_OUT;
   1718  1.1  mrg 	      insert_after = type != OP_IN;
   1719  1.1  mrg 	      insert_move_for_subreg (insert_before ? &before : NULL,
   1720  1.1  mrg 				      insert_after ? &after : NULL,
   1721  1.1  mrg 				      reg, new_reg);
   1722  1.1  mrg 	    }
   1723  1.1  mrg 	  *curr_id->operand_loc[nop] = new_reg;
   1724  1.1  mrg 	  lra_process_new_insns (curr_insn, before, after,
   1725  1.1  mrg 				 "Inserting slow/invalid mem reload");
   1726  1.1  mrg 	  return true;
   1727  1.1  mrg 	}
   1728  1.1  mrg 
   1729  1.1  mrg       /* If the address was valid and became invalid, prefer to reload
   1730  1.1  mrg 	 the memory.  Typical case is when the index scale should
   1731  1.1  mrg 	 correspond the memory.  */
   1732  1.1  mrg       *curr_id->operand_loc[nop] = operand;
   1733  1.1  mrg       /* Do not return false here as the MEM_P (reg) will be processed
   1734  1.1  mrg 	 later in this function.  */
   1735  1.1  mrg     }
   1736  1.1  mrg   else if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
   1737  1.1  mrg     {
   1738  1.1  mrg       alter_subreg (curr_id->operand_loc[nop], false);
   1739  1.1  mrg       return true;
   1740  1.1  mrg     }
   1741  1.1  mrg   else if (CONSTANT_P (reg))
   1742  1.1  mrg     {
   1743  1.1  mrg       /* Try to simplify subreg of constant.  It is usually result of
   1744  1.1  mrg 	 equivalence substitution.  */
   1745  1.1  mrg       if (innermode == VOIDmode
   1746  1.1  mrg 	  && (innermode = original_subreg_reg_mode[nop]) == VOIDmode)
   1747  1.1  mrg 	innermode = curr_static_id->operand[nop].mode;
   1748  1.1  mrg       if ((new_reg = simplify_subreg (mode, reg, innermode,
   1749  1.1  mrg 				      SUBREG_BYTE (operand))) != NULL_RTX)
   1750  1.1  mrg 	{
   1751  1.1  mrg 	  *curr_id->operand_loc[nop] = new_reg;
   1752  1.1  mrg 	  return true;
   1753  1.1  mrg 	}
   1754  1.1  mrg     }
   1755  1.1  mrg   /* Put constant into memory when we have mixed modes.  It generates
   1756  1.1  mrg      a better code in most cases as it does not need a secondary
   1757  1.1  mrg      reload memory.  It also prevents LRA looping when LRA is using
   1758  1.1  mrg      secondary reload memory again and again.  */
   1759  1.1  mrg   if (CONSTANT_P (reg) && CONST_POOL_OK_P (reg_mode, reg)
   1760  1.1  mrg       && SCALAR_INT_MODE_P (reg_mode) != SCALAR_INT_MODE_P (mode))
   1761  1.1  mrg     {
   1762  1.1  mrg       SUBREG_REG (operand) = force_const_mem (reg_mode, reg);
   1763  1.1  mrg       alter_subreg (curr_id->operand_loc[nop], false);
   1764  1.1  mrg       return true;
   1765  1.1  mrg     }
   1766  1.1  mrg   /* Force a reload of the SUBREG_REG if this is a constant or PLUS or
   1767  1.1  mrg      if there may be a problem accessing OPERAND in the outer
   1768  1.1  mrg      mode.  */
   1769  1.1  mrg   if ((REG_P (reg)
   1770  1.1  mrg        && REGNO (reg) >= FIRST_PSEUDO_REGISTER
   1771  1.1  mrg        && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
   1772  1.1  mrg        /* Don't reload paradoxical subregs because we could be looping
   1773  1.1  mrg 	  having repeatedly final regno out of hard regs range.  */
   1774  1.1  mrg        && (hard_regno_nregs (hard_regno, innermode)
   1775  1.1  mrg 	   >= hard_regno_nregs (hard_regno, mode))
   1776  1.1  mrg        && simplify_subreg_regno (hard_regno, innermode,
   1777  1.1  mrg 				 SUBREG_BYTE (operand), mode) < 0
   1778  1.1  mrg        /* Don't reload subreg for matching reload.  It is actually
   1779  1.1  mrg 	  valid subreg in LRA.  */
   1780  1.1  mrg        && ! LRA_SUBREG_P (operand))
   1781  1.1  mrg       || CONSTANT_P (reg) || GET_CODE (reg) == PLUS || MEM_P (reg))
   1782  1.1  mrg     {
   1783  1.1  mrg       enum reg_class rclass;
   1784  1.1  mrg 
   1785  1.1  mrg       if (REG_P (reg))
   1786  1.1  mrg 	/* There is a big probability that we will get the same class
   1787  1.1  mrg 	   for the new pseudo and we will get the same insn which
   1788  1.1  mrg 	   means infinite looping.  So spill the new pseudo.  */
   1789  1.1  mrg 	rclass = NO_REGS;
   1790  1.1  mrg       else
   1791  1.1  mrg 	/* The class will be defined later in curr_insn_transform.  */
   1792  1.1  mrg 	rclass
   1793  1.1  mrg 	  = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
   1794  1.1  mrg 
   1795  1.1  mrg       if (get_reload_reg (curr_static_id->operand[nop].type, reg_mode, reg,
   1796  1.1  mrg 			  rclass, NULL,
   1797  1.1  mrg 			  TRUE, "subreg reg", &new_reg))
   1798  1.1  mrg 	{
   1799  1.1  mrg 	  bool insert_before, insert_after;
   1800  1.1  mrg 	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
   1801  1.1  mrg 
   1802  1.1  mrg 	  insert_before = (type != OP_OUT
   1803  1.1  mrg 			   || read_modify_subreg_p (operand));
   1804  1.1  mrg 	  insert_after = (type != OP_IN);
   1805  1.1  mrg 	  insert_move_for_subreg (insert_before ? &before : NULL,
   1806  1.1  mrg 				  insert_after ? &after : NULL,
   1807  1.1  mrg 				  reg, new_reg);
   1808  1.1  mrg 	}
   1809  1.1  mrg       SUBREG_REG (operand) = new_reg;
   1810  1.1  mrg       lra_process_new_insns (curr_insn, before, after,
   1811  1.1  mrg 			     "Inserting subreg reload");
   1812  1.1  mrg       return true;
   1813  1.1  mrg     }
   1814  1.1  mrg   /* Force a reload for a paradoxical subreg. For paradoxical subreg,
   1815  1.1  mrg      IRA allocates hardreg to the inner pseudo reg according to its mode
   1816  1.1  mrg      instead of the outermode, so the size of the hardreg may not be enough
   1817  1.1  mrg      to contain the outermode operand, in that case we may need to insert
   1818  1.1  mrg      reload for the reg. For the following two types of paradoxical subreg,
   1819  1.1  mrg      we need to insert reload:
   1820  1.1  mrg      1. If the op_type is OP_IN, and the hardreg could not be paired with
   1821  1.1  mrg         other hardreg to contain the outermode operand
   1822  1.1  mrg         (checked by in_hard_reg_set_p), we need to insert the reload.
   1823  1.1  mrg      2. If the op_type is OP_OUT or OP_INOUT.
   1824  1.1  mrg 
   1825  1.1  mrg      Here is a paradoxical subreg example showing how the reload is generated:
   1826  1.1  mrg 
   1827  1.1  mrg      (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
   1828  1.1  mrg         (subreg:TI (reg:DI 107 [ __comp ]) 0)) {*movti_internal_rex64}
   1829  1.1  mrg 
   1830  1.1  mrg      In IRA, reg107 is allocated to a DImode hardreg. We use x86-64 as example
   1831  1.1  mrg      here, if reg107 is assigned to hardreg R15, because R15 is the last
   1832  1.1  mrg      hardreg, compiler cannot find another hardreg to pair with R15 to
   1833  1.1  mrg      contain TImode data. So we insert a TImode reload reg180 for it.
   1834  1.1  mrg      After reload is inserted:
   1835  1.1  mrg 
   1836  1.1  mrg      (insn 283 0 0 (set (subreg:DI (reg:TI 180 [orig:107 __comp ] [107]) 0)
   1837  1.1  mrg         (reg:DI 107 [ __comp ])) -1
   1838  1.1  mrg      (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
   1839  1.1  mrg         (subreg:TI (reg:TI 180 [orig:107 __comp ] [107]) 0)) {*movti_internal_rex64}
   1840  1.1  mrg 
   1841  1.1  mrg      Two reload hard registers will be allocated to reg180 to save TImode data
   1842  1.1  mrg      in LRA_assign.
   1843  1.1  mrg 
   1844  1.1  mrg      For LRA pseudos this should normally be handled by the biggest_mode
   1845  1.1  mrg      mechanism.  However, it's possible for new uses of an LRA pseudo
   1846  1.1  mrg      to be introduced after we've allocated it, such as when undoing
   1847  1.1  mrg      inheritance, and the allocated register might not then be appropriate
   1848  1.1  mrg      for the new uses.  */
   1849  1.1  mrg   else if (REG_P (reg)
   1850  1.1  mrg 	   && REGNO (reg) >= FIRST_PSEUDO_REGISTER
   1851  1.1  mrg 	   && paradoxical_subreg_p (operand)
   1852  1.1  mrg 	   && (inner_hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
   1853  1.1  mrg 	   && ((hard_regno
   1854  1.1  mrg 		= simplify_subreg_regno (inner_hard_regno, innermode,
   1855  1.1  mrg 					 SUBREG_BYTE (operand), mode)) < 0
   1856  1.1  mrg 	       || ((hard_regno_nregs (inner_hard_regno, innermode)
   1857  1.1  mrg 		    < hard_regno_nregs (hard_regno, mode))
   1858  1.1  mrg 		   && (regclass = lra_get_allocno_class (REGNO (reg)))
   1859  1.1  mrg 		   && (type != OP_IN
   1860  1.1  mrg 		       || !in_hard_reg_set_p (reg_class_contents[regclass],
   1861  1.1  mrg 					      mode, hard_regno)
   1862  1.1  mrg 		       || overlaps_hard_reg_set_p (lra_no_alloc_regs,
   1863  1.1  mrg 						   mode, hard_regno)))))
   1864  1.1  mrg     {
   1865  1.1  mrg       /* The class will be defined later in curr_insn_transform.  */
   1866  1.1  mrg       enum reg_class rclass
   1867  1.1  mrg 	= (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
   1868  1.1  mrg 
   1869  1.1  mrg       if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
   1870  1.1  mrg                           rclass, NULL,
   1871  1.1  mrg 			  TRUE, "paradoxical subreg", &new_reg))
   1872  1.1  mrg         {
   1873  1.1  mrg 	  rtx subreg;
   1874  1.1  mrg 	  bool insert_before, insert_after;
   1875  1.1  mrg 
   1876  1.1  mrg 	  PUT_MODE (new_reg, mode);
   1877  1.1  mrg           subreg = gen_lowpart_SUBREG (innermode, new_reg);
   1878  1.1  mrg 	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
   1879  1.1  mrg 
   1880  1.1  mrg 	  insert_before = (type != OP_OUT);
   1881  1.1  mrg 	  insert_after = (type != OP_IN);
   1882  1.1  mrg 	  insert_move_for_subreg (insert_before ? &before : NULL,
   1883  1.1  mrg 				  insert_after ? &after : NULL,
   1884  1.1  mrg 				  reg, subreg);
   1885  1.1  mrg 	}
   1886  1.1  mrg       SUBREG_REG (operand) = new_reg;
   1887  1.1  mrg       lra_process_new_insns (curr_insn, before, after,
   1888  1.1  mrg                              "Inserting paradoxical subreg reload");
   1889  1.1  mrg       return true;
   1890  1.1  mrg     }
   1891  1.1  mrg   return false;
   1892  1.1  mrg }
   1893  1.1  mrg 
/* Return TRUE if X refers to a hard register from SET.  */
static bool
uses_hard_regs_p (rtx x, HARD_REG_SET set)
{
  int i, j, x_hard_regno;
  machine_mode mode;
  const char *fmt;
  enum rtx_code code;

  if (x == NULL_RTX)
    return false;
  code = GET_CODE (x);
  mode = GET_MODE (x);

  if (code == SUBREG)
    {
      /* For all SUBREGs we want to check whether the full multi-register
	 overlaps the set.  For normal SUBREGs this means 'get_hard_regno' of
	 the inner register, for paradoxical SUBREGs this means the
	 'get_hard_regno' of the full SUBREG and for complete SUBREGs either is
	 fine.  Use the wider mode for all cases.  */
      rtx subreg = SUBREG_REG (x);
      mode = wider_subreg_mode (x);
      if (mode == GET_MODE (subreg))
	{
	  /* The inner mode is the wider (or equal) one: a normal or
	     complete SUBREG.  Descend to the inner register and check
	     it in its own (wider) mode.  */
	  x = subreg;
	  code = GET_CODE (x);
	}
    }

  if (REG_P (x) || SUBREG_P (x))
    {
      /* A (possibly still paradoxical) register: overlap test in the
	 wider mode so all hard regs spanned by the value are checked.  */
      x_hard_regno = get_hard_regno (x, true);
      return (x_hard_regno >= 0
	      && overlaps_hard_reg_set_p (set, mode, x_hard_regno));
    }
  /* Not a register: recurse into all rtx subexpressions ('e') and
     rtx vectors ('E') of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (uses_hard_regs_p (XEXP (x, i), set))
	    return true;
	}
      else if (fmt[i] == 'E')
	{
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (uses_hard_regs_p (XVECEXP (x, i, j), set))
	      return true;
	}
    }
  return false;
}
   1947  1.1  mrg 
   1948  1.1  mrg /* Return true if OP is a spilled pseudo. */
   1949  1.1  mrg static inline bool
   1950  1.1  mrg spilled_pseudo_p (rtx op)
   1951  1.1  mrg {
   1952  1.1  mrg   return (REG_P (op)
   1953  1.1  mrg 	  && REGNO (op) >= FIRST_PSEUDO_REGISTER && in_mem_p (REGNO (op)));
   1954  1.1  mrg }
   1955  1.1  mrg 
   1956  1.1  mrg /* Return true if X is a general constant.  */
   1957  1.1  mrg static inline bool
   1958  1.1  mrg general_constant_p (rtx x)
   1959  1.1  mrg {
   1960  1.1  mrg   return CONSTANT_P (x) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (x));
   1961  1.1  mrg }
   1962  1.1  mrg 
   1963  1.1  mrg static bool
   1964  1.1  mrg reg_in_class_p (rtx reg, enum reg_class cl)
   1965  1.1  mrg {
   1966  1.1  mrg   if (cl == NO_REGS)
   1967  1.1  mrg     return get_reg_class (REGNO (reg)) == NO_REGS;
   1968  1.1  mrg   return in_class_p (reg, cl, NULL);
   1969  1.1  mrg }
   1970  1.1  mrg 
   1971  1.1  mrg /* Return true if SET of RCLASS contains no hard regs which can be
   1972  1.1  mrg    used in MODE.  */
   1973  1.1  mrg static bool
   1974  1.1  mrg prohibited_class_reg_set_mode_p (enum reg_class rclass,
   1975  1.1  mrg 				 HARD_REG_SET &set,
   1976  1.1  mrg 				 machine_mode mode)
   1977  1.1  mrg {
   1978  1.1  mrg   HARD_REG_SET temp;
   1979  1.1  mrg 
   1980  1.1  mrg   lra_assert (hard_reg_set_subset_p (reg_class_contents[rclass], set));
   1981  1.1  mrg   temp = set & ~lra_no_alloc_regs;
   1982  1.1  mrg   return (hard_reg_set_subset_p
   1983  1.1  mrg 	  (temp, ira_prohibited_class_mode_regs[rclass][mode]));
   1984  1.1  mrg }
   1985  1.1  mrg 
   1986  1.1  mrg 
/* Used to check validity of info about small class input operands.
   It should be incremented at the start of processing an insn
   alternative, so that the static per-class counters in
   update_and_check_small_class_inputs can be reset lazily.  */
static unsigned int curr_small_class_check = 0;
   1991  1.1  mrg 
   1992  1.1  mrg /* Update number of used inputs of class OP_CLASS for operand NOP
   1993  1.1  mrg    of alternative NALT.  Return true if we have more such class operands
   1994  1.1  mrg    than the number of available regs.  */
   1995  1.1  mrg static bool
   1996  1.1  mrg update_and_check_small_class_inputs (int nop, int nalt,
   1997  1.1  mrg 				     enum reg_class op_class)
   1998  1.1  mrg {
   1999  1.1  mrg   static unsigned int small_class_check[LIM_REG_CLASSES];
   2000  1.1  mrg   static int small_class_input_nums[LIM_REG_CLASSES];
   2001  1.1  mrg 
   2002  1.1  mrg   if (SMALL_REGISTER_CLASS_P (op_class)
   2003  1.1  mrg       /* We are interesting in classes became small because of fixing
   2004  1.1  mrg 	 some hard regs, e.g. by an user through GCC options.  */
   2005  1.1  mrg       && hard_reg_set_intersect_p (reg_class_contents[op_class],
   2006  1.1  mrg 				   ira_no_alloc_regs)
   2007  1.1  mrg       && (curr_static_id->operand[nop].type != OP_OUT
   2008  1.1  mrg 	  || TEST_BIT (curr_static_id->operand[nop].early_clobber_alts, nalt)))
   2009  1.1  mrg     {
   2010  1.1  mrg       if (small_class_check[op_class] == curr_small_class_check)
   2011  1.1  mrg 	small_class_input_nums[op_class]++;
   2012  1.1  mrg       else
   2013  1.1  mrg 	{
   2014  1.1  mrg 	  small_class_check[op_class] = curr_small_class_check;
   2015  1.1  mrg 	  small_class_input_nums[op_class] = 1;
   2016  1.1  mrg 	}
   2017  1.1  mrg       if (small_class_input_nums[op_class] > ira_class_hard_regs_num[op_class])
   2018  1.1  mrg 	return true;
   2019  1.1  mrg     }
   2020  1.1  mrg   return false;
   2021  1.1  mrg }
   2022  1.1  mrg 
   2023  1.1  mrg /* Major function to choose the current insn alternative and what
   2024  1.1  mrg    operands should be reloaded and how.	 If ONLY_ALTERNATIVE is not
   2025  1.1  mrg    negative we should consider only this alternative.  Return false if
   2026  1.1  mrg    we cannot choose the alternative or find how to reload the
   2027  1.1  mrg    operands.  */
   2028  1.1  mrg static bool
   2029  1.1  mrg process_alt_operands (int only_alternative)
   2030  1.1  mrg {
   2031  1.1  mrg   bool ok_p = false;
   2032  1.1  mrg   int nop, overall, nalt;
   2033  1.1  mrg   int n_alternatives = curr_static_id->n_alternatives;
   2034  1.1  mrg   int n_operands = curr_static_id->n_operands;
   2035  1.1  mrg   /* LOSERS counts the operands that don't fit this alternative and
   2036  1.1  mrg      would require loading.  */
   2037  1.1  mrg   int losers;
   2038  1.1  mrg   int addr_losers;
   2039  1.1  mrg   /* REJECT is a count of how undesirable this alternative says it is
   2040  1.1  mrg      if any reloading is required.  If the alternative matches exactly
   2041  1.1  mrg      then REJECT is ignored, but otherwise it gets this much counted
   2042  1.1  mrg      against it in addition to the reloading needed.  */
   2043  1.1  mrg   int reject;
   2044  1.1  mrg   /* This is defined by '!' or '?' alternative constraint and added to
   2045  1.1  mrg      reject.  But in some cases it can be ignored.  */
   2046  1.1  mrg   int static_reject;
   2047  1.1  mrg   int op_reject;
   2048  1.1  mrg   /* The number of elements in the following array.  */
   2049  1.1  mrg   int early_clobbered_regs_num;
   2050  1.1  mrg   /* Numbers of operands which are early clobber registers.  */
   2051  1.1  mrg   int early_clobbered_nops[MAX_RECOG_OPERANDS];
   2052  1.1  mrg   enum reg_class curr_alt[MAX_RECOG_OPERANDS];
   2053  1.1  mrg   HARD_REG_SET curr_alt_set[MAX_RECOG_OPERANDS];
   2054  1.1  mrg   HARD_REG_SET curr_alt_exclude_start_hard_regs[MAX_RECOG_OPERANDS];
   2055  1.1  mrg   bool curr_alt_match_win[MAX_RECOG_OPERANDS];
   2056  1.1  mrg   bool curr_alt_win[MAX_RECOG_OPERANDS];
   2057  1.1  mrg   bool curr_alt_offmemok[MAX_RECOG_OPERANDS];
   2058  1.1  mrg   int curr_alt_matches[MAX_RECOG_OPERANDS];
   2059  1.1  mrg   /* The number of elements in the following array.  */
   2060  1.1  mrg   int curr_alt_dont_inherit_ops_num;
   2061  1.1  mrg   /* Numbers of operands whose reload pseudos should not be inherited.	*/
   2062  1.1  mrg   int curr_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
   2063  1.1  mrg   rtx op;
   2064  1.1  mrg   /* The register when the operand is a subreg of register, otherwise the
   2065  1.1  mrg      operand itself.  */
   2066  1.1  mrg   rtx no_subreg_reg_operand[MAX_RECOG_OPERANDS];
   2067  1.1  mrg   /* The register if the operand is a register or subreg of register,
   2068  1.1  mrg      otherwise NULL.  */
   2069  1.1  mrg   rtx operand_reg[MAX_RECOG_OPERANDS];
   2070  1.1  mrg   int hard_regno[MAX_RECOG_OPERANDS];
   2071  1.1  mrg   machine_mode biggest_mode[MAX_RECOG_OPERANDS];
   2072  1.1  mrg   int reload_nregs, reload_sum;
   2073  1.1  mrg   bool costly_p;
   2074  1.1  mrg   enum reg_class cl;
   2075  1.1  mrg 
   2076  1.1  mrg   /* Calculate some data common for all alternatives to speed up the
   2077  1.1  mrg      function.	*/
   2078  1.1  mrg   for (nop = 0; nop < n_operands; nop++)
   2079  1.1  mrg     {
   2080  1.1  mrg       rtx reg;
   2081  1.1  mrg 
   2082  1.1  mrg       op = no_subreg_reg_operand[nop] = *curr_id->operand_loc[nop];
   2083  1.1  mrg       /* The real hard regno of the operand after the allocation.  */
   2084  1.1  mrg       hard_regno[nop] = get_hard_regno (op, true);
   2085  1.1  mrg 
   2086  1.1  mrg       operand_reg[nop] = reg = op;
   2087  1.1  mrg       biggest_mode[nop] = GET_MODE (op);
   2088  1.1  mrg       if (GET_CODE (op) == SUBREG)
   2089  1.1  mrg 	{
   2090  1.1  mrg 	  biggest_mode[nop] = wider_subreg_mode (op);
   2091  1.1  mrg 	  operand_reg[nop] = reg = SUBREG_REG (op);
   2092  1.1  mrg 	}
   2093  1.1  mrg       if (! REG_P (reg))
   2094  1.1  mrg 	operand_reg[nop] = NULL_RTX;
   2095  1.1  mrg       else if (REGNO (reg) >= FIRST_PSEUDO_REGISTER
   2096  1.1  mrg 	       || ((int) REGNO (reg)
   2097  1.1  mrg 		   == lra_get_elimination_hard_regno (REGNO (reg))))
   2098  1.1  mrg 	no_subreg_reg_operand[nop] = reg;
   2099  1.1  mrg       else
   2100  1.1  mrg 	operand_reg[nop] = no_subreg_reg_operand[nop]
   2101  1.1  mrg 	  /* Just use natural mode for elimination result.  It should
   2102  1.1  mrg 	     be enough for extra constraints hooks.  */
   2103  1.1  mrg 	  = regno_reg_rtx[hard_regno[nop]];
   2104  1.1  mrg     }
   2105  1.1  mrg 
   2106  1.1  mrg   /* The constraints are made of several alternatives.	Each operand's
   2107  1.1  mrg      constraint looks like foo,bar,... with commas separating the
   2108  1.1  mrg      alternatives.  The first alternatives for all operands go
   2109  1.1  mrg      together, the second alternatives go together, etc.
   2110  1.1  mrg 
   2111  1.1  mrg      First loop over alternatives.  */
   2112  1.1  mrg   alternative_mask preferred = curr_id->preferred_alternatives;
   2113  1.1  mrg   if (only_alternative >= 0)
   2114  1.1  mrg     preferred &= ALTERNATIVE_BIT (only_alternative);
   2115  1.1  mrg 
   2116  1.1  mrg   for (nalt = 0; nalt < n_alternatives; nalt++)
   2117  1.1  mrg     {
   2118  1.1  mrg       /* Loop over operands for one constraint alternative.  */
   2119  1.1  mrg       if (!TEST_BIT (preferred, nalt))
   2120  1.1  mrg 	continue;
   2121  1.1  mrg 
   2122  1.1  mrg       bool matching_early_clobber[MAX_RECOG_OPERANDS];
   2123  1.1  mrg       curr_small_class_check++;
   2124  1.1  mrg       overall = losers = addr_losers = 0;
   2125  1.1  mrg       static_reject = reject = reload_nregs = reload_sum = 0;
   2126  1.1  mrg       for (nop = 0; nop < n_operands; nop++)
   2127  1.1  mrg 	{
   2128  1.1  mrg 	  int inc = (curr_static_id
   2129  1.1  mrg 		     ->operand_alternative[nalt * n_operands + nop].reject);
   2130  1.1  mrg 	  if (lra_dump_file != NULL && inc != 0)
   2131  1.1  mrg 	    fprintf (lra_dump_file,
   2132  1.1  mrg 		     "            Staticly defined alt reject+=%d\n", inc);
   2133  1.1  mrg 	  static_reject += inc;
   2134  1.1  mrg 	  matching_early_clobber[nop] = 0;
   2135  1.1  mrg 	}
   2136  1.1  mrg       reject += static_reject;
   2137  1.1  mrg       early_clobbered_regs_num = 0;
   2138  1.1  mrg 
   2139  1.1  mrg       for (nop = 0; nop < n_operands; nop++)
   2140  1.1  mrg 	{
   2141  1.1  mrg 	  const char *p;
   2142  1.1  mrg 	  char *end;
   2143  1.1  mrg 	  int len, c, m, i, opalt_num, this_alternative_matches;
   2144  1.1  mrg 	  bool win, did_match, offmemok, early_clobber_p;
   2145  1.1  mrg 	  /* false => this operand can be reloaded somehow for this
   2146  1.1  mrg 	     alternative.  */
   2147  1.1  mrg 	  bool badop;
   2148  1.1  mrg 	  /* true => this operand can be reloaded if the alternative
   2149  1.1  mrg 	     allows regs.  */
   2150  1.1  mrg 	  bool winreg;
   2151  1.1  mrg 	  /* True if a constant forced into memory would be OK for
   2152  1.1  mrg 	     this operand.  */
   2153  1.1  mrg 	  bool constmemok;
   2154  1.1  mrg 	  enum reg_class this_alternative, this_costly_alternative;
   2155  1.1  mrg 	  HARD_REG_SET this_alternative_set, this_costly_alternative_set;
   2156  1.1  mrg 	  HARD_REG_SET this_alternative_exclude_start_hard_regs;
   2157  1.1  mrg 	  bool this_alternative_match_win, this_alternative_win;
   2158  1.1  mrg 	  bool this_alternative_offmemok;
   2159  1.1  mrg 	  bool scratch_p;
   2160  1.1  mrg 	  machine_mode mode;
   2161  1.1  mrg 	  enum constraint_num cn;
   2162  1.1  mrg 
   2163  1.1  mrg 	  opalt_num = nalt * n_operands + nop;
   2164  1.1  mrg 	  if (curr_static_id->operand_alternative[opalt_num].anything_ok)
   2165  1.1  mrg 	    {
   2166  1.1  mrg 	      /* Fast track for no constraints at all.	*/
   2167  1.1  mrg 	      curr_alt[nop] = NO_REGS;
   2168  1.1  mrg 	      CLEAR_HARD_REG_SET (curr_alt_set[nop]);
   2169  1.1  mrg 	      curr_alt_win[nop] = true;
   2170  1.1  mrg 	      curr_alt_match_win[nop] = false;
   2171  1.1  mrg 	      curr_alt_offmemok[nop] = false;
   2172  1.1  mrg 	      curr_alt_matches[nop] = -1;
   2173  1.1  mrg 	      continue;
   2174  1.1  mrg 	    }
   2175  1.1  mrg 
   2176  1.1  mrg 	  op = no_subreg_reg_operand[nop];
   2177  1.1  mrg 	  mode = curr_operand_mode[nop];
   2178  1.1  mrg 
   2179  1.1  mrg 	  win = did_match = winreg = offmemok = constmemok = false;
   2180  1.1  mrg 	  badop = true;
   2181  1.1  mrg 
   2182  1.1  mrg 	  early_clobber_p = false;
   2183  1.1  mrg 	  p = curr_static_id->operand_alternative[opalt_num].constraint;
   2184  1.1  mrg 
   2185  1.1  mrg 	  this_costly_alternative = this_alternative = NO_REGS;
   2186  1.1  mrg 	  /* We update set of possible hard regs besides its class
   2187  1.1  mrg 	     because reg class might be inaccurate.  For example,
   2188  1.1  mrg 	     union of LO_REGS (l), HI_REGS(h), and STACK_REG(k) in ARM
   2189  1.1  mrg 	     is translated in HI_REGS because classes are merged by
   2190  1.1  mrg 	     pairs and there is no accurate intermediate class.	 */
   2191  1.1  mrg 	  CLEAR_HARD_REG_SET (this_alternative_set);
   2192  1.1  mrg 	  CLEAR_HARD_REG_SET (this_costly_alternative_set);
   2193  1.1  mrg 	  CLEAR_HARD_REG_SET (this_alternative_exclude_start_hard_regs);
   2194  1.1  mrg 	  this_alternative_win = false;
   2195  1.1  mrg 	  this_alternative_match_win = false;
   2196  1.1  mrg 	  this_alternative_offmemok = false;
   2197  1.1  mrg 	  this_alternative_matches = -1;
   2198  1.1  mrg 
   2199  1.1  mrg 	  /* An empty constraint should be excluded by the fast
   2200  1.1  mrg 	     track.  */
   2201  1.1  mrg 	  lra_assert (*p != 0 && *p != ',');
   2202  1.1  mrg 
   2203  1.1  mrg 	  op_reject = 0;
   2204  1.1  mrg 	  /* Scan this alternative's specs for this operand; set WIN
   2205  1.1  mrg 	     if the operand fits any letter in this alternative.
   2206  1.1  mrg 	     Otherwise, clear BADOP if this operand could fit some
   2207  1.1  mrg 	     letter after reloads, or set WINREG if this operand could
   2208  1.1  mrg 	     fit after reloads provided the constraint allows some
   2209  1.1  mrg 	     registers.	 */
   2210  1.1  mrg 	  costly_p = false;
   2211  1.1  mrg 	  do
   2212  1.1  mrg 	    {
   2213  1.1  mrg 	      switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
   2214  1.1  mrg 		{
   2215  1.1  mrg 		case '\0':
   2216  1.1  mrg 		  len = 0;
   2217  1.1  mrg 		  break;
   2218  1.1  mrg 		case ',':
   2219  1.1  mrg 		  c = '\0';
   2220  1.1  mrg 		  break;
   2221  1.1  mrg 
   2222  1.1  mrg 		case '&':
   2223  1.1  mrg 		  early_clobber_p = true;
   2224  1.1  mrg 		  break;
   2225  1.1  mrg 
   2226  1.1  mrg 		case '$':
   2227  1.1  mrg 		  op_reject += LRA_MAX_REJECT;
   2228  1.1  mrg 		  break;
   2229  1.1  mrg 		case '^':
   2230  1.1  mrg 		  op_reject += LRA_LOSER_COST_FACTOR;
   2231  1.1  mrg 		  break;
   2232  1.1  mrg 
   2233  1.1  mrg 		case '#':
   2234  1.1  mrg 		  /* Ignore rest of this alternative.  */
   2235  1.1  mrg 		  c = '\0';
   2236  1.1  mrg 		  break;
   2237  1.1  mrg 
   2238  1.1  mrg 		case '0':  case '1':  case '2':	 case '3':  case '4':
   2239  1.1  mrg 		case '5':  case '6':  case '7':	 case '8':  case '9':
   2240  1.1  mrg 		  {
   2241  1.1  mrg 		    int m_hregno;
   2242  1.1  mrg 		    bool match_p;
   2243  1.1  mrg 
   2244  1.1  mrg 		    m = strtoul (p, &end, 10);
   2245  1.1  mrg 		    p = end;
   2246  1.1  mrg 		    len = 0;
   2247  1.1  mrg 		    lra_assert (nop > m);
   2248  1.1  mrg 
   2249  1.1  mrg 		    /* Reject matches if we don't know which operand is
   2250  1.1  mrg 		       bigger.  This situation would arguably be a bug in
   2251  1.1  mrg 		       an .md pattern, but could also occur in a user asm.  */
   2252  1.1  mrg 		    if (!ordered_p (GET_MODE_SIZE (biggest_mode[m]),
   2253  1.1  mrg 				    GET_MODE_SIZE (biggest_mode[nop])))
   2254  1.1  mrg 		      break;
   2255  1.1  mrg 
   2256  1.1  mrg 		    /* Don't match wrong asm insn operands for proper
   2257  1.1  mrg 		       diagnostic later.  */
   2258  1.1  mrg 		    if (INSN_CODE (curr_insn) < 0
   2259  1.1  mrg 			&& (curr_operand_mode[m] == BLKmode
   2260  1.1  mrg 			    || curr_operand_mode[nop] == BLKmode)
   2261  1.1  mrg 			&& curr_operand_mode[m] != curr_operand_mode[nop])
   2262  1.1  mrg 		      break;
   2263  1.1  mrg 
   2264  1.1  mrg 		    m_hregno = get_hard_regno (*curr_id->operand_loc[m], false);
   2265  1.1  mrg 		    /* We are supposed to match a previous operand.
   2266  1.1  mrg 		       If we do, we win if that one did.  If we do
   2267  1.1  mrg 		       not, count both of the operands as losers.
   2268  1.1  mrg 		       (This is too conservative, since most of the
   2269  1.1  mrg 		       time only a single reload insn will be needed
   2270  1.1  mrg 		       to make the two operands win.  As a result,
   2271  1.1  mrg 		       this alternative may be rejected when it is
   2272  1.1  mrg 		       actually desirable.)  */
   2273  1.1  mrg 		    match_p = false;
   2274  1.1  mrg 		    if (operands_match_p (*curr_id->operand_loc[nop],
   2275  1.1  mrg 					  *curr_id->operand_loc[m], m_hregno))
   2276  1.1  mrg 		      {
   2277  1.1  mrg 			/* We should reject matching of an early
   2278  1.1  mrg 			   clobber operand if the matching operand is
   2279  1.1  mrg 			   not dying in the insn.  */
   2280  1.1  mrg 			if (!TEST_BIT (curr_static_id->operand[m]
   2281  1.1  mrg 				       .early_clobber_alts, nalt)
   2282  1.1  mrg 			    || operand_reg[nop] == NULL_RTX
   2283  1.1  mrg 			    || (find_regno_note (curr_insn, REG_DEAD,
   2284  1.1  mrg 						 REGNO (op))
   2285  1.1  mrg 				|| REGNO (op) == REGNO (operand_reg[m])))
   2286  1.1  mrg 			  match_p = true;
   2287  1.1  mrg 		      }
   2288  1.1  mrg 		    if (match_p)
   2289  1.1  mrg 		      {
   2290  1.1  mrg 			/* If we are matching a non-offsettable
   2291  1.1  mrg 			   address where an offsettable address was
   2292  1.1  mrg 			   expected, then we must reject this
   2293  1.1  mrg 			   combination, because we can't reload
   2294  1.1  mrg 			   it.	*/
   2295  1.1  mrg 			if (curr_alt_offmemok[m]
   2296  1.1  mrg 			    && MEM_P (*curr_id->operand_loc[m])
   2297  1.1  mrg 			    && curr_alt[m] == NO_REGS && ! curr_alt_win[m])
   2298  1.1  mrg 			  continue;
   2299  1.1  mrg 		      }
   2300  1.1  mrg 		    else
   2301  1.1  mrg 		      {
   2302  1.1  mrg 			/* If the operands do not match and one
   2303  1.1  mrg 			   operand is INOUT, we can not match them.
   2304  1.1  mrg 			   Try other possibilities, e.g. other
   2305  1.1  mrg 			   alternatives or commutative operand
   2306  1.1  mrg 			   exchange.  */
   2307  1.1  mrg 			if (curr_static_id->operand[nop].type == OP_INOUT
   2308  1.1  mrg 			    || curr_static_id->operand[m].type == OP_INOUT)
   2309  1.1  mrg 			  break;
   2310  1.1  mrg 			/* Operands don't match.  If the operands are
   2311  1.1  mrg 			   different user defined explicit hard
   2312  1.1  mrg 			   registers, then we cannot make them match
   2313  1.1  mrg 			   when one is early clobber operand.  */
   2314  1.1  mrg 			if ((REG_P (*curr_id->operand_loc[nop])
   2315  1.1  mrg 			     || SUBREG_P (*curr_id->operand_loc[nop]))
   2316  1.1  mrg 			    && (REG_P (*curr_id->operand_loc[m])
   2317  1.1  mrg 				|| SUBREG_P (*curr_id->operand_loc[m])))
   2318  1.1  mrg 			  {
   2319  1.1  mrg 			    rtx nop_reg = *curr_id->operand_loc[nop];
   2320  1.1  mrg 			    if (SUBREG_P (nop_reg))
   2321  1.1  mrg 			      nop_reg = SUBREG_REG (nop_reg);
   2322  1.1  mrg 			    rtx m_reg = *curr_id->operand_loc[m];
   2323  1.1  mrg 			    if (SUBREG_P (m_reg))
   2324  1.1  mrg 			      m_reg = SUBREG_REG (m_reg);
   2325  1.1  mrg 
   2326  1.1  mrg 			    if (REG_P (nop_reg)
   2327  1.1  mrg 				&& HARD_REGISTER_P (nop_reg)
   2328  1.1  mrg 				&& REG_USERVAR_P (nop_reg)
   2329  1.1  mrg 				&& REG_P (m_reg)
   2330  1.1  mrg 				&& HARD_REGISTER_P (m_reg)
   2331  1.1  mrg 				&& REG_USERVAR_P (m_reg))
   2332  1.1  mrg 			      {
   2333  1.1  mrg 				int i;
   2334  1.1  mrg 
   2335  1.1  mrg 				for (i = 0; i < early_clobbered_regs_num; i++)
   2336  1.1  mrg 				  if (m == early_clobbered_nops[i])
   2337  1.1  mrg 				    break;
   2338  1.1  mrg 				if (i < early_clobbered_regs_num
   2339  1.1  mrg 				    || early_clobber_p)
   2340  1.1  mrg 				  break;
   2341  1.1  mrg 			      }
   2342  1.1  mrg 			  }
   2343  1.1  mrg 			/* Both operands must allow a reload register,
   2344  1.1  mrg 			   otherwise we cannot make them match.  */
   2345  1.1  mrg 			if (curr_alt[m] == NO_REGS)
   2346  1.1  mrg 			  break;
   2347  1.1  mrg 			/* Retroactively mark the operand we had to
   2348  1.1  mrg 			   match as a loser, if it wasn't already and
   2349  1.1  mrg 			   it wasn't matched to a register constraint
   2350  1.1  mrg 			   (e.g it might be matched by memory). */
   2351  1.1  mrg 			if (curr_alt_win[m]
   2352  1.1  mrg 			    && (operand_reg[m] == NULL_RTX
   2353  1.1  mrg 				|| hard_regno[m] < 0))
   2354  1.1  mrg 			  {
   2355  1.1  mrg 			    losers++;
   2356  1.1  mrg 			    reload_nregs
   2357  1.1  mrg 			      += (ira_reg_class_max_nregs[curr_alt[m]]
   2358  1.1  mrg 				  [GET_MODE (*curr_id->operand_loc[m])]);
   2359  1.1  mrg 			  }
   2360  1.1  mrg 
   2361  1.1  mrg 			/* Prefer matching earlyclobber alternative as
   2362  1.1  mrg 			   it results in less hard regs required for
   2363  1.1  mrg 			   the insn than a non-matching earlyclobber
   2364  1.1  mrg 			   alternative.  */
   2365  1.1  mrg 			if (TEST_BIT (curr_static_id->operand[m]
   2366  1.1  mrg 				      .early_clobber_alts, nalt))
   2367  1.1  mrg 			  {
   2368  1.1  mrg 			    if (lra_dump_file != NULL)
   2369  1.1  mrg 			      fprintf
   2370  1.1  mrg 				(lra_dump_file,
   2371  1.1  mrg 				 "            %d Matching earlyclobber alt:"
   2372  1.1  mrg 				 " reject--\n",
   2373  1.1  mrg 				 nop);
   2374  1.1  mrg 			    if (!matching_early_clobber[m])
   2375  1.1  mrg 			      {
   2376  1.1  mrg 				reject--;
   2377  1.1  mrg 				matching_early_clobber[m] = 1;
   2378  1.1  mrg 			      }
   2379  1.1  mrg 			  }
   2380  1.1  mrg 			/* Otherwise we prefer no matching
   2381  1.1  mrg 			   alternatives because it gives more freedom
   2382  1.1  mrg 			   in RA.  */
   2383  1.1  mrg 			else if (operand_reg[nop] == NULL_RTX
   2384  1.1  mrg 				 || (find_regno_note (curr_insn, REG_DEAD,
   2385  1.1  mrg 						      REGNO (operand_reg[nop]))
   2386  1.1  mrg 				     == NULL_RTX))
   2387  1.1  mrg 			  {
   2388  1.1  mrg 			    if (lra_dump_file != NULL)
   2389  1.1  mrg 			      fprintf
   2390  1.1  mrg 				(lra_dump_file,
   2391  1.1  mrg 				 "            %d Matching alt: reject+=2\n",
   2392  1.1  mrg 				 nop);
   2393  1.1  mrg 			    reject += 2;
   2394  1.1  mrg 			  }
   2395  1.1  mrg 		      }
   2396  1.1  mrg 		    /* If we have to reload this operand and some
   2397  1.1  mrg 		       previous operand also had to match the same
   2398  1.1  mrg 		       thing as this operand, we don't know how to do
   2399  1.1  mrg 		       that.  */
   2400  1.1  mrg 		    if (!match_p || !curr_alt_win[m])
   2401  1.1  mrg 		      {
   2402  1.1  mrg 			for (i = 0; i < nop; i++)
   2403  1.1  mrg 			  if (curr_alt_matches[i] == m)
   2404  1.1  mrg 			    break;
   2405  1.1  mrg 			if (i < nop)
   2406  1.1  mrg 			  break;
   2407  1.1  mrg 		      }
   2408  1.1  mrg 		    else
   2409  1.1  mrg 		      did_match = true;
   2410  1.1  mrg 
   2411  1.1  mrg 		    this_alternative_matches = m;
   2412  1.1  mrg 		    /* This can be fixed with reloads if the operand
   2413  1.1  mrg 		       we are supposed to match can be fixed with
   2414  1.1  mrg 		       reloads. */
   2415  1.1  mrg 		    badop = false;
   2416  1.1  mrg 		    this_alternative = curr_alt[m];
   2417  1.1  mrg 		    this_alternative_set = curr_alt_set[m];
   2418  1.1  mrg 		    this_alternative_exclude_start_hard_regs
   2419  1.1  mrg 			= curr_alt_exclude_start_hard_regs[m];
   2420  1.1  mrg 		    winreg = this_alternative != NO_REGS;
   2421  1.1  mrg 		    break;
   2422  1.1  mrg 		  }
   2423  1.1  mrg 
   2424  1.1  mrg 		case 'g':
   2425  1.1  mrg 		  if (MEM_P (op)
   2426  1.1  mrg 		      || general_constant_p (op)
   2427  1.1  mrg 		      || spilled_pseudo_p (op))
   2428  1.1  mrg 		    win = true;
   2429  1.1  mrg 		  cl = GENERAL_REGS;
   2430  1.1  mrg 		  goto reg;
   2431  1.1  mrg 
   2432  1.1  mrg 		default:
   2433  1.1  mrg 		  cn = lookup_constraint (p);
   2434  1.1  mrg 		  switch (get_constraint_type (cn))
   2435  1.1  mrg 		    {
   2436  1.1  mrg 		    case CT_REGISTER:
   2437  1.1  mrg 		      cl = reg_class_for_constraint (cn);
   2438  1.1  mrg 		      if (cl != NO_REGS)
   2439  1.1  mrg 			goto reg;
   2440  1.1  mrg 		      break;
   2441  1.1  mrg 
   2442  1.1  mrg 		    case CT_CONST_INT:
   2443  1.1  mrg 		      if (CONST_INT_P (op)
   2444  1.1  mrg 			  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
   2445  1.1  mrg 			win = true;
   2446  1.1  mrg 		      break;
   2447  1.1  mrg 
   2448  1.1  mrg 		    case CT_MEMORY:
   2449  1.1  mrg 		    case CT_RELAXED_MEMORY:
   2450  1.1  mrg 		      if (MEM_P (op)
   2451  1.1  mrg 			  && satisfies_memory_constraint_p (op, cn))
   2452  1.1  mrg 			win = true;
   2453  1.1  mrg 		      else if (spilled_pseudo_p (op))
   2454  1.1  mrg 			win = true;
   2455  1.1  mrg 
   2456  1.1  mrg 		      /* If we didn't already win, we can reload constants
   2457  1.1  mrg 			 via force_const_mem or put the pseudo value into
   2458  1.1  mrg 			 memory, or make other memory by reloading the
   2459  1.1  mrg 			 address like for 'o'.  */
   2460  1.1  mrg 		      if (CONST_POOL_OK_P (mode, op)
   2461  1.1  mrg 			  || MEM_P (op) || REG_P (op)
   2462  1.1  mrg 			  /* We can restore the equiv insn by a
   2463  1.1  mrg 			     reload.  */
   2464  1.1  mrg 			  || equiv_substition_p[nop])
   2465  1.1  mrg 			badop = false;
   2466  1.1  mrg 		      constmemok = true;
   2467  1.1  mrg 		      offmemok = true;
   2468  1.1  mrg 		      break;
   2469  1.1  mrg 
   2470  1.1  mrg 		    case CT_ADDRESS:
   2471  1.1  mrg 		      /* An asm operand with an address constraint
   2472  1.1  mrg 			 that doesn't satisfy address_operand has
   2473  1.1  mrg 			 is_address cleared, so that we don't try to
   2474  1.1  mrg 			 make a non-address fit.  */
   2475  1.1  mrg 		      if (!curr_static_id->operand[nop].is_address)
   2476  1.1  mrg 			break;
   2477  1.1  mrg 		      /* If we didn't already win, we can reload the address
   2478  1.1  mrg 			 into a base register.  */
   2479  1.1  mrg 		      if (satisfies_address_constraint_p (op, cn))
   2480  1.1  mrg 			win = true;
   2481  1.1  mrg 		      cl = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
   2482  1.1  mrg 					   ADDRESS, SCRATCH);
   2483  1.1  mrg 		      badop = false;
   2484  1.1  mrg 		      goto reg;
   2485  1.1  mrg 
   2486  1.1  mrg 		    case CT_FIXED_FORM:
   2487  1.1  mrg 		      if (constraint_satisfied_p (op, cn))
   2488  1.1  mrg 			win = true;
   2489  1.1  mrg 		      break;
   2490  1.1  mrg 
   2491  1.1  mrg 		    case CT_SPECIAL_MEMORY:
   2492  1.1  mrg 		      if (satisfies_memory_constraint_p (op, cn))
   2493  1.1  mrg 			win = true;
   2494  1.1  mrg 		      else if (spilled_pseudo_p (op))
   2495  1.1  mrg 			win = true;
   2496  1.1  mrg 		      break;
   2497  1.1  mrg 		    }
   2498  1.1  mrg 		  break;
   2499  1.1  mrg 
   2500  1.1  mrg 		reg:
   2501  1.1  mrg 		  if (mode == BLKmode)
   2502  1.1  mrg 		    break;
   2503  1.1  mrg 		  this_alternative = reg_class_subunion[this_alternative][cl];
   2504  1.1  mrg 		  if (hard_reg_set_subset_p (this_alternative_set,
   2505  1.1  mrg 					     reg_class_contents[cl]))
   2506  1.1  mrg 		    this_alternative_exclude_start_hard_regs
   2507  1.1  mrg 		      = ira_exclude_class_mode_regs[cl][mode];
   2508  1.1  mrg 		  else if (!hard_reg_set_subset_p (reg_class_contents[cl],
   2509  1.1  mrg 						   this_alternative_set))
   2510  1.1  mrg 		    this_alternative_exclude_start_hard_regs
   2511  1.1  mrg 		      |= ira_exclude_class_mode_regs[cl][mode];
   2512  1.1  mrg 		  this_alternative_set |= reg_class_contents[cl];
   2513  1.1  mrg 		  if (costly_p)
   2514  1.1  mrg 		    {
   2515  1.1  mrg 		      this_costly_alternative
   2516  1.1  mrg 			= reg_class_subunion[this_costly_alternative][cl];
   2517  1.1  mrg 		      this_costly_alternative_set |= reg_class_contents[cl];
   2518  1.1  mrg 		    }
   2519  1.1  mrg 		  winreg = true;
   2520  1.1  mrg 		  if (REG_P (op))
   2521  1.1  mrg 		    {
   2522  1.1  mrg 		      if (hard_regno[nop] >= 0
   2523  1.1  mrg 			  && in_hard_reg_set_p (this_alternative_set,
   2524  1.1  mrg 						mode, hard_regno[nop])
   2525  1.1  mrg 			  && !TEST_HARD_REG_BIT
   2526  1.1  mrg 			      (this_alternative_exclude_start_hard_regs,
   2527  1.1  mrg 			       hard_regno[nop]))
   2528  1.1  mrg 			win = true;
   2529  1.1  mrg 		      else if (hard_regno[nop] < 0
   2530  1.1  mrg 			       && in_class_p (op, this_alternative, NULL))
   2531  1.1  mrg 			win = true;
   2532  1.1  mrg 		    }
   2533  1.1  mrg 		  break;
   2534  1.1  mrg 		}
   2535  1.1  mrg 	      if (c != ' ' && c != '\t')
   2536  1.1  mrg 		costly_p = c == '*';
   2537  1.1  mrg 	    }
   2538  1.1  mrg 	  while ((p += len), c);
   2539  1.1  mrg 
   2540  1.1  mrg 	  scratch_p = (operand_reg[nop] != NULL_RTX
   2541  1.1  mrg 		       && ira_former_scratch_p (REGNO (operand_reg[nop])));
   2542  1.1  mrg 	  /* Record which operands fit this alternative.  */
   2543  1.1  mrg 	  if (win)
   2544  1.1  mrg 	    {
   2545  1.1  mrg 	      this_alternative_win = true;
   2546  1.1  mrg 	      if (operand_reg[nop] != NULL_RTX)
   2547  1.1  mrg 		{
   2548  1.1  mrg 		  if (hard_regno[nop] >= 0)
   2549  1.1  mrg 		    {
   2550  1.1  mrg 		      if (in_hard_reg_set_p (this_costly_alternative_set,
   2551  1.1  mrg 					     mode, hard_regno[nop]))
   2552  1.1  mrg 			{
   2553  1.1  mrg 			  if (lra_dump_file != NULL)
   2554  1.1  mrg 			    fprintf (lra_dump_file,
   2555  1.1  mrg 				     "            %d Costly set: reject++\n",
   2556  1.1  mrg 				     nop);
   2557  1.1  mrg 			  reject++;
   2558  1.1  mrg 			}
   2559  1.1  mrg 		    }
   2560  1.1  mrg 		  else
   2561  1.1  mrg 		    {
   2562  1.1  mrg 		      /* Prefer won reg to spilled pseudo under other
   2563  1.1  mrg 			 equal conditions for possible inheritance.  */
   2564  1.1  mrg 		      if (! scratch_p)
   2565  1.1  mrg 			{
   2566  1.1  mrg 			  if (lra_dump_file != NULL)
   2567  1.1  mrg 			    fprintf
   2568  1.1  mrg 			      (lra_dump_file,
   2569  1.1  mrg 			       "            %d Non pseudo reload: reject++\n",
   2570  1.1  mrg 			       nop);
   2571  1.1  mrg 			  reject++;
   2572  1.1  mrg 			}
   2573  1.1  mrg 		      if (in_class_p (operand_reg[nop],
   2574  1.1  mrg 				      this_costly_alternative, NULL))
   2575  1.1  mrg 			{
   2576  1.1  mrg 			  if (lra_dump_file != NULL)
   2577  1.1  mrg 			    fprintf
   2578  1.1  mrg 			      (lra_dump_file,
   2579  1.1  mrg 			       "            %d Non pseudo costly reload:"
   2580  1.1  mrg 			       " reject++\n",
   2581  1.1  mrg 			       nop);
   2582  1.1  mrg 			  reject++;
   2583  1.1  mrg 			}
   2584  1.1  mrg 		    }
   2585  1.1  mrg 		  /* We simulate the behavior of old reload here.
   2586  1.1  mrg 		     Although scratches need hard registers and it
   2587  1.1  mrg 		     might result in spilling other pseudos, no reload
   2588  1.1  mrg 		     insns are generated for the scratches.  So it
   2589  1.1  mrg 		     might cost something but probably less than old
   2590  1.1  mrg 		     reload pass believes.  */
   2591  1.1  mrg 		  if (scratch_p)
   2592  1.1  mrg 		    {
   2593  1.1  mrg 		      if (lra_dump_file != NULL)
   2594  1.1  mrg 			fprintf (lra_dump_file,
   2595  1.1  mrg 				 "            %d Scratch win: reject+=2\n",
   2596  1.1  mrg 				 nop);
   2597  1.1  mrg 		      reject += 2;
   2598  1.1  mrg 		    }
   2599  1.1  mrg 		}
   2600  1.1  mrg 	    }
   2601  1.1  mrg 	  else if (did_match)
   2602  1.1  mrg 	    this_alternative_match_win = true;
   2603  1.1  mrg 	  else
   2604  1.1  mrg 	    {
   2605  1.1  mrg 	      int const_to_mem = 0;
   2606  1.1  mrg 	      bool no_regs_p;
   2607  1.1  mrg 
   2608  1.1  mrg 	      reject += op_reject;
   2609  1.1  mrg 	      /* Never do output reload of stack pointer.  It makes
   2610  1.1  mrg 		 impossible to do elimination when SP is changed in
   2611  1.1  mrg 		 RTL.  */
   2612  1.1  mrg 	      if (op == stack_pointer_rtx && ! frame_pointer_needed
   2613  1.1  mrg 		  && curr_static_id->operand[nop].type != OP_IN)
   2614  1.1  mrg 		goto fail;
   2615  1.1  mrg 
   2616  1.1  mrg 	      /* If this alternative asks for a specific reg class, see if there
   2617  1.1  mrg 		 is at least one allocatable register in that class.  */
   2618  1.1  mrg 	      no_regs_p
   2619  1.1  mrg 		= (this_alternative == NO_REGS
   2620  1.1  mrg 		   || (hard_reg_set_subset_p
   2621  1.1  mrg 		       (reg_class_contents[this_alternative],
   2622  1.1  mrg 			lra_no_alloc_regs)));
   2623  1.1  mrg 
   2624  1.1  mrg 	      /* For asms, verify that the class for this alternative is possible
   2625  1.1  mrg 		 for the mode that is specified.  */
   2626  1.1  mrg 	      if (!no_regs_p && INSN_CODE (curr_insn) < 0)
   2627  1.1  mrg 		{
   2628  1.1  mrg 		  int i;
   2629  1.1  mrg 		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
   2630  1.1  mrg 		    if (targetm.hard_regno_mode_ok (i, mode)
   2631  1.1  mrg 			&& in_hard_reg_set_p (reg_class_contents[this_alternative],
   2632  1.1  mrg 					      mode, i))
   2633  1.1  mrg 		      break;
   2634  1.1  mrg 		  if (i == FIRST_PSEUDO_REGISTER)
   2635  1.1  mrg 		    winreg = false;
   2636  1.1  mrg 		}
   2637  1.1  mrg 
   2638  1.1  mrg 	      /* If this operand accepts a register, and if the
   2639  1.1  mrg 		 register class has at least one allocatable register,
   2640  1.1  mrg 		 then this operand can be reloaded.  */
   2641  1.1  mrg 	      if (winreg && !no_regs_p)
   2642  1.1  mrg 		badop = false;
   2643  1.1  mrg 
   2644  1.1  mrg 	      if (badop)
   2645  1.1  mrg 		{
   2646  1.1  mrg 		  if (lra_dump_file != NULL)
   2647  1.1  mrg 		    fprintf (lra_dump_file,
   2648  1.1  mrg 			     "            alt=%d: Bad operand -- refuse\n",
   2649  1.1  mrg 			     nalt);
   2650  1.1  mrg 		  goto fail;
   2651  1.1  mrg 		}
   2652  1.1  mrg 
   2653  1.1  mrg 	      if (this_alternative != NO_REGS)
   2654  1.1  mrg 		{
   2655  1.1  mrg 		  HARD_REG_SET available_regs
   2656  1.1  mrg 		    = (reg_class_contents[this_alternative]
   2657  1.1  mrg 		       & ~((ira_prohibited_class_mode_regs
   2658  1.1  mrg 			    [this_alternative][mode])
   2659  1.1  mrg 			   | lra_no_alloc_regs));
   2660  1.1  mrg 		  if (hard_reg_set_empty_p (available_regs))
   2661  1.1  mrg 		    {
   2662  1.1  mrg 		      /* There are no hard regs holding a value of given
   2663  1.1  mrg 			 mode.  */
   2664  1.1  mrg 		      if (offmemok)
   2665  1.1  mrg 			{
   2666  1.1  mrg 			  this_alternative = NO_REGS;
   2667  1.1  mrg 			  if (lra_dump_file != NULL)
   2668  1.1  mrg 			    fprintf (lra_dump_file,
   2669  1.1  mrg 				     "            %d Using memory because of"
   2670  1.1  mrg 				     " a bad mode: reject+=2\n",
   2671  1.1  mrg 				     nop);
   2672  1.1  mrg 			  reject += 2;
   2673  1.1  mrg 			}
   2674  1.1  mrg 		      else
   2675  1.1  mrg 			{
   2676  1.1  mrg 			  if (lra_dump_file != NULL)
   2677  1.1  mrg 			    fprintf (lra_dump_file,
   2678  1.1  mrg 				     "            alt=%d: Wrong mode -- refuse\n",
   2679  1.1  mrg 				     nalt);
   2680  1.1  mrg 			  goto fail;
   2681  1.1  mrg 			}
   2682  1.1  mrg 		    }
   2683  1.1  mrg 		}
   2684  1.1  mrg 
   2685  1.1  mrg 	      /* If the not yet assigned pseudo has a class of which the
   2686  1.1  mrg 		 required reg class is a subset, it is a less costly
   2687  1.1  mrg 		 alternative as the pseudo still can get a hard reg of
   2688  1.1  mrg 		 the necessary class.  */
   2689  1.1  mrg 	      if (! no_regs_p && REG_P (op) && hard_regno[nop] < 0
   2690  1.1  mrg 		  && (cl = get_reg_class (REGNO (op))) != NO_REGS
   2691  1.1  mrg 		  && ira_class_subset_p[this_alternative][cl])
   2692  1.1  mrg 		{
   2693  1.1  mrg 		  if (lra_dump_file != NULL)
   2694  1.1  mrg 		    fprintf
   2695  1.1  mrg 		      (lra_dump_file,
   2696  1.1  mrg 		       "            %d Super set class reg: reject-=3\n", nop);
   2697  1.1  mrg 		  reject -= 3;
   2698  1.1  mrg 		}
   2699  1.1  mrg 
   2700  1.1  mrg 	      this_alternative_offmemok = offmemok;
   2701  1.1  mrg 	      if (this_costly_alternative != NO_REGS)
   2702  1.1  mrg 		{
   2703  1.1  mrg 		  if (lra_dump_file != NULL)
   2704  1.1  mrg 		    fprintf (lra_dump_file,
   2705  1.1  mrg 			     "            %d Costly loser: reject++\n", nop);
   2706  1.1  mrg 		  reject++;
   2707  1.1  mrg 		}
   2708  1.1  mrg 	      /* If the operand is dying, has a matching constraint,
   2709  1.1  mrg 		 and satisfies constraints of the matched operand
   2710  1.1  mrg 		 which failed to satisfy the own constraints, most probably
   2711  1.1  mrg 		 the reload for this operand will be gone.  */
   2712  1.1  mrg 	      if (this_alternative_matches >= 0
   2713  1.1  mrg 		  && !curr_alt_win[this_alternative_matches]
   2714  1.1  mrg 		  && REG_P (op)
   2715  1.1  mrg 		  && find_regno_note (curr_insn, REG_DEAD, REGNO (op))
   2716  1.1  mrg 		  && (hard_regno[nop] >= 0
   2717  1.1  mrg 		      ? in_hard_reg_set_p (this_alternative_set,
   2718  1.1  mrg 					   mode, hard_regno[nop])
   2719  1.1  mrg 		      : in_class_p (op, this_alternative, NULL)))
   2720  1.1  mrg 		{
   2721  1.1  mrg 		  if (lra_dump_file != NULL)
   2722  1.1  mrg 		    fprintf
   2723  1.1  mrg 		      (lra_dump_file,
   2724  1.1  mrg 		       "            %d Dying matched operand reload: reject++\n",
   2725  1.1  mrg 		       nop);
   2726  1.1  mrg 		  reject++;
   2727  1.1  mrg 		}
   2728  1.1  mrg 	      else
   2729  1.1  mrg 		{
   2730  1.1  mrg 		  /* Strict_low_part requires to reload the register
   2731  1.1  mrg 		     not the sub-register.  In this case we should
   2732  1.1  mrg 		     check that a final reload hard reg can hold the
   2733  1.1  mrg 		     value mode.  */
   2734  1.1  mrg 		  if (curr_static_id->operand[nop].strict_low
   2735  1.1  mrg 		      && REG_P (op)
   2736  1.1  mrg 		      && hard_regno[nop] < 0
   2737  1.1  mrg 		      && GET_CODE (*curr_id->operand_loc[nop]) == SUBREG
   2738  1.1  mrg 		      && ira_class_hard_regs_num[this_alternative] > 0
   2739  1.1  mrg 		      && (!targetm.hard_regno_mode_ok
   2740  1.1  mrg 			  (ira_class_hard_regs[this_alternative][0],
   2741  1.1  mrg 			   GET_MODE (*curr_id->operand_loc[nop]))))
   2742  1.1  mrg 		    {
   2743  1.1  mrg 		      if (lra_dump_file != NULL)
   2744  1.1  mrg 			fprintf
   2745  1.1  mrg 			  (lra_dump_file,
   2746  1.1  mrg 			   "            alt=%d: Strict low subreg reload -- refuse\n",
   2747  1.1  mrg 			   nalt);
   2748  1.1  mrg 		      goto fail;
   2749  1.1  mrg 		    }
   2750  1.1  mrg 		  losers++;
   2751  1.1  mrg 		}
   2752  1.1  mrg 	      if (operand_reg[nop] != NULL_RTX
   2753  1.1  mrg 		  /* Output operands and matched input operands are
   2754  1.1  mrg 		     not inherited.  The following conditions do not
   2755  1.1  mrg 		     exactly describe the previous statement but they
   2756  1.1  mrg 		     are pretty close.  */
   2757  1.1  mrg 		  && curr_static_id->operand[nop].type != OP_OUT
   2758  1.1  mrg 		  && (this_alternative_matches < 0
   2759  1.1  mrg 		      || curr_static_id->operand[nop].type != OP_IN))
   2760  1.1  mrg 		{
   2761  1.1  mrg 		  int last_reload = (lra_reg_info[ORIGINAL_REGNO
   2762  1.1  mrg 						  (operand_reg[nop])]
   2763  1.1  mrg 				     .last_reload);
   2764  1.1  mrg 
   2765  1.1  mrg 		  /* The value of reload_sum makes sense only if we
   2766  1.1  mrg 		     process insns in their order.  It happens only on
   2767  1.1  mrg 		     the first constraints sub-pass when we do most of
   2768  1.1  mrg 		     reload work.  */
   2769  1.1  mrg 		  if (lra_constraint_iter == 1 && last_reload > bb_reload_num)
   2770  1.1  mrg 		    reload_sum += last_reload - bb_reload_num;
   2771  1.1  mrg 		}
   2772  1.1  mrg 	      /* If this is a constant that is reloaded into the
   2773  1.1  mrg 		 desired class by copying it to memory first, count
   2774  1.1  mrg 		 that as another reload.  This is consistent with
   2775  1.1  mrg 		 other code and is required to avoid choosing another
   2776  1.1  mrg 		 alternative when the constant is moved into memory.
   2777  1.1  mrg 		 Note that the test here is precisely the same as in
   2778  1.1  mrg 		 the code below that calls force_const_mem.  */
   2779  1.1  mrg 	      if (CONST_POOL_OK_P (mode, op)
   2780  1.1  mrg 		  && ((targetm.preferred_reload_class
   2781  1.1  mrg 		       (op, this_alternative) == NO_REGS)
   2782  1.1  mrg 		      || no_input_reloads_p))
   2783  1.1  mrg 		{
   2784  1.1  mrg 		  const_to_mem = 1;
   2785  1.1  mrg 		  if (! no_regs_p)
   2786  1.1  mrg 		    losers++;
   2787  1.1  mrg 		}
   2788  1.1  mrg 
   2789  1.1  mrg 	      /* Alternative loses if it requires a type of reload not
   2790  1.1  mrg 		 permitted for this insn.  We can always reload
   2791  1.1  mrg 		 objects with a REG_UNUSED note.  */
   2792  1.1  mrg 	      if ((curr_static_id->operand[nop].type != OP_IN
   2793  1.1  mrg 		   && no_output_reloads_p
   2794  1.1  mrg 		   && ! find_reg_note (curr_insn, REG_UNUSED, op))
   2795  1.1  mrg 		  || (curr_static_id->operand[nop].type != OP_OUT
   2796  1.1  mrg 		      && no_input_reloads_p && ! const_to_mem)
   2797  1.1  mrg 		  || (this_alternative_matches >= 0
   2798  1.1  mrg 		      && (no_input_reloads_p
   2799  1.1  mrg 			  || (no_output_reloads_p
   2800  1.1  mrg 			      && (curr_static_id->operand
   2801  1.1  mrg 				  [this_alternative_matches].type != OP_IN)
   2802  1.1  mrg 			      && ! find_reg_note (curr_insn, REG_UNUSED,
   2803  1.1  mrg 						  no_subreg_reg_operand
   2804  1.1  mrg 						  [this_alternative_matches])))))
   2805  1.1  mrg 		{
   2806  1.1  mrg 		  if (lra_dump_file != NULL)
   2807  1.1  mrg 		    fprintf
   2808  1.1  mrg 		      (lra_dump_file,
   2809  1.1  mrg 		       "            alt=%d: No input/output reload -- refuse\n",
   2810  1.1  mrg 		       nalt);
   2811  1.1  mrg 		  goto fail;
   2812  1.1  mrg 		}
   2813  1.1  mrg 
   2814  1.1  mrg 	      /* Alternative loses if its required class pseudo cannot
   2815  1.1  mrg 		 hold value of required mode.  Such insns can be
   2816  1.1  mrg 		 described by insn definitions with mode iterators.  */
   2817  1.1  mrg 	      if (GET_MODE (*curr_id->operand_loc[nop]) != VOIDmode
   2818  1.1  mrg 		  && ! hard_reg_set_empty_p (this_alternative_set)
   2819  1.1  mrg 		  /* It is common practice for constraints to use a
   2820  1.1  mrg 		     class which does not have actually enough regs to
   2821  1.1  mrg 		     hold the value (e.g. x86 AREG for mode requiring
   2822  1.1  mrg 		     more than one general reg).  Therefore we have 2
   2823  1.1  mrg 		     conditions to check that the reload pseudo cannot
   2824  1.1  mrg 		     hold the mode value.  */
   2825  1.1  mrg 		  && (!targetm.hard_regno_mode_ok
   2826  1.1  mrg 		      (ira_class_hard_regs[this_alternative][0],
   2827  1.1  mrg 		       GET_MODE (*curr_id->operand_loc[nop])))
   2828  1.1  mrg 		  /* The above condition is not enough as the first
   2829  1.1  mrg 		     reg in ira_class_hard_regs can be not aligned for
   2830  1.1  mrg 		     multi-words mode values.  */
   2831  1.1  mrg 		  && (prohibited_class_reg_set_mode_p
   2832  1.1  mrg 		      (this_alternative, this_alternative_set,
   2833  1.1  mrg 		       GET_MODE (*curr_id->operand_loc[nop]))))
   2834  1.1  mrg 		{
   2835  1.1  mrg 		  if (lra_dump_file != NULL)
   2836  1.1  mrg 		    fprintf (lra_dump_file,
   2837  1.1  mrg 			     "            alt=%d: reload pseudo for op %d "
   2838  1.1  mrg 			     "cannot hold the mode value -- refuse\n",
   2839  1.1  mrg 			     nalt, nop);
   2840  1.1  mrg 		  goto fail;
   2841  1.1  mrg 		}
   2842  1.1  mrg 
   2843  1.1  mrg 	      /* Check strong discouragement of reload of non-constant
   2844  1.1  mrg 		 into class THIS_ALTERNATIVE.  */
   2845  1.1  mrg 	      if (! CONSTANT_P (op) && ! no_regs_p
   2846  1.1  mrg 		  && (targetm.preferred_reload_class
   2847  1.1  mrg 		      (op, this_alternative) == NO_REGS
   2848  1.1  mrg 		      || (curr_static_id->operand[nop].type == OP_OUT
   2849  1.1  mrg 			  && (targetm.preferred_output_reload_class
   2850  1.1  mrg 			      (op, this_alternative) == NO_REGS))))
   2851  1.1  mrg 		{
   2852  1.1  mrg 		  if (offmemok && REG_P (op))
   2853  1.1  mrg 		    {
   2854  1.1  mrg 		      if (lra_dump_file != NULL)
   2855  1.1  mrg 			fprintf
   2856  1.1  mrg 			  (lra_dump_file,
   2857  1.1  mrg 			   "            %d Spill pseudo into memory: reject+=3\n",
   2858  1.1  mrg 			   nop);
   2859  1.1  mrg 		      reject += 3;
   2860  1.1  mrg 		    }
   2861  1.1  mrg 		  else
   2862  1.1  mrg 		    {
   2863  1.1  mrg 		      if (lra_dump_file != NULL)
   2864  1.1  mrg 			fprintf
   2865  1.1  mrg 			  (lra_dump_file,
   2866  1.1  mrg 			   "            %d Non-prefered reload: reject+=%d\n",
   2867  1.1  mrg 			   nop, LRA_MAX_REJECT);
   2868  1.1  mrg 		      reject += LRA_MAX_REJECT;
   2869  1.1  mrg 		    }
   2870  1.1  mrg 		}
   2871  1.1  mrg 
   2872  1.1  mrg 	      if (! (MEM_P (op) && offmemok)
   2873  1.1  mrg 		  && ! (const_to_mem && constmemok))
   2874  1.1  mrg 		{
   2875  1.1  mrg 		  /* We prefer to reload pseudos over reloading other
   2876  1.1  mrg 		     things, since such reloads may be able to be
   2877  1.1  mrg 		     eliminated later.  So bump REJECT in other cases.
   2878  1.1  mrg 		     Don't do this in the case where we are forcing a
   2879  1.1  mrg 		     constant into memory and it will then win since
   2880  1.1  mrg 		     we don't want to have a different alternative
   2881  1.1  mrg 		     match then.  */
   2882  1.1  mrg 		  if (! (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER))
   2883  1.1  mrg 		    {
   2884  1.1  mrg 		      if (lra_dump_file != NULL)
   2885  1.1  mrg 			fprintf
   2886  1.1  mrg 			  (lra_dump_file,
   2887  1.1  mrg 			   "            %d Non-pseudo reload: reject+=2\n",
   2888  1.1  mrg 			   nop);
   2889  1.1  mrg 		      reject += 2;
   2890  1.1  mrg 		    }
   2891  1.1  mrg 
   2892  1.1  mrg 		  if (! no_regs_p)
   2893  1.1  mrg 		    reload_nregs
   2894  1.1  mrg 		      += ira_reg_class_max_nregs[this_alternative][mode];
   2895  1.1  mrg 
   2896  1.1  mrg 		  if (SMALL_REGISTER_CLASS_P (this_alternative))
   2897  1.1  mrg 		    {
   2898  1.1  mrg 		      if (lra_dump_file != NULL)
   2899  1.1  mrg 			fprintf
   2900  1.1  mrg 			  (lra_dump_file,
   2901  1.1  mrg 			   "            %d Small class reload: reject+=%d\n",
   2902  1.1  mrg 			   nop, LRA_LOSER_COST_FACTOR / 2);
   2903  1.1  mrg 		      reject += LRA_LOSER_COST_FACTOR / 2;
   2904  1.1  mrg 		    }
   2905  1.1  mrg 		}
   2906  1.1  mrg 
   2907  1.1  mrg 	      /* We are trying to spill pseudo into memory.  It is
   2908  1.1  mrg 		 usually more costly than moving to a hard register
   2909  1.1  mrg 		 although it might take the same number of
   2910  1.1  mrg 		 reloads.
   2911  1.1  mrg 
   2912  1.1  mrg 		 Non-pseudo spill may happen also.  Suppose a target allows both
   2913  1.1  mrg 		 register and memory in the operand constraint alternatives,
   2914  1.1  mrg 		 then it's typical that an eliminable register has a substitution
   2915  1.1  mrg 		 of "base + offset" which can either be reloaded by a simple
   2916  1.1  mrg 		 "new_reg <= base + offset" which will match the register
   2917  1.1  mrg 		 constraint, or a similar reg addition followed by further spill
   2918  1.1  mrg 		 to and reload from memory which will match the memory
   2919  1.1  mrg 		 constraint, but this memory spill will be much more costly
   2920  1.1  mrg 		 usually.
   2921  1.1  mrg 
   2922  1.1  mrg 		 Code below increases the reject for both pseudo and non-pseudo
   2923  1.1  mrg 		 spill.  */
   2924  1.1  mrg 	      if (no_regs_p
   2925  1.1  mrg 		  && !(MEM_P (op) && offmemok)
   2926  1.1  mrg 		  && !(REG_P (op) && hard_regno[nop] < 0))
   2927  1.1  mrg 		{
   2928  1.1  mrg 		  if (lra_dump_file != NULL)
   2929  1.1  mrg 		    fprintf
   2930  1.1  mrg 		      (lra_dump_file,
   2931  1.1  mrg 		       "            %d Spill %spseudo into memory: reject+=3\n",
   2932  1.1  mrg 		       nop, REG_P (op) ? "" : "Non-");
   2933  1.1  mrg 		  reject += 3;
   2934  1.1  mrg 		  if (VECTOR_MODE_P (mode))
   2935  1.1  mrg 		    {
   2936  1.1  mrg 		      /* Spilling vectors into memory is usually more
   2937  1.1  mrg 			 costly as they contain big values.  */
   2938  1.1  mrg 		      if (lra_dump_file != NULL)
   2939  1.1  mrg 			fprintf
   2940  1.1  mrg 			  (lra_dump_file,
   2941  1.1  mrg 			   "            %d Spill vector pseudo: reject+=2\n",
   2942  1.1  mrg 			   nop);
   2943  1.1  mrg 		      reject += 2;
   2944  1.1  mrg 		    }
   2945  1.1  mrg 		}
   2946  1.1  mrg 
   2947  1.1  mrg 	      /* When we use an operand requiring memory in given
   2948  1.1  mrg 		 alternative, the insn should write *and* read the
   2949  1.1  mrg 		 value to/from memory, which is costly in comparison with
   2950  1.1  mrg 		 an insn alternative which does not use memory
   2951  1.1  mrg 		 (e.g. register or immediate operand).  We exclude
   2952  1.1  mrg 		 memory operand for such case as we can satisfy the
   2953  1.1  mrg 		 memory constraints by reloading address.  */
   2954  1.1  mrg 	      if (no_regs_p && offmemok && !MEM_P (op))
   2955  1.1  mrg 		{
   2956  1.1  mrg 		  if (lra_dump_file != NULL)
   2957  1.1  mrg 		    fprintf
   2958  1.1  mrg 		      (lra_dump_file,
   2959  1.1  mrg 		       "            Using memory insn operand %d: reject+=3\n",
   2960  1.1  mrg 		       nop);
   2961  1.1  mrg 		  reject += 3;
   2962  1.1  mrg 		}
   2963  1.1  mrg 
   2964  1.1  mrg 	      /* If reload requires moving value through secondary
   2965  1.1  mrg 		 memory, it will need one more insn at least.  */
   2966  1.1  mrg 	      if (this_alternative != NO_REGS
   2967  1.1  mrg 		  && REG_P (op) && (cl = get_reg_class (REGNO (op))) != NO_REGS
   2968  1.1  mrg 		  && ((curr_static_id->operand[nop].type != OP_OUT
   2969  1.1  mrg 		       && targetm.secondary_memory_needed (GET_MODE (op), cl,
   2970  1.1  mrg 							   this_alternative))
   2971  1.1  mrg 		      || (curr_static_id->operand[nop].type != OP_IN
   2972  1.1  mrg 			  && (targetm.secondary_memory_needed
   2973  1.1  mrg 			      (GET_MODE (op), this_alternative, cl)))))
   2974  1.1  mrg 		losers++;
   2975  1.1  mrg 
   2976  1.1  mrg 	      if (MEM_P (op) && offmemok)
   2977  1.1  mrg 		addr_losers++;
   2978  1.1  mrg 	      else
   2979  1.1  mrg 		{
   2980  1.1  mrg 		  /* Input reloads can be inherited more often than
   2981  1.1  mrg 		     output reloads can be removed, so penalize output
   2982  1.1  mrg 		     reloads.  */
   2983  1.1  mrg 		  if (!REG_P (op) || curr_static_id->operand[nop].type != OP_IN)
   2984  1.1  mrg 		    {
   2985  1.1  mrg 		      if (lra_dump_file != NULL)
   2986  1.1  mrg 			fprintf
   2987  1.1  mrg 			  (lra_dump_file,
   2988  1.1  mrg 			   "            %d Non input pseudo reload: reject++\n",
   2989  1.1  mrg 			   nop);
   2990  1.1  mrg 		      reject++;
   2991  1.1  mrg 		    }
   2992  1.1  mrg 
   2993  1.1  mrg 		  if (curr_static_id->operand[nop].type == OP_INOUT)
   2994  1.1  mrg 		    {
   2995  1.1  mrg 		      if (lra_dump_file != NULL)
   2996  1.1  mrg 			fprintf
   2997  1.1  mrg 			  (lra_dump_file,
   2998  1.1  mrg 			   "            %d Input/Output reload: reject+=%d\n",
   2999  1.1  mrg 			   nop, LRA_LOSER_COST_FACTOR);
   3000  1.1  mrg 		      reject += LRA_LOSER_COST_FACTOR;
   3001  1.1  mrg 		    }
   3002  1.1  mrg 		}
   3003  1.1  mrg 	    }
   3004  1.1  mrg 
   3005  1.1  mrg 	  if (early_clobber_p && ! scratch_p)
   3006  1.1  mrg 	    {
   3007  1.1  mrg 	      if (lra_dump_file != NULL)
   3008  1.1  mrg 		fprintf (lra_dump_file,
   3009  1.1  mrg 			 "            %d Early clobber: reject++\n", nop);
   3010  1.1  mrg 	      reject++;
   3011  1.1  mrg 	    }
   3012  1.1  mrg 	  /* ??? We check early clobbers after processing all operands
   3013  1.1  mrg 	     (see loop below) and there we update the costs more.
   3014  1.1  mrg 	     Should we update the cost (maybe approximately) here
   3015  1.1  mrg 	     because of early clobber register reloads, or is it a rare
   3016  1.1  mrg 	     or unimportant thing not worth doing?  */
   3017  1.1  mrg 	  overall = (losers * LRA_LOSER_COST_FACTOR + reject
   3018  1.1  mrg 		     - (addr_losers == losers ? static_reject : 0));
   3019  1.1  mrg 	  if ((best_losers == 0 || losers != 0) && best_overall < overall)
   3020  1.1  mrg             {
   3021  1.1  mrg               if (lra_dump_file != NULL)
   3022  1.1  mrg 		fprintf (lra_dump_file,
   3023  1.1  mrg 			 "            alt=%d,overall=%d,losers=%d -- refuse\n",
   3024  1.1  mrg 			 nalt, overall, losers);
   3025  1.1  mrg               goto fail;
   3026  1.1  mrg             }
   3027  1.1  mrg 
   3028  1.1  mrg 	  if (update_and_check_small_class_inputs (nop, nalt,
   3029  1.1  mrg 						   this_alternative))
   3030  1.1  mrg 	    {
   3031  1.1  mrg 	      if (lra_dump_file != NULL)
   3032  1.1  mrg 		fprintf (lra_dump_file,
   3033  1.1  mrg 			 "            alt=%d, not enough small class regs -- refuse\n",
   3034  1.1  mrg 			 nalt);
   3035  1.1  mrg 	      goto fail;
   3036  1.1  mrg 	    }
   3037  1.1  mrg 	  curr_alt[nop] = this_alternative;
   3038  1.1  mrg 	  curr_alt_set[nop] = this_alternative_set;
   3039  1.1  mrg 	  curr_alt_exclude_start_hard_regs[nop]
   3040  1.1  mrg 	    = this_alternative_exclude_start_hard_regs;
   3041  1.1  mrg 	  curr_alt_win[nop] = this_alternative_win;
   3042  1.1  mrg 	  curr_alt_match_win[nop] = this_alternative_match_win;
   3043  1.1  mrg 	  curr_alt_offmemok[nop] = this_alternative_offmemok;
   3044  1.1  mrg 	  curr_alt_matches[nop] = this_alternative_matches;
   3045  1.1  mrg 
   3046  1.1  mrg 	  if (this_alternative_matches >= 0
   3047  1.1  mrg 	      && !did_match && !this_alternative_win)
   3048  1.1  mrg 	    curr_alt_win[this_alternative_matches] = false;
   3049  1.1  mrg 
   3050  1.1  mrg 	  if (early_clobber_p && operand_reg[nop] != NULL_RTX)
   3051  1.1  mrg 	    early_clobbered_nops[early_clobbered_regs_num++] = nop;
   3052  1.1  mrg 	}
   3053  1.1  mrg 
   3054  1.1  mrg       if (curr_insn_set != NULL_RTX && n_operands == 2
   3055  1.1  mrg 	  /* Prevent processing non-move insns.  */
   3056  1.1  mrg 	  && (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
   3057  1.1  mrg 	      || SET_SRC (curr_insn_set) == no_subreg_reg_operand[1])
   3058  1.1  mrg 	  && ((! curr_alt_win[0] && ! curr_alt_win[1]
   3059  1.1  mrg 	       && REG_P (no_subreg_reg_operand[0])
   3060  1.1  mrg 	       && REG_P (no_subreg_reg_operand[1])
   3061  1.1  mrg 	       && (reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
   3062  1.1  mrg 		   || reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0])))
   3063  1.1  mrg 	      || (! curr_alt_win[0] && curr_alt_win[1]
   3064  1.1  mrg 		  && REG_P (no_subreg_reg_operand[1])
   3065  1.1  mrg 		  /* Check that we reload memory not the memory
   3066  1.1  mrg 		     address.  */
   3067  1.1  mrg 		  && ! (curr_alt_offmemok[0]
   3068  1.1  mrg 			&& MEM_P (no_subreg_reg_operand[0]))
   3069  1.1  mrg 		  && reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0]))
   3070  1.1  mrg 	      || (curr_alt_win[0] && ! curr_alt_win[1]
   3071  1.1  mrg 		  && REG_P (no_subreg_reg_operand[0])
   3072  1.1  mrg 		  /* Check that we reload memory not the memory
   3073  1.1  mrg 		     address.  */
   3074  1.1  mrg 		  && ! (curr_alt_offmemok[1]
   3075  1.1  mrg 			&& MEM_P (no_subreg_reg_operand[1]))
   3076  1.1  mrg 		  && reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
   3077  1.1  mrg 		  && (! CONST_POOL_OK_P (curr_operand_mode[1],
   3078  1.1  mrg 					 no_subreg_reg_operand[1])
   3079  1.1  mrg 		      || (targetm.preferred_reload_class
   3080  1.1  mrg 			  (no_subreg_reg_operand[1],
   3081  1.1  mrg 			   (enum reg_class) curr_alt[1]) != NO_REGS))
   3082  1.1  mrg 		  /* If it is a result of recent elimination in move
   3083  1.1  mrg 		     insn we can transform it into an add still by
   3084  1.1  mrg 		     using this alternative.  */
   3085  1.1  mrg 		  && GET_CODE (no_subreg_reg_operand[1]) != PLUS
   3086  1.1  mrg 		  /* Likewise if the source has been replaced with an
   3087  1.1  mrg 		     equivalent value.  This only happens once -- the reload
   3088  1.1  mrg 		     will use the equivalent value instead of the register it
   3089  1.1  mrg 		     replaces -- so there should be no danger of cycling.  */
   3090  1.1  mrg 		  && !equiv_substition_p[1])))
   3091  1.1  mrg 	{
   3092  1.1  mrg 	  /* We have a move insn and a new reload insn will be similar
   3093  1.1  mrg 	     to the current insn.  We should avoid such situation as
   3094  1.1  mrg 	     it results in LRA cycling.  */
   3095  1.1  mrg 	  if (lra_dump_file != NULL)
   3096  1.1  mrg 	    fprintf (lra_dump_file,
   3097  1.1  mrg 		     "            Cycle danger: overall += LRA_MAX_REJECT\n");
   3098  1.1  mrg 	  overall += LRA_MAX_REJECT;
   3099  1.1  mrg 	}
   3100  1.1  mrg       ok_p = true;
   3101  1.1  mrg       curr_alt_dont_inherit_ops_num = 0;
   3102  1.1  mrg       for (nop = 0; nop < early_clobbered_regs_num; nop++)
   3103  1.1  mrg 	{
   3104  1.1  mrg 	  int i, j, clobbered_hard_regno, first_conflict_j, last_conflict_j;
   3105  1.1  mrg 	  HARD_REG_SET temp_set;
   3106  1.1  mrg 
   3107  1.1  mrg 	  i = early_clobbered_nops[nop];
   3108  1.1  mrg 	  if ((! curr_alt_win[i] && ! curr_alt_match_win[i])
   3109  1.1  mrg 	      || hard_regno[i] < 0)
   3110  1.1  mrg 	    continue;
   3111  1.1  mrg 	  lra_assert (operand_reg[i] != NULL_RTX);
   3112  1.1  mrg 	  clobbered_hard_regno = hard_regno[i];
   3113  1.1  mrg 	  CLEAR_HARD_REG_SET (temp_set);
   3114  1.1  mrg 	  add_to_hard_reg_set (&temp_set, biggest_mode[i], clobbered_hard_regno);
   3115  1.1  mrg 	  first_conflict_j = last_conflict_j = -1;
   3116  1.1  mrg 	  for (j = 0; j < n_operands; j++)
   3117  1.1  mrg 	    if (j == i
   3118  1.1  mrg 		/* We don't want to process the insides of match_operator
   3119  1.1  mrg 		   and match_parallel because otherwise we would process
   3120  1.1  mrg 		   their operands once again, generating wrong
   3121  1.1  mrg 		   code.  */
   3122  1.1  mrg 		|| curr_static_id->operand[j].is_operator)
   3123  1.1  mrg 	      continue;
   3124  1.1  mrg 	    else if ((curr_alt_matches[j] == i && curr_alt_match_win[j])
   3125  1.1  mrg 		     || (curr_alt_matches[i] == j && curr_alt_match_win[i]))
   3126  1.1  mrg 	      continue;
   3127  1.1  mrg 	    /* If we don't reload j-th operand, check conflicts.  */
   3128  1.1  mrg 	    else if ((curr_alt_win[j] || curr_alt_match_win[j])
   3129  1.1  mrg 		     && uses_hard_regs_p (*curr_id->operand_loc[j], temp_set))
   3130  1.1  mrg 	      {
   3131  1.1  mrg 		if (first_conflict_j < 0)
   3132  1.1  mrg 		  first_conflict_j = j;
   3133  1.1  mrg 		last_conflict_j = j;
   3134  1.1  mrg 		/* Both the earlyclobber operand and conflicting operand
   3135  1.1  mrg 		   cannot both be user defined hard registers.  */
   3136  1.1  mrg 		if (HARD_REGISTER_P (operand_reg[i])
   3137  1.1  mrg 		    && REG_USERVAR_P (operand_reg[i])
   3138  1.1  mrg 		    && operand_reg[j] != NULL_RTX
   3139  1.1  mrg 		    && HARD_REGISTER_P (operand_reg[j])
   3140  1.1  mrg 		    && REG_USERVAR_P (operand_reg[j]))
   3141  1.1  mrg 		  {
   3142  1.1  mrg 		    /* For asm, let curr_insn_transform diagnose it.  */
   3143  1.1  mrg 		    if (INSN_CODE (curr_insn) < 0)
   3144  1.1  mrg 		      return false;
   3145  1.1  mrg 		    fatal_insn ("unable to generate reloads for "
   3146  1.1  mrg 				"impossible constraints:", curr_insn);
   3147  1.1  mrg 		  }
   3148  1.1  mrg 	      }
   3149  1.1  mrg 	  if (last_conflict_j < 0)
   3150  1.1  mrg 	    continue;
   3151  1.1  mrg 
   3152  1.1  mrg 	  /* If an earlyclobber operand conflicts with another non-matching
   3153  1.1  mrg 	     operand (ie, they have been assigned the same hard register),
   3154  1.1  mrg 	     then it is better to reload the other operand, as there may
   3155  1.1  mrg 	     exist yet another operand with a matching constraint associated
   3156  1.1  mrg 	     with the earlyclobber operand.  However, if one of the operands
   3157  1.1  mrg 	     is an explicit use of a hard register, then we must reload the
   3158  1.1  mrg 	     other non-hard register operand.  */
   3159  1.1  mrg 	  if (HARD_REGISTER_P (operand_reg[i])
   3160  1.1  mrg 	      || (first_conflict_j == last_conflict_j
   3161  1.1  mrg 		  && operand_reg[last_conflict_j] != NULL_RTX
   3162  1.1  mrg 		  && !curr_alt_match_win[last_conflict_j]
   3163  1.1  mrg 		  && !HARD_REGISTER_P (operand_reg[last_conflict_j])))
   3164  1.1  mrg 	    {
   3165  1.1  mrg 	      curr_alt_win[last_conflict_j] = false;
   3166  1.1  mrg 	      curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++]
   3167  1.1  mrg 		= last_conflict_j;
   3168  1.1  mrg 	      losers++;
   3169  1.1  mrg 	      if (lra_dump_file != NULL)
   3170  1.1  mrg 		fprintf
   3171  1.1  mrg 		  (lra_dump_file,
   3172  1.1  mrg 		   "            %d Conflict early clobber reload: reject--\n",
   3173  1.1  mrg 		   i);
   3174  1.1  mrg 	    }
   3175  1.1  mrg 	  else
   3176  1.1  mrg 	    {
   3177  1.1  mrg 	      /* We need to reload early clobbered register and the
   3178  1.1  mrg 		 matched registers.  */
   3179  1.1  mrg 	      for (j = 0; j < n_operands; j++)
   3180  1.1  mrg 		if (curr_alt_matches[j] == i)
   3181  1.1  mrg 		  {
   3182  1.1  mrg 		    curr_alt_match_win[j] = false;
   3183  1.1  mrg 		    losers++;
   3184  1.1  mrg 		    overall += LRA_LOSER_COST_FACTOR;
   3185  1.1  mrg 		  }
   3186  1.1  mrg 	      if (! curr_alt_match_win[i])
   3187  1.1  mrg 		curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++] = i;
   3188  1.1  mrg 	      else
   3189  1.1  mrg 		{
   3190  1.1  mrg 		  /* Remember pseudos used for match reloads are never
   3191  1.1  mrg 		     inherited.  */
   3192  1.1  mrg 		  lra_assert (curr_alt_matches[i] >= 0);
   3193  1.1  mrg 		  curr_alt_win[curr_alt_matches[i]] = false;
   3194  1.1  mrg 		}
   3195  1.1  mrg 	      curr_alt_win[i] = curr_alt_match_win[i] = false;
   3196  1.1  mrg 	      losers++;
   3197  1.1  mrg 	      if (lra_dump_file != NULL)
   3198  1.1  mrg 		fprintf
   3199  1.1  mrg 		  (lra_dump_file,
   3200  1.1  mrg 		   "            %d Matched conflict early clobber reloads: "
   3201  1.1  mrg 		   "reject--\n",
   3202  1.1  mrg 		   i);
   3203  1.1  mrg 	    }
   3204  1.1  mrg 	  /* Early clobber was already reflected in REJECT. */
   3205  1.1  mrg 	  if (!matching_early_clobber[i])
   3206  1.1  mrg 	    {
   3207  1.1  mrg 	      lra_assert (reject > 0);
   3208  1.1  mrg 	      reject--;
   3209  1.1  mrg 	      matching_early_clobber[i] = 1;
   3210  1.1  mrg 	    }
   3211  1.1  mrg 	  overall += LRA_LOSER_COST_FACTOR - 1;
   3212  1.1  mrg 	}
   3213  1.1  mrg       if (lra_dump_file != NULL)
   3214  1.1  mrg 	fprintf (lra_dump_file, "          alt=%d,overall=%d,losers=%d,rld_nregs=%d\n",
   3215  1.1  mrg 		 nalt, overall, losers, reload_nregs);
   3216  1.1  mrg 
   3217  1.1  mrg       /* If this alternative can be made to work by reloading, and it
   3218  1.1  mrg 	 needs less reloading than the others checked so far, record
   3219  1.1  mrg 	 it as the chosen goal for reloading.  */
   3220  1.1  mrg       if ((best_losers != 0 && losers == 0)
   3221  1.1  mrg 	  || (((best_losers == 0 && losers == 0)
   3222  1.1  mrg 	       || (best_losers != 0 && losers != 0))
   3223  1.1  mrg 	      && (best_overall > overall
   3224  1.1  mrg 		  || (best_overall == overall
   3225  1.1  mrg 		      /* If the cost of the reloads is the same,
   3226  1.1  mrg 			 prefer alternative which requires minimal
   3227  1.1  mrg 			 number of reload regs.  */
   3228  1.1  mrg 		      && (reload_nregs < best_reload_nregs
   3229  1.1  mrg 			  || (reload_nregs == best_reload_nregs
   3230  1.1  mrg 			      && (best_reload_sum < reload_sum
   3231  1.1  mrg 				  || (best_reload_sum == reload_sum
   3232  1.1  mrg 				      && nalt < goal_alt_number))))))))
   3233  1.1  mrg 	{
   3234  1.1  mrg 	  for (nop = 0; nop < n_operands; nop++)
   3235  1.1  mrg 	    {
   3236  1.1  mrg 	      goal_alt_win[nop] = curr_alt_win[nop];
   3237  1.1  mrg 	      goal_alt_match_win[nop] = curr_alt_match_win[nop];
   3238  1.1  mrg 	      goal_alt_matches[nop] = curr_alt_matches[nop];
   3239  1.1  mrg 	      goal_alt[nop] = curr_alt[nop];
   3240  1.1  mrg 	      goal_alt_exclude_start_hard_regs[nop]
   3241  1.1  mrg 		= curr_alt_exclude_start_hard_regs[nop];
   3242  1.1  mrg 	      goal_alt_offmemok[nop] = curr_alt_offmemok[nop];
   3243  1.1  mrg 	    }
   3244  1.1  mrg 	  goal_alt_dont_inherit_ops_num = curr_alt_dont_inherit_ops_num;
   3245  1.1  mrg 	  for (nop = 0; nop < curr_alt_dont_inherit_ops_num; nop++)
   3246  1.1  mrg 	    goal_alt_dont_inherit_ops[nop] = curr_alt_dont_inherit_ops[nop];
   3247  1.1  mrg 	  goal_alt_swapped = curr_swapped;
   3248  1.1  mrg 	  best_overall = overall;
   3249  1.1  mrg 	  best_losers = losers;
   3250  1.1  mrg 	  best_reload_nregs = reload_nregs;
   3251  1.1  mrg 	  best_reload_sum = reload_sum;
   3252  1.1  mrg 	  goal_alt_number = nalt;
   3253  1.1  mrg 	}
   3254  1.1  mrg       if (losers == 0)
   3255  1.1  mrg 	/* Everything is satisfied.  Do not process alternatives
   3256  1.1  mrg 	   anymore.  */
   3257  1.1  mrg 	break;
   3258  1.1  mrg     fail:
   3259  1.1  mrg       ;
   3260  1.1  mrg     }
   3261  1.1  mrg   return ok_p;
   3262  1.1  mrg }
   3263  1.1  mrg 
   3264  1.1  mrg /* Make reload base reg from address AD.  */
   3265  1.1  mrg static rtx
   3266  1.1  mrg base_to_reg (struct address_info *ad)
   3267  1.1  mrg {
   3268  1.1  mrg   enum reg_class cl;
   3269  1.1  mrg   int code = -1;
   3270  1.1  mrg   rtx new_inner = NULL_RTX;
   3271  1.1  mrg   rtx new_reg = NULL_RTX;
   3272  1.1  mrg   rtx_insn *insn;
   3273  1.1  mrg   rtx_insn *last_insn = get_last_insn();
   3274  1.1  mrg 
   3275  1.1  mrg   lra_assert (ad->disp == ad->disp_term);
   3276  1.1  mrg   cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
   3277  1.1  mrg                        get_index_code (ad));
   3278  1.1  mrg   new_reg = lra_create_new_reg (GET_MODE (*ad->base), NULL_RTX, cl, NULL,
   3279  1.1  mrg 				"base");
   3280  1.1  mrg   new_inner = simplify_gen_binary (PLUS, GET_MODE (new_reg), new_reg,
   3281  1.1  mrg                                    ad->disp_term == NULL
   3282  1.1  mrg                                    ? const0_rtx
   3283  1.1  mrg                                    : *ad->disp_term);
   3284  1.1  mrg   if (!valid_address_p (ad->mode, new_inner, ad->as))
   3285  1.1  mrg     return NULL_RTX;
   3286  1.1  mrg   insn = emit_insn (gen_rtx_SET (new_reg, *ad->base));
   3287  1.1  mrg   code = recog_memoized (insn);
   3288  1.1  mrg   if (code < 0)
   3289  1.1  mrg     {
   3290  1.1  mrg       delete_insns_since (last_insn);
   3291  1.1  mrg       return NULL_RTX;
   3292  1.1  mrg     }
   3293  1.1  mrg 
   3294  1.1  mrg   return new_inner;
   3295  1.1  mrg }
   3296  1.1  mrg 
   3297  1.1  mrg /* Make reload base reg + DISP from address AD.  Return the new pseudo.  */
   3298  1.1  mrg static rtx
   3299  1.1  mrg base_plus_disp_to_reg (struct address_info *ad, rtx disp)
   3300  1.1  mrg {
   3301  1.1  mrg   enum reg_class cl;
   3302  1.1  mrg   rtx new_reg;
   3303  1.1  mrg 
   3304  1.1  mrg   lra_assert (ad->base == ad->base_term);
   3305  1.1  mrg   cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
   3306  1.1  mrg 		       get_index_code (ad));
   3307  1.1  mrg   new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX, cl, NULL,
   3308  1.1  mrg 				"base + disp");
   3309  1.1  mrg   lra_emit_add (new_reg, *ad->base_term, disp);
   3310  1.1  mrg   return new_reg;
   3311  1.1  mrg }
   3312  1.1  mrg 
   3313  1.1  mrg /* Make reload of index part of address AD.  Return the new
   3314  1.1  mrg    pseudo.  */
   3315  1.1  mrg static rtx
   3316  1.1  mrg index_part_to_reg (struct address_info *ad)
   3317  1.1  mrg {
   3318  1.1  mrg   rtx new_reg;
   3319  1.1  mrg 
   3320  1.1  mrg   new_reg = lra_create_new_reg (GET_MODE (*ad->index), NULL_RTX,
   3321  1.1  mrg 				INDEX_REG_CLASS, NULL, "index term");
   3322  1.1  mrg   expand_mult (GET_MODE (*ad->index), *ad->index_term,
   3323  1.1  mrg 	       GEN_INT (get_index_scale (ad)), new_reg, 1);
   3324  1.1  mrg   return new_reg;
   3325  1.1  mrg }
   3326  1.1  mrg 
   3327  1.1  mrg /* Return true if we can add a displacement to address AD, even if that
   3328  1.1  mrg    makes the address invalid.  The fix-up code requires any new address
   3329  1.1  mrg    to be the sum of the BASE_TERM, INDEX and DISP_TERM fields.  */
   3330  1.1  mrg static bool
   3331  1.1  mrg can_add_disp_p (struct address_info *ad)
   3332  1.1  mrg {
   3333  1.1  mrg   return (!ad->autoinc_p
   3334  1.1  mrg 	  && ad->segment == NULL
   3335  1.1  mrg 	  && ad->base == ad->base_term
   3336  1.1  mrg 	  && ad->disp == ad->disp_term);
   3337  1.1  mrg }
   3338  1.1  mrg 
/* Substitute equivalent values for the base and index registers of
   address AD, folding any constant offsets into the displacement.
   Return true if a substitution was made.  */
static bool
equiv_address_substitution (struct address_info *ad)
{
  rtx base_reg, new_base_reg, index_reg, new_index_reg, *base_term, *index_term;
  poly_int64 disp;
  HOST_WIDE_INT scale;
  bool change_p;

  /* Look through any subreg wrapping the base term and find the
     equivalent value of the base after elimination.  */
  base_term = strip_subreg (ad->base_term);
  if (base_term == NULL)
    base_reg = new_base_reg = NULL_RTX;
  else
    {
      base_reg = *base_term;
      new_base_reg = get_equiv_with_elimination (base_reg, curr_insn);
    }
  /* Likewise for the index term.  */
  index_term = strip_subreg (ad->index_term);
  if (index_term == NULL)
    index_reg = new_index_reg = NULL_RTX;
  else
    {
      index_reg = *index_term;
      new_index_reg = get_equiv_with_elimination (index_reg, curr_insn);
    }
  /* Neither term has a different equivalence -- nothing to do.  */
  if (base_reg == new_base_reg && index_reg == new_index_reg)
    return false;
  disp = 0;
  change_p = false;
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, "Changing address in insn %d ",
	       INSN_UID (curr_insn));
      dump_value_slim (lra_dump_file, *ad->outer, 1);
    }
  if (base_reg != new_base_reg)
    {
      poly_int64 offset;
      if (REG_P (new_base_reg))
	{
	  /* The equivalence is a plain register: substitute it
	     directly.  */
	  *base_term = new_base_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_base_reg) == PLUS
	       && REG_P (XEXP (new_base_reg, 0))
	       && poly_int_rtx_p (XEXP (new_base_reg, 1), &offset)
	       && can_add_disp_p (ad))
	{
	  /* The equivalence is reg + constant: use the register as the
	     base and accumulate the constant into DISP, applied at the
	     end.  */
	  disp += offset;
	  *base_term = XEXP (new_base_reg, 0);
	  change_p = true;
	}
      if (ad->base_term2 != NULL)
	/* Keep the second occurrence of the base term in sync.  */
	*ad->base_term2 = *ad->base_term;
    }
  if (index_reg != new_index_reg)
    {
      poly_int64 offset;
      if (REG_P (new_index_reg))
	{
	  /* Plain register equivalence: substitute it directly.  */
	  *index_term = new_index_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_index_reg) == PLUS
	       && REG_P (XEXP (new_index_reg, 0))
	       && poly_int_rtx_p (XEXP (new_index_reg, 1), &offset)
	       && can_add_disp_p (ad)
	       /* Intentional assignment: only fold the constant in when
		  the index scale is nonzero.  */
	       && (scale = get_index_scale (ad)))
	{
	  /* The index constant contributes scaled to the
	     displacement.  */
	  disp += offset * scale;
	  *index_term = XEXP (new_index_reg, 0);
	  change_p = true;
	}
    }
  if (maybe_ne (disp, 0))
    {
      /* Apply the accumulated constant: add it to the existing
	 displacement, or to the whole inner address when there is no
	 displacement slot (then re-decompose the address).  */
      if (ad->disp != NULL)
	*ad->disp = plus_constant (GET_MODE (*ad->inner), *ad->disp, disp);
      else
	{
	  *ad->inner = plus_constant (GET_MODE (*ad->inner), *ad->inner, disp);
	  update_address (ad);
	}
      change_p = true;
    }
  if (lra_dump_file != NULL)
    {
      if (! change_p)
	fprintf (lra_dump_file, " -- no change\n");
      else
	{
	  fprintf (lra_dump_file, " on equiv ");
	  dump_value_slim (lra_dump_file, *ad->outer, 1);
	  fprintf (lra_dump_file, "\n");
	}
    }
  return change_p;
}
   3438  1.1  mrg 
/* Return a pointer to the first character of constraint STR that is
   neither a constraint modifier nor whitespace.  */
static const char *
skip_constraint_modifiers (const char *str)
{
  for (;;)
    {
      char c = *str;
      if (c == '=' || c == '+' || c == '&' || c == '*' || c == '%'
	  || c == '$' || c == '^' || c == '?' || c == '!'
	  || c == ' ' || c == '\t')
	str++;
      else
	return str;
    }
}
   3453  1.1  mrg 
/* Major function to make reloads for an address in operand NOP or
   check its correctness (if CHECK_ONLY_P is true).  The supported
   cases are:

   1) an address that existed before LRA started, at which point it
   must have been valid.  These addresses are subject to elimination
   and may have become invalid due to the elimination offset being out
   of range.

   2) an address created by forcing a constant to memory
   (force_const_to_mem).  The initial form of these addresses might
   not be valid, and it is this function's job to make them valid.

   3) a frame address formed from a register and a (possibly zero)
   constant offset.  As above, these addresses might not be valid and
   this function must make them so.

   Add reloads to the lists *BEFORE and *AFTER.  We might need to add
   reloads to *AFTER because of inc/dec, {pre, post} modify in the
   address.  Return true for any RTL change.

   The function is a helper function which does not produce all
   transformations (when CHECK_ONLY_P is false) which can be
   necessary.  It does just basic steps.  To do all necessary
   transformations use function process_address.  */
static bool
process_address_1 (int nop, bool check_only_p,
		   rtx_insn **before, rtx_insn **after)
{
  struct address_info ad;
  rtx new_reg;
  HOST_WIDE_INT scale;
  rtx op = *curr_id->operand_loc[nop];
  rtx mem = extract_mem_from_operand (op);
  const char *constraint;
  enum constraint_num cn;
  bool change_p = false;

  /* A BLKmode memory whose address is a scratch needs no address
     processing.  */
  if (MEM_P (mem)
      && GET_MODE (mem) == BLKmode
      && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return false;

  constraint
    = skip_constraint_modifiers (curr_static_id->operand[nop].constraint);
  /* A digit means a matching constraint: use the constraint string of
     the operand being matched instead.  */
  if (IN_RANGE (constraint[0], '0', '9'))
    {
      char *end;
      unsigned long dup = strtoul (constraint, &end, 10);
      constraint
	= skip_constraint_modifiers (curr_static_id->operand[dup].constraint);
    }
  cn = lookup_constraint (*constraint == '\0' ? "X" : constraint);
  /* If we have several alternatives or/and several constraints in an
     alternative and we can not say at this stage what constraint will be used,
     use unknown constraint.  The exception is an address constraint.  If
     operand has one address constraint, probably all others constraints are
     address ones.  */
  if (constraint[0] != '\0' && get_constraint_type (cn) != CT_ADDRESS
      && *skip_constraint_modifiers (constraint
				     + CONSTRAINT_LEN (constraint[0],
						       constraint)) != '\0')
    cn = CONSTRAINT__UNKNOWN;
  if (insn_extra_address_constraint (cn)
      /* When we find an asm operand with an address constraint that
	 doesn't satisfy address_operand to begin with, we clear
	 is_address, so that we don't try to make a non-address fit.
	 If the asm statement got this far, it's because other
	 constraints are available, and we'll use them, disregarding
	 the unsatisfiable address ones.  */
      && curr_static_id->operand[nop].is_address)
    decompose_lea_address (&ad, curr_id->operand_loc[nop]);
  /* Do not attempt to decompose arbitrary addresses generated by combine
     for asm operands with loose constraints, e.g 'X'.
     Need to extract memory from op for special memory constraint,
     i.e. bcst_mem_operand in i386 backend.  */
  else if (MEM_P (mem)
	   && !(INSN_CODE (curr_insn) < 0
		&& get_constraint_type (cn) == CT_FIXED_FORM
		&& constraint_satisfied_p (op, cn)))
    decompose_mem_address (&ad, mem);
  else if (GET_CODE (op) == SUBREG
	   && MEM_P (SUBREG_REG (op)))
    decompose_mem_address (&ad, SUBREG_REG (op));
  else
    /* Not an address we know how to process.  */
    return false;
  /* If INDEX_REG_CLASS is assigned to base_term already and isn't to
     index_term, swap them so to avoid assigning INDEX_REG_CLASS to both
     when INDEX_REG_CLASS is a single register class.  */
  if (ad.base_term != NULL
      && ad.index_term != NULL
      && ira_class_hard_regs_num[INDEX_REG_CLASS] == 1
      && REG_P (*ad.base_term)
      && REG_P (*ad.index_term)
      && in_class_p (*ad.base_term, INDEX_REG_CLASS, NULL)
      && ! in_class_p (*ad.index_term, INDEX_REG_CLASS, NULL))
    {
      std::swap (ad.base, ad.index);
      std::swap (ad.base_term, ad.index_term);
    }
  /* Substitute equivalences into the address first (may change its
     shape).  */
  if (! check_only_p)
    change_p = equiv_address_substitution (&ad);
  /* Reload the base register if needed.  For an autoincremented base
     that does not die in this insn, the new value must also be copied
     back after the insn, hence the AFTER list.  */
  if (ad.base_term != NULL
      && (process_addr_reg
	  (ad.base_term, check_only_p, before,
	   (ad.autoinc_p
	    && !(REG_P (*ad.base_term)
		 && find_regno_note (curr_insn, REG_DEAD,
				     REGNO (*ad.base_term)) != NULL_RTX)
	    ? after : NULL),
	   base_reg_class (ad.mode, ad.as, ad.base_outer_code,
			   get_index_code (&ad)))))
    {
      change_p = true;
      if (ad.base_term2 != NULL)
	/* Keep the second occurrence of the base in sync.  */
	*ad.base_term2 = *ad.base_term;
    }
  /* Reload the index register if needed.  */
  if (ad.index_term != NULL
      && process_addr_reg (ad.index_term, check_only_p,
			   before, NULL, INDEX_REG_CLASS))
    change_p = true;

  /* Target hooks sometimes don't treat extra-constraint addresses as
     legitimate address_operands, so handle them specially.  */
  if (insn_extra_address_constraint (cn)
      && satisfies_address_constraint_p (&ad, cn))
    return change_p;

  if (check_only_p)
    return change_p;

  /* There are four cases where the shape of *AD.INNER may now be invalid:

     1) the original address was valid, but either elimination or
     equiv_address_substitution was applied and that made
     the address invalid.

     2) the address is an invalid symbolic address created by
     force_const_to_mem.

     3) the address is a frame address with an invalid offset.

     4) the address is a frame address with an invalid base.

     All these cases involve a non-autoinc address, so there is no
     point revalidating other types.  */
  if (ad.autoinc_p || valid_address_p (op, &ad, cn))
    return change_p;

  /* Any index existed before LRA started, so we can assume that the
     presence and shape of the index is valid.  */
  push_to_sequence (*before);
  lra_assert (ad.disp == ad.disp_term);
  if (ad.base == NULL)
    {
      if (ad.index == NULL)
	{
	  /* No base and no index: the whole inner address must be
	     loaded into a new base register.  */
	  rtx_insn *insn;
	  rtx_insn *last = get_last_insn ();
	  int code = -1;
	  enum reg_class cl = base_reg_class (ad.mode, ad.as,
					      SCRATCH, SCRATCH);
	  rtx addr = *ad.inner;

	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "addr");
	  if (HAVE_lo_sum)
	    {
	      /* addr => lo_sum (new_base, addr), case (2) above.  */
	      insn = emit_insn (gen_rtx_SET
				(new_reg,
				 gen_rtx_HIGH (Pmode, copy_rtx (addr))));
	      code = recog_memoized (insn);
	      if (code >= 0)
		{
		  *ad.inner = gen_rtx_LO_SUM (Pmode, new_reg, addr);
		  if (!valid_address_p (op, &ad, cn))
		    {
		      /* Try to put lo_sum into register.  */
		      insn = emit_insn (gen_rtx_SET
					(new_reg,
					 gen_rtx_LO_SUM (Pmode, new_reg, addr)));
		      code = recog_memoized (insn);
		      if (code >= 0)
			{
			  *ad.inner = new_reg;
			  if (!valid_address_p (op, &ad, cn))
			    {
			      /* Still invalid: restore the original
				 address and fall back below.  */
			      *ad.inner = addr;
			      code = -1;
			    }
			}

		    }
		}
	      /* The HIGH/LO_SUM approach failed: discard the emitted
		 insns.  */
	      if (code < 0)
		delete_insns_since (last);
	    }

	  if (code < 0)
	    {
	      /* addr => new_base, case (2) above.  */
	      lra_emit_move (new_reg, addr);

	      /* Verify that every insn emitted for the move is
		 recognized; otherwise undo everything.  */
	      for (insn = last == NULL_RTX ? get_insns () : NEXT_INSN (last);
		   insn != NULL_RTX;
		   insn = NEXT_INSN (insn))
		if (recog_memoized (insn) < 0)
		  break;
	      if (insn != NULL_RTX)
		{
		  /* Do nothing if we cannot generate right insns.
		     This is analogous to reload pass behavior.  */
		  delete_insns_since (last);
		  end_sequence ();
		  return false;
		}
	      *ad.inner = new_reg;
	    }
	}
      else
	{
	  /* index * scale + disp => new base + index * scale,
	     case (1) above.  */
	  enum reg_class cl = base_reg_class (ad.mode, ad.as, PLUS,
					      GET_CODE (*ad.index));

	  lra_assert (INDEX_REG_CLASS != NO_REGS);
	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "disp");
	  lra_emit_move (new_reg, *ad.disp);
	  *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
					   new_reg, *ad.index);
	}
    }
  else if (ad.index == NULL)
    {
      int regno;
      enum reg_class cl;
      rtx set;
      rtx_insn *insns, *last_insn;
      /* Try to reload base into register only if the base is invalid
         for the address but with valid offset, case (4) above.  */
      start_sequence ();
      new_reg = base_to_reg (&ad);

      /* base + disp => new base, cases (1) and (3) above.  */
      /* Another option would be to reload the displacement into an
	 index register.  However, postreload has code to optimize
	 address reloads that have the same base and different
	 displacements, so reloading into an index register would
	 not necessarily be a win.  */
      if (new_reg == NULL_RTX)
	{
	  /* See if the target can split the displacement into a
	     legitimate new displacement from a local anchor.  */
	  gcc_assert (ad.disp == ad.disp_term);
	  poly_int64 orig_offset;
	  rtx offset1, offset2;
	  if (poly_int_rtx_p (*ad.disp, &orig_offset)
	      && targetm.legitimize_address_displacement (&offset1, &offset2,
							  orig_offset,
							  ad.mode))
	    {
	      new_reg = base_plus_disp_to_reg (&ad, offset1);
	      new_reg = gen_rtx_PLUS (GET_MODE (new_reg), new_reg, offset2);
	    }
	  else
	    new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
	}
      insns = get_insns ();
      last_insn = get_last_insn ();
      /* If we generated at least two insns, try last insn source as
	 an address.  If we succeed, we generate one less insn.  */
      if (REG_P (new_reg)
	  && last_insn != insns
	  && (set = single_set (last_insn)) != NULL_RTX
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && REG_P (XEXP (SET_SRC (set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (set), 1)))
	{
	  *ad.inner = SET_SRC (set);
	  if (valid_address_p (op, &ad, cn))
	    {
	      /* The reg + const form is itself a valid address: use it
		 directly and drop the final add insn.  */
	      *ad.base_term = XEXP (SET_SRC (set), 0);
	      *ad.disp_term = XEXP (SET_SRC (set), 1);
	      cl = base_reg_class (ad.mode, ad.as, ad.base_outer_code,
				   get_index_code (&ad));
	      regno = REGNO (*ad.base_term);
	      if (regno >= FIRST_PSEUDO_REGISTER
		  && cl != lra_get_allocno_class (regno))
		lra_change_class (regno, cl, "      Change to", true);
	      new_reg = SET_SRC (set);
	      delete_insns_since (PREV_INSN (last_insn));
	    }
	}
      end_sequence ();
      emit_insn (insns);
      *ad.inner = new_reg;
    }
  else if (ad.disp_term != NULL)
    {
      /* base + scale * index + disp => new base + scale * index,
	 case (1) above.  */
      gcc_assert (ad.disp == ad.disp_term);
      new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       new_reg, *ad.index);
    }
  else if ((scale = get_index_scale (&ad)) == 1)
    {
      /* The last transformation to one reg will be made in
	 curr_insn_transform function.  */
      end_sequence ();
      return false;
    }
  else if (scale != 0)
    {
      /* base + scale * index => base + new_reg,
	 case (1) above.
      Index part of address may become invalid.  For example, we
      changed pseudo on the equivalent memory and a subreg of the
      pseudo onto the memory of different mode for which the scale is
      prohibited.  */
      new_reg = index_part_to_reg (&ad);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       *ad.base_term, new_reg);
    }
  else
    {
      /* Unknown scale: load the whole inner address into a new base
	 register.  */
      enum reg_class cl = base_reg_class (ad.mode, ad.as,
					  SCRATCH, SCRATCH);
      rtx addr = *ad.inner;

      new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "addr");
      /* addr => new_base.  */
      lra_emit_move (new_reg, addr);
      *ad.inner = new_reg;
    }
  /* Prepend the emitted reload insns to the BEFORE list.  */
  *before = get_insns ();
  end_sequence ();
  return true;
}
   3795  1.1  mrg 
   3796  1.1  mrg /* If CHECK_ONLY_P is false, do address reloads until it is necessary.
   3797  1.1  mrg    Use process_address_1 as a helper function.  Return true for any
   3798  1.1  mrg    RTL changes.
   3799  1.1  mrg 
   3800  1.1  mrg    If CHECK_ONLY_P is true, just check address correctness.  Return
   3801  1.1  mrg    false if the address correct.  */
   3802  1.1  mrg static bool
   3803  1.1  mrg process_address (int nop, bool check_only_p,
   3804  1.1  mrg 		 rtx_insn **before, rtx_insn **after)
   3805  1.1  mrg {
   3806  1.1  mrg   bool res = false;
   3807  1.1  mrg 
   3808  1.1  mrg   while (process_address_1 (nop, check_only_p, before, after))
   3809  1.1  mrg     {
   3810  1.1  mrg       if (check_only_p)
   3811  1.1  mrg 	return true;
   3812  1.1  mrg       res = true;
   3813  1.1  mrg     }
   3814  1.1  mrg   return res;
   3815  1.1  mrg }
   3816  1.1  mrg 
/* Emit insns to reload VALUE into a new register.  VALUE is an
   auto-increment or auto-decrement RTX whose operand is a register or
   memory location; so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload
   value being incremented/decremented from.

   INC_AMOUNT is the number to increment or decrement by (always
   positive and ignored for POST_MODIFY/PRE_MODIFY).

   NEW_RCLASS is the register class for any reload pseudo created
   here.

   Return pseudo containing the result.	 */
static rtx
emit_inc (enum reg_class new_rclass, rtx in, rtx value, poly_int64 inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;
  rtx_insn *add_insn;
  int code;
  rtx real_in = in == value ? incloc : in;
  rtx result;
  /* True if the side effect adds INC, false if it subtracts it.  */
  bool plus_p = true;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* For {PRE,POST}_MODIFY the increment is the second operand of
	 the embedded PLUS/MINUS, whose first operand must be the
	 location being modified.  */
      lra_assert (GET_CODE (XEXP (value, 1)) == PLUS
		  || GET_CODE (XEXP (value, 1)) == MINUS);
      lra_assert (rtx_equal_p (XEXP (XEXP (value, 1), 0), XEXP (value, 0)));
      plus_p = GET_CODE (XEXP (value, 1)) == PLUS;
      inc = XEXP (XEXP (value, 1), 1);
    }
  else
    {
      /* Plain {PRE,POST}_{INC,DEC}: build the constant increment,
	 negating it for the decrement forms.  */
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = gen_int_mode (inc_amount, GET_MODE (value));
    }

  /* For a pre-increment of a register we can update it in place;
     otherwise we need a fresh pseudo to hold the result.  */
  if (! post && REG_P (incloc))
    result = incloc;
  else
    result = lra_create_new_reg (GET_MODE (value), value, new_rclass, NULL,
				 "INC/DEC result");

  if (real_in != result)
    {
      /* First copy the location to the result register.  */
      lra_assert (REG_P (result));
      emit_insn (gen_move_insn (result, real_in));
    }

  /* We suppose that there are insns to add/sub with the constant
     increment permitted in {PRE,POST}_{DEC,INC,MODIFY}.  At least the
     old reload worked with this assumption.  If the assumption
     becomes wrong, we should use approach in function
     base_plus_disp_to_reg.  */
  if (in == value)
    {
      /* See if we can directly increment INCLOC.  */
      last = get_last_insn ();
      add_insn = emit_insn (plus_p
			    ? gen_add2_insn (incloc, inc)
			    : gen_sub2_insn (incloc, inc));

      code = recog_memoized (add_insn);
      /* recog_memoized >= 0 means the emitted add/sub is a
	 recognizable insn, so the direct increment worked.  */
      if (code >= 0)
	{
	  if (! post && result != incloc)
	    emit_insn (gen_move_insn (result, incloc));
	  return result;
	}
      /* Roll back the unrecognizable add/sub before trying the
	 fallback sequence below.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RESULT.
     The way we do this depends on whether this is pre- or
     post-increment.  For pre-increment, copy INCLOC to the reload
     register, increment it there, then save back.  */
  if (! post)
    {
      if (real_in != result)
	emit_insn (gen_move_insn (result, real_in));
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      if (result != incloc)
	emit_insn (gen_move_insn (incloc, result));
    }
  else
    {
      /* Post-increment.

	 Because this might be a jump insn or a compare, and because
	 RESULT may not be available after the insn in an input
	 reload, we must do the incrementing before the insn being
	 reloaded for.

	 We have already copied IN to RESULT.  Increment the copy in
	 RESULT, save that back, then decrement RESULT so it has
	 the original value.  */
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      emit_insn (gen_move_insn (incloc, result));
      /* Restore non-modified value for the result.  We prefer this
	 way because it does not require an additional hard
	 register.  */
      if (plus_p)
	{
	  poly_int64 offset;
	  /* Prefer adding the negated constant over emitting a
	     subtract when INC is a known compile-time offset.  */
	  if (poly_int_rtx_p (inc, &offset))
	    emit_insn (gen_add2_insn (result,
				      gen_int_mode (-offset,
						    GET_MODE (result))));
	  else
	    emit_insn (gen_sub2_insn (result, inc));
	}
      else
	emit_insn (gen_add2_insn (result, inc));
    }
  return result;
}
   3945  1.1  mrg 
   3946  1.1  mrg /* Return true if the current move insn does not need processing as we
   3947  1.1  mrg    already know that it satisfies its constraints.  */
   3948  1.1  mrg static bool
   3949  1.1  mrg simple_move_p (void)
   3950  1.1  mrg {
   3951  1.1  mrg   rtx dest, src;
   3952  1.1  mrg   enum reg_class dclass, sclass;
   3953  1.1  mrg 
   3954  1.1  mrg   lra_assert (curr_insn_set != NULL_RTX);
   3955  1.1  mrg   dest = SET_DEST (curr_insn_set);
   3956  1.1  mrg   src = SET_SRC (curr_insn_set);
   3957  1.1  mrg 
   3958  1.1  mrg   /* If the instruction has multiple sets we need to process it even if it
   3959  1.1  mrg      is single_set.  This can happen if one or more of the SETs are dead.
   3960  1.1  mrg      See PR73650.  */
   3961  1.1  mrg   if (multiple_sets (curr_insn))
   3962  1.1  mrg     return false;
   3963  1.1  mrg 
   3964  1.1  mrg   return ((dclass = get_op_class (dest)) != NO_REGS
   3965  1.1  mrg 	  && (sclass = get_op_class (src)) != NO_REGS
   3966  1.1  mrg 	  /* The backend guarantees that register moves of cost 2
   3967  1.1  mrg 	     never need reloads.  */
   3968  1.1  mrg 	  && targetm.register_move_cost (GET_MODE (src), sclass, dclass) == 2);
   3969  1.1  mrg  }
   3970  1.1  mrg 
   3971  1.1  mrg /* Swap operands NOP and NOP + 1. */
   3972  1.1  mrg static inline void
   3973  1.1  mrg swap_operands (int nop)
   3974  1.1  mrg {
   3975  1.1  mrg   std::swap (curr_operand_mode[nop], curr_operand_mode[nop + 1]);
   3976  1.1  mrg   std::swap (original_subreg_reg_mode[nop], original_subreg_reg_mode[nop + 1]);
   3977  1.1  mrg   std::swap (*curr_id->operand_loc[nop], *curr_id->operand_loc[nop + 1]);
   3978  1.1  mrg   std::swap (equiv_substition_p[nop], equiv_substition_p[nop + 1]);
   3979  1.1  mrg   /* Swap the duplicates too.  */
   3980  1.1  mrg   lra_update_dup (curr_id, nop);
   3981  1.1  mrg   lra_update_dup (curr_id, nop + 1);
   3982  1.1  mrg }
   3983  1.1  mrg 
   3984  1.1  mrg /* Main entry point of the constraint code: search the body of the
   3985  1.1  mrg    current insn to choose the best alternative.  It is mimicking insn
   3986  1.1  mrg    alternative cost calculation model of former reload pass.  That is
   3987  1.1  mrg    because machine descriptions were written to use this model.  This
   3988  1.1  mrg    model can be changed in future.  Make commutative operand exchange
   3989  1.1  mrg    if it is chosen.
   3990  1.1  mrg 
   3991  1.1  mrg    if CHECK_ONLY_P is false, do RTL changes to satisfy the
   3992  1.1  mrg    constraints.  Return true if any change happened during function
   3993  1.1  mrg    call.
   3994  1.1  mrg 
   3995  1.1  mrg    If CHECK_ONLY_P is true then don't do any transformation.  Just
   3996  1.1  mrg    check that the insn satisfies all constraints.  If the insn does
   3997  1.1  mrg    not satisfy any constraint, return true.  */
   3998  1.1  mrg static bool
   3999  1.1  mrg curr_insn_transform (bool check_only_p)
   4000  1.1  mrg {
   4001  1.1  mrg   int i, j, k;
   4002  1.1  mrg   int n_operands;
   4003  1.1  mrg   int n_alternatives;
   4004  1.1  mrg   int n_outputs;
   4005  1.1  mrg   int commutative;
   4006  1.1  mrg   signed char goal_alt_matched[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
   4007  1.1  mrg   signed char match_inputs[MAX_RECOG_OPERANDS + 1];
   4008  1.1  mrg   signed char outputs[MAX_RECOG_OPERANDS + 1];
   4009  1.1  mrg   rtx_insn *before, *after;
   4010  1.1  mrg   bool alt_p = false;
   4011  1.1  mrg   /* Flag that the insn has been changed through a transformation.  */
   4012  1.1  mrg   bool change_p;
   4013  1.1  mrg   bool sec_mem_p;
   4014  1.1  mrg   bool use_sec_mem_p;
   4015  1.1  mrg   int max_regno_before;
   4016  1.1  mrg   int reused_alternative_num;
   4017  1.1  mrg 
   4018  1.1  mrg   curr_insn_set = single_set (curr_insn);
   4019  1.1  mrg   if (curr_insn_set != NULL_RTX && simple_move_p ())
   4020  1.1  mrg     {
   4021  1.1  mrg       /* We assume that the corresponding insn alternative has no
   4022  1.1  mrg 	 earlier clobbers.  If it is not the case, don't define move
   4023  1.1  mrg 	 cost equal to 2 for the corresponding register classes.  */
   4024  1.1  mrg       lra_set_used_insn_alternative (curr_insn, LRA_NON_CLOBBERED_ALT);
   4025  1.1  mrg       return false;
   4026  1.1  mrg     }
   4027  1.1  mrg 
   4028  1.1  mrg   no_input_reloads_p = no_output_reloads_p = false;
   4029  1.1  mrg   goal_alt_number = -1;
   4030  1.1  mrg   change_p = sec_mem_p = false;
   4031  1.1  mrg 
   4032  1.1  mrg   /* CALL_INSNs are not allowed to have any output reloads.  */
   4033  1.1  mrg   if (CALL_P (curr_insn))
   4034  1.1  mrg     no_output_reloads_p = true;
   4035  1.1  mrg 
   4036  1.1  mrg   n_operands = curr_static_id->n_operands;
   4037  1.1  mrg   n_alternatives = curr_static_id->n_alternatives;
   4038  1.1  mrg 
   4039  1.1  mrg   /* Just return "no reloads" if insn has no operands with
   4040  1.1  mrg      constraints.  */
   4041  1.1  mrg   if (n_operands == 0 || n_alternatives == 0)
   4042  1.1  mrg     return false;
   4043  1.1  mrg 
   4044  1.1  mrg   max_regno_before = max_reg_num ();
   4045  1.1  mrg 
   4046  1.1  mrg   for (i = 0; i < n_operands; i++)
   4047  1.1  mrg     {
   4048  1.1  mrg       goal_alt_matched[i][0] = -1;
   4049  1.1  mrg       goal_alt_matches[i] = -1;
   4050  1.1  mrg     }
   4051  1.1  mrg 
   4052  1.1  mrg   commutative = curr_static_id->commutative;
   4053  1.1  mrg 
   4054  1.1  mrg   /* Now see what we need for pseudos that didn't get hard regs or got
   4055  1.1  mrg      the wrong kind of hard reg.  For this, we must consider all the
   4056  1.1  mrg      operands together against the register constraints.  */
   4057  1.1  mrg 
   4058  1.1  mrg   best_losers = best_overall = INT_MAX;
   4059  1.1  mrg   best_reload_sum = 0;
   4060  1.1  mrg 
   4061  1.1  mrg   curr_swapped = false;
   4062  1.1  mrg   goal_alt_swapped = false;
   4063  1.1  mrg 
   4064  1.1  mrg   if (! check_only_p)
   4065  1.1  mrg     /* Make equivalence substitution and memory subreg elimination
   4066  1.1  mrg        before address processing because an address legitimacy can
   4067  1.1  mrg        depend on memory mode.  */
   4068  1.1  mrg     for (i = 0; i < n_operands; i++)
   4069  1.1  mrg       {
   4070  1.1  mrg 	rtx op, subst, old;
   4071  1.1  mrg 	bool op_change_p = false;
   4072  1.1  mrg 
   4073  1.1  mrg 	if (curr_static_id->operand[i].is_operator)
   4074  1.1  mrg 	  continue;
   4075  1.1  mrg 
   4076  1.1  mrg 	old = op = *curr_id->operand_loc[i];
   4077  1.1  mrg 	if (GET_CODE (old) == SUBREG)
   4078  1.1  mrg 	  old = SUBREG_REG (old);
   4079  1.1  mrg 	subst = get_equiv_with_elimination (old, curr_insn);
   4080  1.1  mrg 	original_subreg_reg_mode[i] = VOIDmode;
   4081  1.1  mrg 	equiv_substition_p[i] = false;
   4082  1.1  mrg 	if (subst != old)
   4083  1.1  mrg 	  {
   4084  1.1  mrg 	    equiv_substition_p[i] = true;
   4085  1.1  mrg 	    subst = copy_rtx (subst);
   4086  1.1  mrg 	    lra_assert (REG_P (old));
   4087  1.1  mrg 	    if (GET_CODE (op) != SUBREG)
   4088  1.1  mrg 	      *curr_id->operand_loc[i] = subst;
   4089  1.1  mrg 	    else
   4090  1.1  mrg 	      {
   4091  1.1  mrg 		SUBREG_REG (op) = subst;
   4092  1.1  mrg 		if (GET_MODE (subst) == VOIDmode)
   4093  1.1  mrg 		  original_subreg_reg_mode[i] = GET_MODE (old);
   4094  1.1  mrg 	      }
   4095  1.1  mrg 	    if (lra_dump_file != NULL)
   4096  1.1  mrg 	      {
   4097  1.1  mrg 		fprintf (lra_dump_file,
   4098  1.1  mrg 			 "Changing pseudo %d in operand %i of insn %u on equiv ",
   4099  1.1  mrg 			 REGNO (old), i, INSN_UID (curr_insn));
   4100  1.1  mrg 		dump_value_slim (lra_dump_file, subst, 1);
   4101  1.1  mrg 		fprintf (lra_dump_file, "\n");
   4102  1.1  mrg 	      }
   4103  1.1  mrg 	    op_change_p = change_p = true;
   4104  1.1  mrg 	  }
   4105  1.1  mrg 	if (simplify_operand_subreg (i, GET_MODE (old)) || op_change_p)
   4106  1.1  mrg 	  {
   4107  1.1  mrg 	    change_p = true;
   4108  1.1  mrg 	    lra_update_dup (curr_id, i);
   4109  1.1  mrg 	  }
   4110  1.1  mrg       }
   4111  1.1  mrg 
   4112  1.1  mrg   /* Reload address registers and displacements.  We do it before
   4113  1.1  mrg      finding an alternative because of memory constraints.  */
   4114  1.1  mrg   before = after = NULL;
   4115  1.1  mrg   for (i = 0; i < n_operands; i++)
   4116  1.1  mrg     if (! curr_static_id->operand[i].is_operator
   4117  1.1  mrg 	&& process_address (i, check_only_p, &before, &after))
   4118  1.1  mrg       {
   4119  1.1  mrg 	if (check_only_p)
   4120  1.1  mrg 	  return true;
   4121  1.1  mrg 	change_p = true;
   4122  1.1  mrg 	lra_update_dup (curr_id, i);
   4123  1.1  mrg       }
   4124  1.1  mrg 
   4125  1.1  mrg   if (change_p)
   4126  1.1  mrg     /* If we've changed the instruction then any alternative that
   4127  1.1  mrg        we chose previously may no longer be valid.  */
   4128  1.1  mrg     lra_set_used_insn_alternative (curr_insn, LRA_UNKNOWN_ALT);
   4129  1.1  mrg 
   4130  1.1  mrg   if (! check_only_p && curr_insn_set != NULL_RTX
   4131  1.1  mrg       && check_and_process_move (&change_p, &sec_mem_p))
   4132  1.1  mrg     return change_p;
   4133  1.1  mrg 
   4134  1.1  mrg  try_swapped:
   4135  1.1  mrg 
   4136  1.1  mrg   reused_alternative_num = check_only_p ? LRA_UNKNOWN_ALT : curr_id->used_insn_alternative;
   4137  1.1  mrg   if (lra_dump_file != NULL && reused_alternative_num >= 0)
   4138  1.1  mrg     fprintf (lra_dump_file, "Reusing alternative %d for insn #%u\n",
   4139  1.1  mrg 	     reused_alternative_num, INSN_UID (curr_insn));
   4140  1.1  mrg 
   4141  1.1  mrg   if (process_alt_operands (reused_alternative_num))
   4142  1.1  mrg     alt_p = true;
   4143  1.1  mrg 
   4144  1.1  mrg   if (check_only_p)
   4145  1.1  mrg     return ! alt_p || best_losers != 0;
   4146  1.1  mrg 
   4147  1.1  mrg   /* If insn is commutative (it's safe to exchange a certain pair of
   4148  1.1  mrg      operands) then we need to try each alternative twice, the second
   4149  1.1  mrg      time matching those two operands as if we had exchanged them.  To
   4150  1.1  mrg      do this, really exchange them in operands.
   4151  1.1  mrg 
   4152  1.1  mrg      If we have just tried the alternatives the second time, return
   4153  1.1  mrg      operands to normal and drop through.  */
   4154  1.1  mrg 
   4155  1.1  mrg   if (reused_alternative_num < 0 && commutative >= 0)
   4156  1.1  mrg     {
   4157  1.1  mrg       curr_swapped = !curr_swapped;
   4158  1.1  mrg       if (curr_swapped)
   4159  1.1  mrg 	{
   4160  1.1  mrg 	  swap_operands (commutative);
   4161  1.1  mrg 	  goto try_swapped;
   4162  1.1  mrg 	}
   4163  1.1  mrg       else
   4164  1.1  mrg 	swap_operands (commutative);
   4165  1.1  mrg     }
   4166  1.1  mrg 
   4167  1.1  mrg   if (! alt_p && ! sec_mem_p)
   4168  1.1  mrg     {
   4169  1.1  mrg       /* No alternative works with reloads??  */
   4170  1.1  mrg       if (INSN_CODE (curr_insn) >= 0)
   4171  1.1  mrg 	fatal_insn ("unable to generate reloads for:", curr_insn);
   4172  1.1  mrg       error_for_asm (curr_insn,
   4173  1.1  mrg 		     "inconsistent operand constraints in an %<asm%>");
   4174  1.1  mrg       lra_asm_error_p = true;
   4175  1.1  mrg       if (! JUMP_P (curr_insn))
   4176  1.1  mrg 	{
   4177  1.1  mrg 	  /* Avoid further trouble with this insn.  Don't generate use
   4178  1.1  mrg 	     pattern here as we could use the insn SP offset.  */
   4179  1.1  mrg 	  lra_set_insn_deleted (curr_insn);
   4180  1.1  mrg 	}
   4181  1.1  mrg       else
   4182  1.1  mrg 	{
   4183  1.1  mrg 	  lra_invalidate_insn_data (curr_insn);
   4184  1.1  mrg 	  ira_nullify_asm_goto (curr_insn);
   4185  1.1  mrg 	  lra_update_insn_regno_info (curr_insn);
   4186  1.1  mrg 	}
   4187  1.1  mrg       return true;
   4188  1.1  mrg     }
   4189  1.1  mrg 
   4190  1.1  mrg   /* If the best alternative is with operands 1 and 2 swapped, swap
   4191  1.1  mrg      them.  Update the operand numbers of any reloads already
   4192  1.1  mrg      pushed.  */
   4193  1.1  mrg 
   4194  1.1  mrg   if (goal_alt_swapped)
   4195  1.1  mrg     {
   4196  1.1  mrg       if (lra_dump_file != NULL)
   4197  1.1  mrg 	fprintf (lra_dump_file, "  Commutative operand exchange in insn %u\n",
   4198  1.1  mrg 		 INSN_UID (curr_insn));
   4199  1.1  mrg 
   4200  1.1  mrg       /* Swap the duplicates too.  */
   4201  1.1  mrg       swap_operands (commutative);
   4202  1.1  mrg       change_p = true;
   4203  1.1  mrg     }
   4204  1.1  mrg 
   4205  1.1  mrg   /* Some targets' TARGET_SECONDARY_MEMORY_NEEDED (e.g. x86) are defined
   4206  1.1  mrg      too conservatively.  So we use the secondary memory only if there
   4207  1.1  mrg      is no any alternative without reloads.  */
   4208  1.1  mrg   use_sec_mem_p = false;
   4209  1.1  mrg   if (! alt_p)
   4210  1.1  mrg     use_sec_mem_p = true;
   4211  1.1  mrg   else if (sec_mem_p)
   4212  1.1  mrg     {
   4213  1.1  mrg       for (i = 0; i < n_operands; i++)
   4214  1.1  mrg 	if (! goal_alt_win[i] && ! goal_alt_match_win[i])
   4215  1.1  mrg 	  break;
   4216  1.1  mrg       use_sec_mem_p = i < n_operands;
   4217  1.1  mrg     }
   4218  1.1  mrg 
   4219  1.1  mrg   if (use_sec_mem_p)
   4220  1.1  mrg     {
   4221  1.1  mrg       int in = -1, out = -1;
   4222  1.1  mrg       rtx new_reg, src, dest, rld;
   4223  1.1  mrg       machine_mode sec_mode, rld_mode;
   4224  1.1  mrg 
   4225  1.1  mrg       lra_assert (curr_insn_set != NULL_RTX && sec_mem_p);
   4226  1.1  mrg       dest = SET_DEST (curr_insn_set);
   4227  1.1  mrg       src = SET_SRC (curr_insn_set);
   4228  1.1  mrg       for (i = 0; i < n_operands; i++)
   4229  1.1  mrg 	if (*curr_id->operand_loc[i] == dest)
   4230  1.1  mrg 	  out = i;
   4231  1.1  mrg 	else if (*curr_id->operand_loc[i] == src)
   4232  1.1  mrg 	  in = i;
   4233  1.1  mrg       for (i = 0; i < curr_static_id->n_dups; i++)
   4234  1.1  mrg 	if (out < 0 && *curr_id->dup_loc[i] == dest)
   4235  1.1  mrg 	  out = curr_static_id->dup_num[i];
   4236  1.1  mrg 	else if (in < 0 && *curr_id->dup_loc[i] == src)
   4237  1.1  mrg 	  in = curr_static_id->dup_num[i];
   4238  1.1  mrg       lra_assert (out >= 0 && in >= 0
   4239  1.1  mrg 		  && curr_static_id->operand[out].type == OP_OUT
   4240  1.1  mrg 		  && curr_static_id->operand[in].type == OP_IN);
   4241  1.1  mrg       rld = partial_subreg_p (GET_MODE (src), GET_MODE (dest)) ? src : dest;
   4242  1.1  mrg       rld_mode = GET_MODE (rld);
   4243  1.1  mrg       sec_mode = targetm.secondary_memory_needed_mode (rld_mode);
   4244  1.1  mrg       new_reg = lra_create_new_reg (sec_mode, NULL_RTX, NO_REGS, NULL,
   4245  1.1  mrg 				    "secondary");
   4246  1.1  mrg       /* If the mode is changed, it should be wider.  */
   4247  1.1  mrg       lra_assert (!partial_subreg_p (sec_mode, rld_mode));
   4248  1.1  mrg       if (sec_mode != rld_mode)
   4249  1.1  mrg         {
   4250  1.1  mrg 	  /* If the target says specifically to use another mode for
   4251  1.1  mrg 	     secondary memory moves we cannot reuse the original
   4252  1.1  mrg 	     insn.  */
   4253  1.1  mrg 	  after = emit_spill_move (false, new_reg, dest);
   4254  1.1  mrg 	  lra_process_new_insns (curr_insn, NULL, after,
   4255  1.1  mrg 				 "Inserting the sec. move");
   4256  1.1  mrg 	  /* We may have non null BEFORE here (e.g. after address
   4257  1.1  mrg 	     processing.  */
   4258  1.1  mrg 	  push_to_sequence (before);
   4259  1.1  mrg 	  before = emit_spill_move (true, new_reg, src);
   4260  1.1  mrg 	  emit_insn (before);
   4261  1.1  mrg 	  before = get_insns ();
   4262  1.1  mrg 	  end_sequence ();
   4263  1.1  mrg 	  lra_process_new_insns (curr_insn, before, NULL, "Changing on");
   4264  1.1  mrg 	  lra_set_insn_deleted (curr_insn);
   4265  1.1  mrg 	}
   4266  1.1  mrg       else if (dest == rld)
   4267  1.1  mrg         {
   4268  1.1  mrg 	  *curr_id->operand_loc[out] = new_reg;
   4269  1.1  mrg 	  lra_update_dup (curr_id, out);
   4270  1.1  mrg 	  after = emit_spill_move (false, new_reg, dest);
   4271  1.1  mrg 	  lra_process_new_insns (curr_insn, NULL, after,
   4272  1.1  mrg 				 "Inserting the sec. move");
   4273  1.1  mrg 	}
   4274  1.1  mrg       else
   4275  1.1  mrg 	{
   4276  1.1  mrg 	  *curr_id->operand_loc[in] = new_reg;
   4277  1.1  mrg 	  lra_update_dup (curr_id, in);
   4278  1.1  mrg 	  /* See comments above.  */
   4279  1.1  mrg 	  push_to_sequence (before);
   4280  1.1  mrg 	  before = emit_spill_move (true, new_reg, src);
   4281  1.1  mrg 	  emit_insn (before);
   4282  1.1  mrg 	  before = get_insns ();
   4283  1.1  mrg 	  end_sequence ();
   4284  1.1  mrg 	  lra_process_new_insns (curr_insn, before, NULL,
   4285  1.1  mrg 				 "Inserting the sec. move");
   4286  1.1  mrg 	}
   4287  1.1  mrg       lra_update_insn_regno_info (curr_insn);
   4288  1.1  mrg       return true;
   4289  1.1  mrg     }
   4290  1.1  mrg 
   4291  1.1  mrg   lra_assert (goal_alt_number >= 0);
   4292  1.1  mrg   lra_set_used_insn_alternative (curr_insn, goal_alt_number);
   4293  1.1  mrg 
   4294  1.1  mrg   if (lra_dump_file != NULL)
   4295  1.1  mrg     {
   4296  1.1  mrg       const char *p;
   4297  1.1  mrg 
   4298  1.1  mrg       fprintf (lra_dump_file, "	 Choosing alt %d in insn %u:",
   4299  1.1  mrg 	       goal_alt_number, INSN_UID (curr_insn));
   4300  1.1  mrg       for (i = 0; i < n_operands; i++)
   4301  1.1  mrg 	{
   4302  1.1  mrg 	  p = (curr_static_id->operand_alternative
   4303  1.1  mrg 	       [goal_alt_number * n_operands + i].constraint);
   4304  1.1  mrg 	  if (*p == '\0')
   4305  1.1  mrg 	    continue;
   4306  1.1  mrg 	  fprintf (lra_dump_file, "  (%d) ", i);
   4307  1.1  mrg 	  for (; *p != '\0' && *p != ',' && *p != '#'; p++)
   4308  1.1  mrg 	    fputc (*p, lra_dump_file);
   4309  1.1  mrg 	}
   4310  1.1  mrg       if (INSN_CODE (curr_insn) >= 0
   4311  1.1  mrg           && (p = get_insn_name (INSN_CODE (curr_insn))) != NULL)
   4312  1.1  mrg         fprintf (lra_dump_file, " {%s}", p);
   4313  1.1  mrg       if (maybe_ne (curr_id->sp_offset, 0))
   4314  1.1  mrg 	{
   4315  1.1  mrg 	  fprintf (lra_dump_file, " (sp_off=");
   4316  1.1  mrg 	  print_dec (curr_id->sp_offset, lra_dump_file);
   4317  1.1  mrg 	  fprintf (lra_dump_file, ")");
   4318  1.1  mrg 	}
   4319  1.1  mrg       fprintf (lra_dump_file, "\n");
   4320  1.1  mrg     }
   4321  1.1  mrg 
   4322  1.1  mrg   /* Right now, for any pair of operands I and J that are required to
   4323  1.1  mrg      match, with J < I, goal_alt_matches[I] is J.  Add I to
   4324  1.1  mrg      goal_alt_matched[J].  */
   4325  1.1  mrg 
   4326  1.1  mrg   for (i = 0; i < n_operands; i++)
   4327  1.1  mrg     if ((j = goal_alt_matches[i]) >= 0)
   4328  1.1  mrg       {
   4329  1.1  mrg 	for (k = 0; goal_alt_matched[j][k] >= 0; k++)
   4330  1.1  mrg 	  ;
   4331  1.1  mrg 	/* We allow matching one output operand and several input
   4332  1.1  mrg 	   operands.  */
   4333  1.1  mrg 	lra_assert (k == 0
   4334  1.1  mrg 		    || (curr_static_id->operand[j].type == OP_OUT
   4335  1.1  mrg 			&& curr_static_id->operand[i].type == OP_IN
   4336  1.1  mrg 			&& (curr_static_id->operand
   4337  1.1  mrg 			    [goal_alt_matched[j][0]].type == OP_IN)));
   4338  1.1  mrg 	goal_alt_matched[j][k] = i;
   4339  1.1  mrg 	goal_alt_matched[j][k + 1] = -1;
   4340  1.1  mrg       }
   4341  1.1  mrg 
   4342  1.1  mrg   for (i = 0; i < n_operands; i++)
   4343  1.1  mrg     goal_alt_win[i] |= goal_alt_match_win[i];
   4344  1.1  mrg 
   4345  1.1  mrg   /* Any constants that aren't allowed and can't be reloaded into
   4346  1.1  mrg      registers are here changed into memory references.	 */
   4347  1.1  mrg   for (i = 0; i < n_operands; i++)
   4348  1.1  mrg     if (goal_alt_win[i])
   4349  1.1  mrg       {
   4350  1.1  mrg 	int regno;
   4351  1.1  mrg 	enum reg_class new_class;
   4352  1.1  mrg 	rtx reg = *curr_id->operand_loc[i];
   4353  1.1  mrg 
   4354  1.1  mrg 	if (GET_CODE (reg) == SUBREG)
   4355  1.1  mrg 	  reg = SUBREG_REG (reg);
   4356  1.1  mrg 
   4357  1.1  mrg 	if (REG_P (reg) && (regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
   4358  1.1  mrg 	  {
   4359  1.1  mrg 	    bool ok_p = in_class_p (reg, goal_alt[i], &new_class);
   4360  1.1  mrg 
   4361  1.1  mrg 	    if (new_class != NO_REGS && get_reg_class (regno) != new_class)
   4362  1.1  mrg 	      {
   4363  1.1  mrg 		lra_assert (ok_p);
   4364  1.1  mrg 		lra_change_class (regno, new_class, "      Change to", true);
   4365  1.1  mrg 	      }
   4366  1.1  mrg 	  }
   4367  1.1  mrg       }
   4368  1.1  mrg     else
   4369  1.1  mrg       {
   4370  1.1  mrg 	const char *constraint;
   4371  1.1  mrg 	char c;
   4372  1.1  mrg 	rtx op = *curr_id->operand_loc[i];
   4373  1.1  mrg 	rtx subreg = NULL_RTX;
   4374  1.1  mrg 	machine_mode mode = curr_operand_mode[i];
   4375  1.1  mrg 
   4376  1.1  mrg 	if (GET_CODE (op) == SUBREG)
   4377  1.1  mrg 	  {
   4378  1.1  mrg 	    subreg = op;
   4379  1.1  mrg 	    op = SUBREG_REG (op);
   4380  1.1  mrg 	    mode = GET_MODE (op);
   4381  1.1  mrg 	  }
   4382  1.1  mrg 
   4383  1.1  mrg 	if (CONST_POOL_OK_P (mode, op)
   4384  1.1  mrg 	    && ((targetm.preferred_reload_class
   4385  1.1  mrg 		 (op, (enum reg_class) goal_alt[i]) == NO_REGS)
   4386  1.1  mrg 		|| no_input_reloads_p))
   4387  1.1  mrg 	  {
   4388  1.1  mrg 	    rtx tem = force_const_mem (mode, op);
   4389  1.1  mrg 
   4390  1.1  mrg 	    change_p = true;
   4391  1.1  mrg 	    if (subreg != NULL_RTX)
   4392  1.1  mrg 	      tem = gen_rtx_SUBREG (mode, tem, SUBREG_BYTE (subreg));
   4393  1.1  mrg 
   4394  1.1  mrg 	    *curr_id->operand_loc[i] = tem;
   4395  1.1  mrg 	    lra_update_dup (curr_id, i);
   4396  1.1  mrg 	    process_address (i, false, &before, &after);
   4397  1.1  mrg 
   4398  1.1  mrg 	    /* If the alternative accepts constant pool refs directly
   4399  1.1  mrg 	       there will be no reload needed at all.  */
   4400  1.1  mrg 	    if (subreg != NULL_RTX)
   4401  1.1  mrg 	      continue;
   4402  1.1  mrg 	    /* Skip alternatives before the one requested.  */
   4403  1.1  mrg 	    constraint = (curr_static_id->operand_alternative
   4404  1.1  mrg 			  [goal_alt_number * n_operands + i].constraint);
   4405  1.1  mrg 	    for (;
   4406  1.1  mrg 		 (c = *constraint) && c != ',' && c != '#';
   4407  1.1  mrg 		 constraint += CONSTRAINT_LEN (c, constraint))
   4408  1.1  mrg 	      {
   4409  1.1  mrg 		enum constraint_num cn = lookup_constraint (constraint);
   4410  1.1  mrg 		if ((insn_extra_memory_constraint (cn)
   4411  1.1  mrg 		     || insn_extra_special_memory_constraint (cn)
   4412  1.1  mrg 		     || insn_extra_relaxed_memory_constraint (cn))
   4413  1.1  mrg 		    && satisfies_memory_constraint_p (tem, cn))
   4414  1.1  mrg 		  break;
   4415  1.1  mrg 	      }
   4416  1.1  mrg 	    if (c == '\0' || c == ',' || c == '#')
   4417  1.1  mrg 	      continue;
   4418  1.1  mrg 
   4419  1.1  mrg 	    goal_alt_win[i] = true;
   4420  1.1  mrg 	  }
   4421  1.1  mrg       }
   4422  1.1  mrg 
   4423  1.1  mrg   n_outputs = 0;
   4424  1.1  mrg   for (i = 0; i < n_operands; i++)
   4425  1.1  mrg     if (curr_static_id->operand[i].type == OP_OUT)
   4426  1.1  mrg       outputs[n_outputs++] = i;
   4427  1.1  mrg   outputs[n_outputs] = -1;
   4428  1.1  mrg   for (i = 0; i < n_operands; i++)
   4429  1.1  mrg     {
   4430  1.1  mrg       int regno;
   4431  1.1  mrg       bool optional_p = false;
   4432  1.1  mrg       rtx old, new_reg;
   4433  1.1  mrg       rtx op = *curr_id->operand_loc[i];
   4434  1.1  mrg 
   4435  1.1  mrg       if (goal_alt_win[i])
   4436  1.1  mrg 	{
   4437  1.1  mrg 	  if (goal_alt[i] == NO_REGS
   4438  1.1  mrg 	      && REG_P (op)
   4439  1.1  mrg 	      /* When we assign NO_REGS it means that we will not
   4440  1.1  mrg 		 assign a hard register to the scratch pseudo by
   4441  1.1  mrg 		 assigment pass and the scratch pseudo will be
   4442  1.1  mrg 		 spilled.  Spilled scratch pseudos are transformed
   4443  1.1  mrg 		 back to scratches at the LRA end.  */
   4444  1.1  mrg 	      && ira_former_scratch_operand_p (curr_insn, i)
   4445  1.1  mrg 	      && ira_former_scratch_p (REGNO (op)))
   4446  1.1  mrg 	    {
   4447  1.1  mrg 	      int regno = REGNO (op);
   4448  1.1  mrg 	      lra_change_class (regno, NO_REGS, "      Change to", true);
   4449  1.1  mrg 	      if (lra_get_regno_hard_regno (regno) >= 0)
   4450  1.1  mrg 		/* We don't have to mark all insn affected by the
   4451  1.1  mrg 		   spilled pseudo as there is only one such insn, the
   4452  1.1  mrg 		   current one.  */
   4453  1.1  mrg 		reg_renumber[regno] = -1;
   4454  1.1  mrg 	      lra_assert (bitmap_single_bit_set_p
   4455  1.1  mrg 			  (&lra_reg_info[REGNO (op)].insn_bitmap));
   4456  1.1  mrg 	    }
   4457  1.1  mrg 	  /* We can do an optional reload.  If the pseudo got a hard
   4458  1.1  mrg 	     reg, we might improve the code through inheritance.  If
   4459  1.1  mrg 	     it does not get a hard register we coalesce memory/memory
   4460  1.1  mrg 	     moves later.  Ignore move insns to avoid cycling.  */
   4461  1.1  mrg 	  if (! lra_simple_p
   4462  1.1  mrg 	      && lra_undo_inheritance_iter < LRA_MAX_INHERITANCE_PASSES
   4463  1.1  mrg 	      && goal_alt[i] != NO_REGS && REG_P (op)
   4464  1.1  mrg 	      && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
   4465  1.1  mrg 	      && regno < new_regno_start
   4466  1.1  mrg 	      && ! ira_former_scratch_p (regno)
   4467  1.1  mrg 	      && reg_renumber[regno] < 0
   4468  1.1  mrg 	      /* Check that the optional reload pseudo will be able to
   4469  1.1  mrg 		 hold given mode value.  */
   4470  1.1  mrg 	      && ! (prohibited_class_reg_set_mode_p
   4471  1.1  mrg 		    (goal_alt[i], reg_class_contents[goal_alt[i]],
   4472  1.1  mrg 		     PSEUDO_REGNO_MODE (regno)))
   4473  1.1  mrg 	      && (curr_insn_set == NULL_RTX
   4474  1.1  mrg 		  || !((REG_P (SET_SRC (curr_insn_set))
   4475  1.1  mrg 			|| MEM_P (SET_SRC (curr_insn_set))
   4476  1.1  mrg 			|| GET_CODE (SET_SRC (curr_insn_set)) == SUBREG)
   4477  1.1  mrg 		       && (REG_P (SET_DEST (curr_insn_set))
   4478  1.1  mrg 			   || MEM_P (SET_DEST (curr_insn_set))
   4479  1.1  mrg 			   || GET_CODE (SET_DEST (curr_insn_set)) == SUBREG))))
   4480  1.1  mrg 	    optional_p = true;
   4481  1.1  mrg 	  else if (goal_alt_matched[i][0] != -1
   4482  1.1  mrg 		   && curr_static_id->operand[i].type == OP_OUT
   4483  1.1  mrg 		   && (curr_static_id->operand_alternative
   4484  1.1  mrg 		       [goal_alt_number * n_operands + i].earlyclobber)
   4485  1.1  mrg 		   && REG_P (op))
   4486  1.1  mrg 	    {
   4487  1.1  mrg 	      for (j = 0; goal_alt_matched[i][j] != -1; j++)
   4488  1.1  mrg 		{
   4489  1.1  mrg 		  rtx op2 = *curr_id->operand_loc[goal_alt_matched[i][j]];
   4490  1.1  mrg 
   4491  1.1  mrg 		  if (REG_P (op2) && REGNO (op) != REGNO (op2))
   4492  1.1  mrg 		    break;
   4493  1.1  mrg 		}
   4494  1.1  mrg 	      if (goal_alt_matched[i][j] != -1)
   4495  1.1  mrg 		{
   4496  1.1  mrg 		  /* Generate reloads for different output and matched
   4497  1.1  mrg 		     input registers.  This is the easiest way to avoid
   4498  1.1  mrg 		     creation of non-existing register conflicts in
   4499  1.1  mrg 		     lra-lives.cc.  */
   4500  1.1  mrg 		  match_reload (i, goal_alt_matched[i], outputs, goal_alt[i],
   4501  1.1  mrg 				&goal_alt_exclude_start_hard_regs[i], &before,
   4502  1.1  mrg 				&after, TRUE);
   4503  1.1  mrg 		}
   4504  1.1  mrg 	      continue;
   4505  1.1  mrg 	    }
   4506  1.1  mrg 	  else
   4507  1.1  mrg 	    continue;
   4508  1.1  mrg 	}
   4509  1.1  mrg 
   4510  1.1  mrg       /* Operands that match previous ones have already been handled.  */
   4511  1.1  mrg       if (goal_alt_matches[i] >= 0)
   4512  1.1  mrg 	continue;
   4513  1.1  mrg 
   4514  1.1  mrg       /* We should not have an operand with a non-offsettable address
   4515  1.1  mrg 	 appearing where an offsettable address will do.  It also may
   4516  1.1  mrg 	 be a case when the address should be special in other words
   4517  1.1  mrg 	 not a general one (e.g. it needs no index reg).  */
   4518  1.1  mrg       if (goal_alt_matched[i][0] == -1 && goal_alt_offmemok[i] && MEM_P (op))
   4519  1.1  mrg 	{
   4520  1.1  mrg 	  enum reg_class rclass;
   4521  1.1  mrg 	  rtx *loc = &XEXP (op, 0);
   4522  1.1  mrg 	  enum rtx_code code = GET_CODE (*loc);
   4523  1.1  mrg 
   4524  1.1  mrg 	  push_to_sequence (before);
   4525  1.1  mrg 	  rclass = base_reg_class (GET_MODE (op), MEM_ADDR_SPACE (op),
   4526  1.1  mrg 				   MEM, SCRATCH);
   4527  1.1  mrg 	  if (GET_RTX_CLASS (code) == RTX_AUTOINC)
   4528  1.1  mrg 	    new_reg = emit_inc (rclass, *loc, *loc,
   4529  1.1  mrg 				/* This value does not matter for MODIFY.  */
   4530  1.1  mrg 				GET_MODE_SIZE (GET_MODE (op)));
   4531  1.1  mrg 	  else if (get_reload_reg (OP_IN, Pmode, *loc, rclass,
   4532  1.1  mrg 				   NULL, FALSE,
   4533  1.1  mrg 				   "offsetable address", &new_reg))
   4534  1.1  mrg 	    {
   4535  1.1  mrg 	      rtx addr = *loc;
   4536  1.1  mrg 	      enum rtx_code code = GET_CODE (addr);
   4537  1.1  mrg 	      bool align_p = false;
   4538  1.1  mrg 
   4539  1.1  mrg 	      if (code == AND && CONST_INT_P (XEXP (addr, 1)))
   4540  1.1  mrg 		{
   4541  1.1  mrg 		  /* (and ... (const_int -X)) is used to align to X bytes.  */
   4542  1.1  mrg 		  align_p = true;
   4543  1.1  mrg 		  addr = XEXP (*loc, 0);
   4544  1.1  mrg 		}
   4545  1.1  mrg 	      else
   4546  1.1  mrg 		addr = canonicalize_reload_addr (addr);
   4547  1.1  mrg 
   4548  1.1  mrg 	      lra_emit_move (new_reg, addr);
   4549  1.1  mrg 	      if (align_p)
   4550  1.1  mrg 		emit_move_insn (new_reg, gen_rtx_AND (GET_MODE (new_reg), new_reg, XEXP (*loc, 1)));
   4551  1.1  mrg 	    }
   4552  1.1  mrg 	  before = get_insns ();
   4553  1.1  mrg 	  end_sequence ();
   4554  1.1  mrg 	  *loc = new_reg;
   4555  1.1  mrg 	  lra_update_dup (curr_id, i);
   4556  1.1  mrg 	}
   4557  1.1  mrg       else if (goal_alt_matched[i][0] == -1)
   4558  1.1  mrg 	{
   4559  1.1  mrg 	  machine_mode mode;
   4560  1.1  mrg 	  rtx reg, *loc;
   4561  1.1  mrg 	  int hard_regno;
   4562  1.1  mrg 	  enum op_type type = curr_static_id->operand[i].type;
   4563  1.1  mrg 
   4564  1.1  mrg 	  loc = curr_id->operand_loc[i];
   4565  1.1  mrg 	  mode = curr_operand_mode[i];
   4566  1.1  mrg 	  if (GET_CODE (*loc) == SUBREG)
   4567  1.1  mrg 	    {
   4568  1.1  mrg 	      reg = SUBREG_REG (*loc);
   4569  1.1  mrg 	      poly_int64 byte = SUBREG_BYTE (*loc);
   4570  1.1  mrg 	      if (REG_P (reg)
   4571  1.1  mrg 		  /* Strict_low_part requires reloading the register and not
   4572  1.1  mrg 		     just the subreg.  Likewise for a strict subreg no wider
   4573  1.1  mrg 		     than a word for WORD_REGISTER_OPERATIONS targets.  */
   4574  1.1  mrg 		  && (curr_static_id->operand[i].strict_low
   4575  1.1  mrg 		      || (!paradoxical_subreg_p (mode, GET_MODE (reg))
   4576  1.1  mrg 			  && (hard_regno
   4577  1.1  mrg 			      = get_try_hard_regno (REGNO (reg))) >= 0
   4578  1.1  mrg 			  && (simplify_subreg_regno
   4579  1.1  mrg 			      (hard_regno,
   4580  1.1  mrg 			       GET_MODE (reg), byte, mode) < 0)
   4581  1.1  mrg 			  && (goal_alt[i] == NO_REGS
   4582  1.1  mrg 			      || (simplify_subreg_regno
   4583  1.1  mrg 				  (ira_class_hard_regs[goal_alt[i]][0],
   4584  1.1  mrg 				   GET_MODE (reg), byte, mode) >= 0)))
   4585  1.1  mrg 		      || (partial_subreg_p (mode, GET_MODE (reg))
   4586  1.1  mrg 			  && known_le (GET_MODE_SIZE (GET_MODE (reg)),
   4587  1.1  mrg 				       UNITS_PER_WORD)
   4588  1.1  mrg 			  && WORD_REGISTER_OPERATIONS)))
   4589  1.1  mrg 		{
   4590  1.1  mrg 		  /* An OP_INOUT is required when reloading a subreg of a
   4591  1.1  mrg 		     mode wider than a word to ensure that data beyond the
   4592  1.1  mrg 		     word being reloaded is preserved.  Also automatically
   4593  1.1  mrg 		     ensure that strict_low_part reloads are made into
   4594  1.1  mrg 		     OP_INOUT which should already be true from the backend
   4595  1.1  mrg 		     constraints.  */
   4596  1.1  mrg 		  if (type == OP_OUT
   4597  1.1  mrg 		      && (curr_static_id->operand[i].strict_low
   4598  1.1  mrg 			  || read_modify_subreg_p (*loc)))
   4599  1.1  mrg 		    type = OP_INOUT;
   4600  1.1  mrg 		  loc = &SUBREG_REG (*loc);
   4601  1.1  mrg 		  mode = GET_MODE (*loc);
   4602  1.1  mrg 		}
   4603  1.1  mrg 	    }
   4604  1.1  mrg 	  old = *loc;
   4605  1.1  mrg 	  if (get_reload_reg (type, mode, old, goal_alt[i],
   4606  1.1  mrg 			      &goal_alt_exclude_start_hard_regs[i],
   4607  1.1  mrg 			      loc != curr_id->operand_loc[i], "", &new_reg)
   4608  1.1  mrg 	      && type != OP_OUT)
   4609  1.1  mrg 	    {
   4610  1.1  mrg 	      push_to_sequence (before);
   4611  1.1  mrg 	      lra_emit_move (new_reg, old);
   4612  1.1  mrg 	      before = get_insns ();
   4613  1.1  mrg 	      end_sequence ();
   4614  1.1  mrg 	    }
   4615  1.1  mrg 	  *loc = new_reg;
   4616  1.1  mrg 	  if (type != OP_IN
   4617  1.1  mrg 	      && find_reg_note (curr_insn, REG_UNUSED, old) == NULL_RTX)
   4618  1.1  mrg 	    {
   4619  1.1  mrg 	      start_sequence ();
   4620  1.1  mrg 	      lra_emit_move (type == OP_INOUT ? copy_rtx (old) : old, new_reg);
   4621  1.1  mrg 	      emit_insn (after);
   4622  1.1  mrg 	      after = get_insns ();
   4623  1.1  mrg 	      end_sequence ();
   4624  1.1  mrg 	      *loc = new_reg;
   4625  1.1  mrg 	    }
   4626  1.1  mrg 	  for (j = 0; j < goal_alt_dont_inherit_ops_num; j++)
   4627  1.1  mrg 	    if (goal_alt_dont_inherit_ops[j] == i)
   4628  1.1  mrg 	      {
   4629  1.1  mrg 		lra_set_regno_unique_value (REGNO (new_reg));
   4630  1.1  mrg 		break;
   4631  1.1  mrg 	      }
   4632  1.1  mrg 	  lra_update_dup (curr_id, i);
   4633  1.1  mrg 	}
   4634  1.1  mrg       else if (curr_static_id->operand[i].type == OP_IN
   4635  1.1  mrg 	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
   4636  1.1  mrg 		   == OP_OUT
   4637  1.1  mrg 		   || (curr_static_id->operand[goal_alt_matched[i][0]].type
   4638  1.1  mrg 		       == OP_INOUT
   4639  1.1  mrg 		       && (operands_match_p
   4640  1.1  mrg 			   (*curr_id->operand_loc[i],
   4641  1.1  mrg 			    *curr_id->operand_loc[goal_alt_matched[i][0]],
   4642  1.1  mrg 			    -1)))))
   4643  1.1  mrg 	{
   4644  1.1  mrg 	  /* generate reloads for input and matched outputs.  */
   4645  1.1  mrg 	  match_inputs[0] = i;
   4646  1.1  mrg 	  match_inputs[1] = -1;
   4647  1.1  mrg 	  match_reload (goal_alt_matched[i][0], match_inputs, outputs,
   4648  1.1  mrg 			goal_alt[i], &goal_alt_exclude_start_hard_regs[i],
   4649  1.1  mrg 			&before, &after,
   4650  1.1  mrg 			curr_static_id->operand_alternative
   4651  1.1  mrg 			[goal_alt_number * n_operands + goal_alt_matched[i][0]]
   4652  1.1  mrg 			.earlyclobber);
   4653  1.1  mrg 	}
   4654  1.1  mrg       else if ((curr_static_id->operand[i].type == OP_OUT
   4655  1.1  mrg 		|| (curr_static_id->operand[i].type == OP_INOUT
   4656  1.1  mrg 		    && (operands_match_p
   4657  1.1  mrg 			(*curr_id->operand_loc[i],
   4658  1.1  mrg 			 *curr_id->operand_loc[goal_alt_matched[i][0]],
   4659  1.1  mrg 			 -1))))
   4660  1.1  mrg 	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
   4661  1.1  mrg 		    == OP_IN))
   4662  1.1  mrg 	/* Generate reloads for output and matched inputs.  */
   4663  1.1  mrg 	match_reload (i, goal_alt_matched[i], outputs, goal_alt[i],
   4664  1.1  mrg 		      &goal_alt_exclude_start_hard_regs[i], &before, &after,
   4665  1.1  mrg 		      curr_static_id->operand_alternative
   4666  1.1  mrg 		      [goal_alt_number * n_operands + i].earlyclobber);
   4667  1.1  mrg       else if (curr_static_id->operand[i].type == OP_IN
   4668  1.1  mrg 	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
   4669  1.1  mrg 		   == OP_IN))
   4670  1.1  mrg 	{
   4671  1.1  mrg 	  /* Generate reloads for matched inputs.  */
   4672  1.1  mrg 	  match_inputs[0] = i;
   4673  1.1  mrg 	  for (j = 0; (k = goal_alt_matched[i][j]) >= 0; j++)
   4674  1.1  mrg 	    match_inputs[j + 1] = k;
   4675  1.1  mrg 	  match_inputs[j + 1] = -1;
   4676  1.1  mrg 	  match_reload (-1, match_inputs, outputs, goal_alt[i],
   4677  1.1  mrg 			&goal_alt_exclude_start_hard_regs[i],
   4678  1.1  mrg 			&before, &after, false);
   4679  1.1  mrg 	}
   4680  1.1  mrg       else
   4681  1.1  mrg 	/* We must generate code in any case when function
   4682  1.1  mrg 	   process_alt_operands decides that it is possible.  */
   4683  1.1  mrg 	gcc_unreachable ();
   4684  1.1  mrg 
   4685  1.1  mrg       if (optional_p)
   4686  1.1  mrg 	{
   4687  1.1  mrg 	  rtx reg = op;
   4688  1.1  mrg 
   4689  1.1  mrg 	  lra_assert (REG_P (reg));
   4690  1.1  mrg 	  regno = REGNO (reg);
   4691  1.1  mrg 	  op = *curr_id->operand_loc[i]; /* Substitution.  */
   4692  1.1  mrg 	  if (GET_CODE (op) == SUBREG)
   4693  1.1  mrg 	    op = SUBREG_REG (op);
   4694  1.1  mrg 	  gcc_assert (REG_P (op) && (int) REGNO (op) >= new_regno_start);
   4695  1.1  mrg 	  bitmap_set_bit (&lra_optional_reload_pseudos, REGNO (op));
   4696  1.1  mrg 	  lra_reg_info[REGNO (op)].restore_rtx = reg;
   4697  1.1  mrg 	  if (lra_dump_file != NULL)
   4698  1.1  mrg 	    fprintf (lra_dump_file,
   4699  1.1  mrg 		     "      Making reload reg %d for reg %d optional\n",
   4700  1.1  mrg 		     REGNO (op), regno);
   4701  1.1  mrg 	}
   4702  1.1  mrg     }
   4703  1.1  mrg   if (before != NULL_RTX || after != NULL_RTX
   4704  1.1  mrg       || max_regno_before != max_reg_num ())
   4705  1.1  mrg     change_p = true;
   4706  1.1  mrg   if (change_p)
   4707  1.1  mrg     {
   4708  1.1  mrg       lra_update_operator_dups (curr_id);
   4709  1.1  mrg       /* Something changes -- process the insn.	 */
   4710  1.1  mrg       lra_update_insn_regno_info (curr_insn);
   4711  1.1  mrg     }
   4712  1.1  mrg   lra_process_new_insns (curr_insn, before, after, "Inserting insn reload");
   4713  1.1  mrg   return change_p;
   4714  1.1  mrg }
   4715  1.1  mrg 
   4716  1.1  mrg /* Return true if INSN satisfies all constraints.  In other words, no
   4717  1.1  mrg    reload insns are needed.  */
   4718  1.1  mrg bool
   4719  1.1  mrg lra_constrain_insn (rtx_insn *insn)
   4720  1.1  mrg {
   4721  1.1  mrg   int saved_new_regno_start = new_regno_start;
   4722  1.1  mrg   int saved_new_insn_uid_start = new_insn_uid_start;
   4723  1.1  mrg   bool change_p;
   4724  1.1  mrg 
   4725  1.1  mrg   curr_insn = insn;
   4726  1.1  mrg   curr_id = lra_get_insn_recog_data (curr_insn);
   4727  1.1  mrg   curr_static_id = curr_id->insn_static_data;
   4728  1.1  mrg   new_insn_uid_start = get_max_uid ();
   4729  1.1  mrg   new_regno_start = max_reg_num ();
   4730  1.1  mrg   change_p = curr_insn_transform (true);
   4731  1.1  mrg   new_regno_start = saved_new_regno_start;
   4732  1.1  mrg   new_insn_uid_start = saved_new_insn_uid_start;
   4733  1.1  mrg   return ! change_p;
   4734  1.1  mrg }
   4735  1.1  mrg 
   4736  1.1  mrg /* Return true if X is in LIST.	 */
   4737  1.1  mrg static bool
   4738  1.1  mrg in_list_p (rtx x, rtx list)
   4739  1.1  mrg {
   4740  1.1  mrg   for (; list != NULL_RTX; list = XEXP (list, 1))
   4741  1.1  mrg     if (XEXP (list, 0) == x)
   4742  1.1  mrg       return true;
   4743  1.1  mrg   return false;
   4744  1.1  mrg }
   4745  1.1  mrg 
   4746  1.1  mrg /* Return true if X contains an allocatable hard register (if
   4747  1.1  mrg    HARD_REG_P) or a (spilled if SPILLED_P) pseudo.  */
   4748  1.1  mrg static bool
   4749  1.1  mrg contains_reg_p (rtx x, bool hard_reg_p, bool spilled_p)
   4750  1.1  mrg {
   4751  1.1  mrg   int i, j;
   4752  1.1  mrg   const char *fmt;
   4753  1.1  mrg   enum rtx_code code;
   4754  1.1  mrg 
   4755  1.1  mrg   code = GET_CODE (x);
   4756  1.1  mrg   if (REG_P (x))
   4757  1.1  mrg     {
   4758  1.1  mrg       int regno = REGNO (x);
   4759  1.1  mrg       HARD_REG_SET alloc_regs;
   4760  1.1  mrg 
   4761  1.1  mrg       if (hard_reg_p)
   4762  1.1  mrg 	{
   4763  1.1  mrg 	  if (regno >= FIRST_PSEUDO_REGISTER)
   4764  1.1  mrg 	    regno = lra_get_regno_hard_regno (regno);
   4765  1.1  mrg 	  if (regno < 0)
   4766  1.1  mrg 	    return false;
   4767  1.1  mrg 	  alloc_regs = ~lra_no_alloc_regs;
   4768  1.1  mrg 	  return overlaps_hard_reg_set_p (alloc_regs, GET_MODE (x), regno);
   4769  1.1  mrg 	}
   4770  1.1  mrg       else
   4771  1.1  mrg 	{
   4772  1.1  mrg 	  if (regno < FIRST_PSEUDO_REGISTER)
   4773  1.1  mrg 	    return false;
   4774  1.1  mrg 	  if (! spilled_p)
   4775  1.1  mrg 	    return true;
   4776  1.1  mrg 	  return lra_get_regno_hard_regno (regno) < 0;
   4777  1.1  mrg 	}
   4778  1.1  mrg     }
   4779  1.1  mrg   fmt = GET_RTX_FORMAT (code);
   4780  1.1  mrg   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   4781  1.1  mrg     {
   4782  1.1  mrg       if (fmt[i] == 'e')
   4783  1.1  mrg 	{
   4784  1.1  mrg 	  if (contains_reg_p (XEXP (x, i), hard_reg_p, spilled_p))
   4785  1.1  mrg 	    return true;
   4786  1.1  mrg 	}
   4787  1.1  mrg       else if (fmt[i] == 'E')
   4788  1.1  mrg 	{
   4789  1.1  mrg 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
   4790  1.1  mrg 	    if (contains_reg_p (XVECEXP (x, i, j), hard_reg_p, spilled_p))
   4791  1.1  mrg 	      return true;
   4792  1.1  mrg 	}
   4793  1.1  mrg     }
   4794  1.1  mrg   return false;
   4795  1.1  mrg }
   4796  1.1  mrg 
   4797  1.1  mrg /* Process all regs in location *LOC and change them on equivalent
   4798  1.1  mrg    substitution.  Return true if any change was done.  */
   4799  1.1  mrg static bool
   4800  1.1  mrg loc_equivalence_change_p (rtx *loc)
   4801  1.1  mrg {
   4802  1.1  mrg   rtx subst, reg, x = *loc;
   4803  1.1  mrg   bool result = false;
   4804  1.1  mrg   enum rtx_code code = GET_CODE (x);
   4805  1.1  mrg   const char *fmt;
   4806  1.1  mrg   int i, j;
   4807  1.1  mrg 
   4808  1.1  mrg   if (code == SUBREG)
   4809  1.1  mrg     {
   4810  1.1  mrg       reg = SUBREG_REG (x);
   4811  1.1  mrg       if ((subst = get_equiv_with_elimination (reg, curr_insn)) != reg
   4812  1.1  mrg 	  && GET_MODE (subst) == VOIDmode)
   4813  1.1  mrg 	{
   4814  1.1  mrg 	  /* We cannot reload debug location.  Simplify subreg here
   4815  1.1  mrg 	     while we know the inner mode.  */
   4816  1.1  mrg 	  *loc = simplify_gen_subreg (GET_MODE (x), subst,
   4817  1.1  mrg 				      GET_MODE (reg), SUBREG_BYTE (x));
   4818  1.1  mrg 	  return true;
   4819  1.1  mrg 	}
   4820  1.1  mrg     }
   4821  1.1  mrg   if (code == REG && (subst = get_equiv_with_elimination (x, curr_insn)) != x)
   4822  1.1  mrg     {
   4823  1.1  mrg       *loc = subst;
   4824  1.1  mrg       return true;
   4825  1.1  mrg     }
   4826  1.1  mrg 
   4827  1.1  mrg   /* Scan all the operand sub-expressions.  */
   4828  1.1  mrg   fmt = GET_RTX_FORMAT (code);
   4829  1.1  mrg   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   4830  1.1  mrg     {
   4831  1.1  mrg       if (fmt[i] == 'e')
   4832  1.1  mrg 	result = loc_equivalence_change_p (&XEXP (x, i)) || result;
   4833  1.1  mrg       else if (fmt[i] == 'E')
   4834  1.1  mrg 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
   4835  1.1  mrg 	  result
   4836  1.1  mrg 	    = loc_equivalence_change_p (&XVECEXP (x, i, j)) || result;
   4837  1.1  mrg     }
   4838  1.1  mrg   return result;
   4839  1.1  mrg }
   4840  1.1  mrg 
   4841  1.1  mrg /* Similar to loc_equivalence_change_p, but for use as
   4842  1.1  mrg    simplify_replace_fn_rtx callback.  DATA is insn for which the
   4843  1.1  mrg    elimination is done.  If it null we don't do the elimination.  */
   4844  1.1  mrg static rtx
   4845  1.1  mrg loc_equivalence_callback (rtx loc, const_rtx, void *data)
   4846  1.1  mrg {
   4847  1.1  mrg   if (!REG_P (loc))
   4848  1.1  mrg     return NULL_RTX;
   4849  1.1  mrg 
   4850  1.1  mrg   rtx subst = (data == NULL
   4851  1.1  mrg 	       ? get_equiv (loc) : get_equiv_with_elimination (loc, (rtx_insn *) data));
   4852  1.1  mrg   if (subst != loc)
   4853  1.1  mrg     return subst;
   4854  1.1  mrg 
   4855  1.1  mrg   return NULL_RTX;
   4856  1.1  mrg }
   4857  1.1  mrg 
   4858  1.1  mrg /* Maximum number of generated reload insns per an insn.  It is for
   4859  1.1  mrg    preventing this pass cycling in a bug case.	*/
   4860  1.1  mrg #define MAX_RELOAD_INSNS_NUMBER LRA_MAX_INSN_RELOADS
   4861  1.1  mrg 
   4862  1.1  mrg /* The current iteration number of this LRA pass.  */
   4863  1.1  mrg int lra_constraint_iter;
   4864  1.1  mrg 
   4865  1.1  mrg /* True if we should during assignment sub-pass check assignment
   4866  1.1  mrg    correctness for all pseudos and spill some of them to correct
   4867  1.1  mrg    conflicts.  It can be necessary when we substitute equiv which
   4868  1.1  mrg    needs checking register allocation correctness because the
   4869  1.1  mrg    equivalent value contains allocatable hard registers, or when we
   4870  1.1  mrg    restore multi-register pseudo, or when we change the insn code and
   4871  1.1  mrg    its operand became INOUT operand when it was IN one before.  */
   4872  1.1  mrg bool check_and_force_assignment_correctness_p;
   4873  1.1  mrg 
   4874  1.1  mrg /* Return true if REGNO is referenced in more than one block.  */
   4875  1.1  mrg static bool
   4876  1.1  mrg multi_block_pseudo_p (int regno)
   4877  1.1  mrg {
   4878  1.1  mrg   basic_block bb = NULL;
   4879  1.1  mrg   unsigned int uid;
   4880  1.1  mrg   bitmap_iterator bi;
   4881  1.1  mrg 
   4882  1.1  mrg   if (regno < FIRST_PSEUDO_REGISTER)
   4883  1.1  mrg     return false;
   4884  1.1  mrg 
   4885  1.1  mrg   EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
   4886  1.1  mrg     if (bb == NULL)
   4887  1.1  mrg       bb = BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn);
   4888  1.1  mrg     else if (BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn) != bb)
   4889  1.1  mrg       return true;
   4890  1.1  mrg   return false;
   4891  1.1  mrg }
   4892  1.1  mrg 
   4893  1.1  mrg /* Return true if LIST contains a deleted insn.  */
   4894  1.1  mrg static bool
   4895  1.1  mrg contains_deleted_insn_p (rtx_insn_list *list)
   4896  1.1  mrg {
   4897  1.1  mrg   for (; list != NULL_RTX; list = list->next ())
   4898  1.1  mrg     if (NOTE_P (list->insn ())
   4899  1.1  mrg 	&& NOTE_KIND (list->insn ()) == NOTE_INSN_DELETED)
   4900  1.1  mrg       return true;
   4901  1.1  mrg   return false;
   4902  1.1  mrg }
   4903  1.1  mrg 
   4904  1.1  mrg /* Return true if X contains a pseudo dying in INSN.  */
   4905  1.1  mrg static bool
   4906  1.1  mrg dead_pseudo_p (rtx x, rtx_insn *insn)
   4907  1.1  mrg {
   4908  1.1  mrg   int i, j;
   4909  1.1  mrg   const char *fmt;
   4910  1.1  mrg   enum rtx_code code;
   4911  1.1  mrg 
   4912  1.1  mrg   if (REG_P (x))
   4913  1.1  mrg     return (insn != NULL_RTX
   4914  1.1  mrg 	    && find_regno_note (insn, REG_DEAD, REGNO (x)) != NULL_RTX);
   4915  1.1  mrg   code = GET_CODE (x);
   4916  1.1  mrg   fmt = GET_RTX_FORMAT (code);
   4917  1.1  mrg   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   4918  1.1  mrg     {
   4919  1.1  mrg       if (fmt[i] == 'e')
   4920  1.1  mrg 	{
   4921  1.1  mrg 	  if (dead_pseudo_p (XEXP (x, i), insn))
   4922  1.1  mrg 	    return true;
   4923  1.1  mrg 	}
   4924  1.1  mrg       else if (fmt[i] == 'E')
   4925  1.1  mrg 	{
   4926  1.1  mrg 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
   4927  1.1  mrg 	    if (dead_pseudo_p (XVECEXP (x, i, j), insn))
   4928  1.1  mrg 	      return true;
   4929  1.1  mrg 	}
   4930  1.1  mrg     }
   4931  1.1  mrg   return false;
   4932  1.1  mrg }
   4933  1.1  mrg 
   4934  1.1  mrg /* Return true if INSN contains a dying pseudo in INSN right hand
   4935  1.1  mrg    side.  */
   4936  1.1  mrg static bool
   4937  1.1  mrg insn_rhs_dead_pseudo_p (rtx_insn *insn)
   4938  1.1  mrg {
   4939  1.1  mrg   rtx set = single_set (insn);
   4940  1.1  mrg 
   4941  1.1  mrg   gcc_assert (set != NULL);
   4942  1.1  mrg   return dead_pseudo_p (SET_SRC (set), insn);
   4943  1.1  mrg }
   4944  1.1  mrg 
   4945  1.1  mrg /* Return true if any init insn of REGNO contains a dying pseudo in
   4946  1.1  mrg    insn right hand side.  */
   4947  1.1  mrg static bool
   4948  1.1  mrg init_insn_rhs_dead_pseudo_p (int regno)
   4949  1.1  mrg {
   4950  1.1  mrg   rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
   4951  1.1  mrg 
   4952  1.1  mrg   if (insns == NULL)
   4953  1.1  mrg     return false;
   4954  1.1  mrg   for (; insns != NULL_RTX; insns = insns->next ())
   4955  1.1  mrg     if (insn_rhs_dead_pseudo_p (insns->insn ()))
   4956  1.1  mrg       return true;
   4957  1.1  mrg   return false;
   4958  1.1  mrg }
   4959  1.1  mrg 
   4960  1.1  mrg /* Return TRUE if REGNO has a reverse equivalence.  The equivalence is
   4961  1.1  mrg    reverse only if we have one init insn with given REGNO as a
   4962  1.1  mrg    source.  */
   4963  1.1  mrg static bool
   4964  1.1  mrg reverse_equiv_p (int regno)
   4965  1.1  mrg {
   4966  1.1  mrg   rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
   4967  1.1  mrg   rtx set;
   4968  1.1  mrg 
   4969  1.1  mrg   if (insns == NULL)
   4970  1.1  mrg     return false;
   4971  1.1  mrg   if (! INSN_P (insns->insn ())
   4972  1.1  mrg       || insns->next () != NULL)
   4973  1.1  mrg     return false;
   4974  1.1  mrg   if ((set = single_set (insns->insn ())) == NULL_RTX)
   4975  1.1  mrg     return false;
   4976  1.1  mrg   return REG_P (SET_SRC (set)) && (int) REGNO (SET_SRC (set)) == regno;
   4977  1.1  mrg }
   4978  1.1  mrg 
   4979  1.1  mrg /* Return TRUE if REGNO was reloaded in an equivalence init insn.  We
   4980  1.1  mrg    call this function only for non-reverse equivalence.  */
   4981  1.1  mrg static bool
   4982  1.1  mrg contains_reloaded_insn_p (int regno)
   4983  1.1  mrg {
   4984  1.1  mrg   rtx set;
   4985  1.1  mrg   rtx_insn_list *list = ira_reg_equiv[regno].init_insns;
   4986  1.1  mrg 
   4987  1.1  mrg   for (; list != NULL; list = list->next ())
   4988  1.1  mrg     if ((set = single_set (list->insn ())) == NULL_RTX
   4989  1.1  mrg 	|| ! REG_P (SET_DEST (set))
   4990  1.1  mrg 	|| (int) REGNO (SET_DEST (set)) != regno)
   4991  1.1  mrg       return true;
   4992  1.1  mrg   return false;
   4993  1.1  mrg }
   4994  1.1  mrg 
   4995  1.1  mrg /* Entry function of LRA constraint pass.  Return true if the
   4996  1.1  mrg    constraint pass did change the code.	 */
   4997  1.1  mrg bool
   4998  1.1  mrg lra_constraints (bool first_p)
   4999  1.1  mrg {
   5000  1.1  mrg   bool changed_p;
   5001  1.1  mrg   int i, hard_regno, new_insns_num;
   5002  1.1  mrg   unsigned int min_len, new_min_len, uid;
   5003  1.1  mrg   rtx set, x, reg, dest_reg;
   5004  1.1  mrg   basic_block last_bb;
   5005  1.1  mrg   bitmap_iterator bi;
   5006  1.1  mrg 
   5007  1.1  mrg   lra_constraint_iter++;
   5008  1.1  mrg   if (lra_dump_file != NULL)
   5009  1.1  mrg     fprintf (lra_dump_file, "\n********** Local #%d: **********\n\n",
   5010  1.1  mrg 	     lra_constraint_iter);
   5011  1.1  mrg   changed_p = false;
   5012  1.1  mrg   if (pic_offset_table_rtx
   5013  1.1  mrg       && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
   5014  1.1  mrg     check_and_force_assignment_correctness_p = true;
   5015  1.1  mrg   else if (first_p)
   5016  1.1  mrg     /* On the first iteration we should check IRA assignment
   5017  1.1  mrg        correctness.  In rare cases, the assignments can be wrong as
   5018  1.1  mrg        early clobbers operands are ignored in IRA or usages of
   5019  1.1  mrg        paradoxical sub-registers are not taken into account by
   5020  1.1  mrg        IRA.  */
   5021  1.1  mrg     check_and_force_assignment_correctness_p = true;
   5022  1.1  mrg   new_insn_uid_start = get_max_uid ();
   5023  1.1  mrg   new_regno_start = first_p ? lra_constraint_new_regno_start : max_reg_num ();
   5024  1.1  mrg   /* Mark used hard regs for target stack size calulations.  */
   5025  1.1  mrg   for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
   5026  1.1  mrg     if (lra_reg_info[i].nrefs != 0
   5027  1.1  mrg 	&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
   5028  1.1  mrg       {
   5029  1.1  mrg 	int j, nregs;
   5030  1.1  mrg 
   5031  1.1  mrg 	nregs = hard_regno_nregs (hard_regno, lra_reg_info[i].biggest_mode);
   5032  1.1  mrg 	for (j = 0; j < nregs; j++)
   5033  1.1  mrg 	  df_set_regs_ever_live (hard_regno + j, true);
   5034  1.1  mrg       }
   5035  1.1  mrg   /* Do elimination before the equivalence processing as we can spill
   5036  1.1  mrg      some pseudos during elimination.  */
   5037  1.1  mrg   lra_eliminate (false, first_p);
   5038  1.1  mrg   auto_bitmap equiv_insn_bitmap (&reg_obstack);
   5039  1.1  mrg   for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
   5040  1.1  mrg     if (lra_reg_info[i].nrefs != 0)
   5041  1.1  mrg       {
   5042  1.1  mrg 	ira_reg_equiv[i].profitable_p = true;
   5043  1.1  mrg 	reg = regno_reg_rtx[i];
   5044  1.1  mrg 	if (lra_get_regno_hard_regno (i) < 0 && (x = get_equiv (reg)) != reg)
   5045  1.1  mrg 	  {
   5046  1.1  mrg 	    bool pseudo_p = contains_reg_p (x, false, false);
   5047  1.1  mrg 
   5048  1.1  mrg 	    /* After RTL transformation, we cannot guarantee that
   5049  1.1  mrg 	       pseudo in the substitution was not reloaded which might
   5050  1.1  mrg 	       make equivalence invalid.  For example, in reverse
   5051  1.1  mrg 	       equiv of p0
   5052  1.1  mrg 
   5053  1.1  mrg 	       p0 <- ...
   5054  1.1  mrg 	       ...
   5055  1.1  mrg 	       equiv_mem <- p0
   5056  1.1  mrg 
   5057  1.1  mrg 	       the memory address register was reloaded before the 2nd
   5058  1.1  mrg 	       insn.  */
   5059  1.1  mrg 	    if ((! first_p && pseudo_p)
   5060  1.1  mrg 		/* We don't use DF for compilation speed sake.  So it
   5061  1.1  mrg 		   is problematic to update live info when we use an
   5062  1.1  mrg 		   equivalence containing pseudos in more than one
   5063  1.1  mrg 		   BB.  */
   5064  1.1  mrg 		|| (pseudo_p && multi_block_pseudo_p (i))
   5065  1.1  mrg 		/* If an init insn was deleted for some reason, cancel
   5066  1.1  mrg 		   the equiv.  We could update the equiv insns after
   5067  1.1  mrg 		   transformations including an equiv insn deletion
   5068  1.1  mrg 		   but it is not worthy as such cases are extremely
   5069  1.1  mrg 		   rare.  */
   5070  1.1  mrg 		|| contains_deleted_insn_p (ira_reg_equiv[i].init_insns)
   5071  1.1  mrg 		/* If it is not a reverse equivalence, we check that a
   5072  1.1  mrg 		   pseudo in rhs of the init insn is not dying in the
   5073  1.1  mrg 		   insn.  Otherwise, the live info at the beginning of
   5074  1.1  mrg 		   the corresponding BB might be wrong after we
   5075  1.1  mrg 		   removed the insn.  When the equiv can be a
   5076  1.1  mrg 		   constant, the right hand side of the init insn can
   5077  1.1  mrg 		   be a pseudo.  */
   5078  1.1  mrg 		|| (! reverse_equiv_p (i)
   5079  1.1  mrg 		    && (init_insn_rhs_dead_pseudo_p (i)
   5080  1.1  mrg 			/* If we reloaded the pseudo in an equivalence
   5081  1.1  mrg 			   init insn, we cannot remove the equiv init
   5082  1.1  mrg 			   insns and the init insns might write into
   5083  1.1  mrg 			   const memory in this case.  */
   5084  1.1  mrg 			|| contains_reloaded_insn_p (i)))
   5085  1.1  mrg 		/* Prevent access beyond equivalent memory for
   5086  1.1  mrg 		   paradoxical subregs.  */
   5087  1.1  mrg 		|| (MEM_P (x)
   5088  1.1  mrg 		    && maybe_gt (GET_MODE_SIZE (lra_reg_info[i].biggest_mode),
   5089  1.1  mrg 				 GET_MODE_SIZE (GET_MODE (x))))
   5090  1.1  mrg 		|| (pic_offset_table_rtx
   5091  1.1  mrg 		    && ((CONST_POOL_OK_P (PSEUDO_REGNO_MODE (i), x)
   5092  1.1  mrg 			 && (targetm.preferred_reload_class
   5093  1.1  mrg 			     (x, lra_get_allocno_class (i)) == NO_REGS))
   5094  1.1  mrg 			|| contains_symbol_ref_p (x))))
   5095  1.1  mrg 	      ira_reg_equiv[i].defined_p = false;
   5096  1.1  mrg 	    if (contains_reg_p (x, false, true))
   5097  1.1  mrg 	      ira_reg_equiv[i].profitable_p = false;
   5098  1.1  mrg 	    if (get_equiv (reg) != reg)
   5099  1.1  mrg 	      bitmap_ior_into (equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
   5100  1.1  mrg 	  }
   5101  1.1  mrg       }
   5102  1.1  mrg   for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
   5103  1.1  mrg     update_equiv (i);
   5104  1.1  mrg   /* We should add all insns containing pseudos which should be
   5105  1.1  mrg      substituted by their equivalences.  */
   5106  1.1  mrg   EXECUTE_IF_SET_IN_BITMAP (equiv_insn_bitmap, 0, uid, bi)
   5107  1.1  mrg     lra_push_insn_by_uid (uid);
   5108  1.1  mrg   min_len = lra_insn_stack_length ();
   5109  1.1  mrg   new_insns_num = 0;
   5110  1.1  mrg   last_bb = NULL;
   5111  1.1  mrg   changed_p = false;
   5112  1.1  mrg   while ((new_min_len = lra_insn_stack_length ()) != 0)
   5113  1.1  mrg     {
   5114  1.1  mrg       curr_insn = lra_pop_insn ();
   5115  1.1  mrg       --new_min_len;
   5116  1.1  mrg       curr_bb = BLOCK_FOR_INSN (curr_insn);
   5117  1.1  mrg       if (curr_bb != last_bb)
   5118  1.1  mrg 	{
   5119  1.1  mrg 	  last_bb = curr_bb;
   5120  1.1  mrg 	  bb_reload_num = lra_curr_reload_num;
   5121  1.1  mrg 	}
   5122  1.1  mrg       if (min_len > new_min_len)
   5123  1.1  mrg 	{
   5124  1.1  mrg 	  min_len = new_min_len;
   5125  1.1  mrg 	  new_insns_num = 0;
   5126  1.1  mrg 	}
   5127  1.1  mrg       if (new_insns_num > MAX_RELOAD_INSNS_NUMBER)
   5128  1.1  mrg 	internal_error
   5129  1.1  mrg 	  ("maximum number of generated reload insns per insn achieved (%d)",
   5130  1.1  mrg 	   MAX_RELOAD_INSNS_NUMBER);
   5131  1.1  mrg       new_insns_num++;
   5132  1.1  mrg       if (DEBUG_INSN_P (curr_insn))
   5133  1.1  mrg 	{
   5134  1.1  mrg 	  /* We need to check equivalence in debug insn and change
   5135  1.1  mrg 	     pseudo to the equivalent value if necessary.  */
   5136  1.1  mrg 	  curr_id = lra_get_insn_recog_data (curr_insn);
   5137  1.1  mrg 	  if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn)))
   5138  1.1  mrg 	    {
   5139  1.1  mrg 	      rtx old = *curr_id->operand_loc[0];
   5140  1.1  mrg 	      *curr_id->operand_loc[0]
   5141  1.1  mrg 		= simplify_replace_fn_rtx (old, NULL_RTX,
   5142  1.1  mrg 					   loc_equivalence_callback, curr_insn);
   5143  1.1  mrg 	      if (old != *curr_id->operand_loc[0])
   5144  1.1  mrg 		{
   5145  1.1  mrg 		  /* If we substitute pseudo by shared equivalence, we can fail
   5146  1.1  mrg 		     to update LRA reg info and this can result in many
   5147  1.1  mrg 		     unexpected consequences.  So keep rtl unshared:  */
   5148  1.1  mrg 		  *curr_id->operand_loc[0]
   5149  1.1  mrg 		    = copy_rtx (*curr_id->operand_loc[0]);
   5150  1.1  mrg 		  lra_update_insn_regno_info (curr_insn);
   5151  1.1  mrg 		  changed_p = true;
   5152  1.1  mrg 		}
   5153  1.1  mrg 	    }
   5154  1.1  mrg 	}
   5155  1.1  mrg       else if (INSN_P (curr_insn))
   5156  1.1  mrg 	{
   5157  1.1  mrg 	  if ((set = single_set (curr_insn)) != NULL_RTX)
   5158  1.1  mrg 	    {
   5159  1.1  mrg 	      dest_reg = SET_DEST (set);
   5160  1.1  mrg 	      /* The equivalence pseudo could be set up as SUBREG in a
   5161  1.1  mrg 		 case when it is a call restore insn in a mode
   5162  1.1  mrg 		 different from the pseudo mode.  */
   5163  1.1  mrg 	      if (GET_CODE (dest_reg) == SUBREG)
   5164  1.1  mrg 		dest_reg = SUBREG_REG (dest_reg);
   5165  1.1  mrg 	      if ((REG_P (dest_reg)
   5166  1.1  mrg 		   && (x = get_equiv (dest_reg)) != dest_reg
   5167  1.1  mrg 		   /* Remove insns which set up a pseudo whose value
   5168  1.1  mrg 		      cannot be changed.  Such insns might be not in
   5169  1.1  mrg 		      init_insns because we don't update equiv data
   5170  1.1  mrg 		      during insn transformations.
   5171  1.1  mrg 
   5172  1.1  mrg 		      As an example, let suppose that a pseudo got
   5173  1.1  mrg 		      hard register and on the 1st pass was not
   5174  1.1  mrg 		      changed to equivalent constant.  We generate an
   5175  1.1  mrg 		      additional insn setting up the pseudo because of
   5176  1.1  mrg 		      secondary memory movement.  Then the pseudo is
   5177  1.1  mrg 		      spilled and we use the equiv constant.  In this
   5178  1.1  mrg 		      case we should remove the additional insn and
   5179  1.1  mrg 		      this insn is not init_insns list.  */
   5180  1.1  mrg 		   && (! MEM_P (x) || MEM_READONLY_P (x)
   5181  1.1  mrg 		       /* Check that this is actually an insn setting
   5182  1.1  mrg 			  up the equivalence.  */
   5183  1.1  mrg 		       || in_list_p (curr_insn,
   5184  1.1  mrg 				     ira_reg_equiv
   5185  1.1  mrg 				     [REGNO (dest_reg)].init_insns)))
   5186  1.1  mrg 		  || (((x = get_equiv (SET_SRC (set))) != SET_SRC (set))
   5187  1.1  mrg 		      && in_list_p (curr_insn,
   5188  1.1  mrg 				    ira_reg_equiv
   5189  1.1  mrg 				    [REGNO (SET_SRC (set))].init_insns)))
   5190  1.1  mrg 		{
   5191  1.1  mrg 		  /* This is equiv init insn of pseudo which did not get a
   5192  1.1  mrg 		     hard register -- remove the insn.	*/
   5193  1.1  mrg 		  if (lra_dump_file != NULL)
   5194  1.1  mrg 		    {
   5195  1.1  mrg 		      fprintf (lra_dump_file,
   5196  1.1  mrg 			       "      Removing equiv init insn %i (freq=%d)\n",
   5197  1.1  mrg 			       INSN_UID (curr_insn),
   5198  1.1  mrg 			       REG_FREQ_FROM_BB (BLOCK_FOR_INSN (curr_insn)));
   5199  1.1  mrg 		      dump_insn_slim (lra_dump_file, curr_insn);
   5200  1.1  mrg 		    }
   5201  1.1  mrg 		  if (contains_reg_p (x, true, false))
   5202  1.1  mrg 		    check_and_force_assignment_correctness_p = true;
   5203  1.1  mrg 		  lra_set_insn_deleted (curr_insn);
   5204  1.1  mrg 		  continue;
   5205  1.1  mrg 		}
   5206  1.1  mrg 	    }
   5207  1.1  mrg 	  curr_id = lra_get_insn_recog_data (curr_insn);
   5208  1.1  mrg 	  curr_static_id = curr_id->insn_static_data;
   5209  1.1  mrg 	  init_curr_insn_input_reloads ();
   5210  1.1  mrg 	  init_curr_operand_mode ();
   5211  1.1  mrg 	  if (curr_insn_transform (false))
   5212  1.1  mrg 	    changed_p = true;
   5213  1.1  mrg 	  /* Check non-transformed insns too for equiv change as USE
   5214  1.1  mrg 	     or CLOBBER don't need reloads but can contain pseudos
   5215  1.1  mrg 	     being changed on their equivalences.  */
   5216  1.1  mrg 	  else if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn))
   5217  1.1  mrg 		   && loc_equivalence_change_p (&PATTERN (curr_insn)))
   5218  1.1  mrg 	    {
   5219  1.1  mrg 	      lra_update_insn_regno_info (curr_insn);
   5220  1.1  mrg 	      changed_p = true;
   5221  1.1  mrg 	    }
   5222  1.1  mrg 	}
   5223  1.1  mrg     }
   5224  1.1  mrg 
   5225  1.1  mrg   /* If we used a new hard regno, changed_p should be true because the
   5226  1.1  mrg      hard reg is assigned to a new pseudo.  */
   5227  1.1  mrg   if (flag_checking && !changed_p)
   5228  1.1  mrg     {
   5229  1.1  mrg       for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
   5230  1.1  mrg 	if (lra_reg_info[i].nrefs != 0
   5231  1.1  mrg 	    && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
   5232  1.1  mrg 	  {
   5233  1.1  mrg 	    int j, nregs = hard_regno_nregs (hard_regno,
   5234  1.1  mrg 					     PSEUDO_REGNO_MODE (i));
   5235  1.1  mrg 
   5236  1.1  mrg 	    for (j = 0; j < nregs; j++)
   5237  1.1  mrg 	      lra_assert (df_regs_ever_live_p (hard_regno + j));
   5238  1.1  mrg 	  }
   5239  1.1  mrg     }
   5240  1.1  mrg   return changed_p;
   5241  1.1  mrg }
   5242  1.1  mrg 
   5243  1.1  mrg static void initiate_invariants (void);
   5244  1.1  mrg static void finish_invariants (void);
   5245  1.1  mrg 
/* Initiate the LRA constraint pass.  It is done once per
   function.  */
void
lra_constraints_init (void)
{
  /* Set up the table/pool/vector used to track inheritance
     invariants (see initiate_invariants below).  */
  initiate_invariants ();
}
   5253  1.1  mrg 
/* Finalize the LRA constraint pass.  It is done once per
   function.  */
void
lra_constraints_finish (void)
{
  /* Release the inheritance invariant table/pool/vector.  */
  finish_invariants ();
}
   5261  1.1  mrg 
   5262  1.1  mrg 
   5263  1.1  mrg 
/* Structure describing an invariant for inheritance.  */
struct lra_invariant
{
  /* The order number of the invariant.  */
  int num;
  /* The invariant RTX (the expression whose value is invariant).  */
  rtx invariant_rtx;
  /* The origin insn of the invariant.  Initialized to NULL by
     insert_invariant.  */
  rtx_insn *insn;
};
   5275  1.1  mrg 
/* Shorthand typedefs for invariants and (const) pointers to them.  */
typedef lra_invariant invariant_t;
typedef invariant_t *invariant_ptr_t;
typedef const invariant_t *const_invariant_ptr_t;

/* Vector of pointers to all recorded inheritance invariants.  */
static vec<invariant_ptr_t> invariants;

/* Allocation pool for the invariants.  */
static object_allocator<lra_invariant> *invariants_pool;

/* Hash table of the invariants, keyed by the invariant RTX (see
   invariant_hash / invariant_eq_p).  */
static htab_t invariant_table;
   5288  1.1  mrg 
   5289  1.1  mrg /* Hash function for INVARIANT.  */
   5290  1.1  mrg static hashval_t
   5291  1.1  mrg invariant_hash (const void *invariant)
   5292  1.1  mrg {
   5293  1.1  mrg   rtx inv = ((const_invariant_ptr_t) invariant)->invariant_rtx;
   5294  1.1  mrg   return lra_rtx_hash (inv);
   5295  1.1  mrg }
   5296  1.1  mrg 
   5297  1.1  mrg /* Equal function for invariants INVARIANT1 and INVARIANT2.  */
   5298  1.1  mrg static int
   5299  1.1  mrg invariant_eq_p (const void *invariant1, const void *invariant2)
   5300  1.1  mrg {
   5301  1.1  mrg   rtx inv1 = ((const_invariant_ptr_t) invariant1)->invariant_rtx;
   5302  1.1  mrg   rtx inv2 = ((const_invariant_ptr_t) invariant2)->invariant_rtx;
   5303  1.1  mrg 
   5304  1.1  mrg   return rtx_equal_p (inv1, inv2);
   5305  1.1  mrg }
   5306  1.1  mrg 
   5307  1.1  mrg /* Insert INVARIANT_RTX into the table if it is not there yet.  Return
   5308  1.1  mrg    invariant which is in the table.  */
   5309  1.1  mrg static invariant_ptr_t
   5310  1.1  mrg insert_invariant (rtx invariant_rtx)
   5311  1.1  mrg {
   5312  1.1  mrg   void **entry_ptr;
   5313  1.1  mrg   invariant_t invariant;
   5314  1.1  mrg   invariant_ptr_t invariant_ptr;
   5315  1.1  mrg 
   5316  1.1  mrg   invariant.invariant_rtx = invariant_rtx;
   5317  1.1  mrg   entry_ptr = htab_find_slot (invariant_table, &invariant, INSERT);
   5318  1.1  mrg   if (*entry_ptr == NULL)
   5319  1.1  mrg     {
   5320  1.1  mrg       invariant_ptr = invariants_pool->allocate ();
   5321  1.1  mrg       invariant_ptr->invariant_rtx = invariant_rtx;
   5322  1.1  mrg       invariant_ptr->insn = NULL;
   5323  1.1  mrg       invariants.safe_push (invariant_ptr);
   5324  1.1  mrg       *entry_ptr = (void *) invariant_ptr;
   5325  1.1  mrg     }
   5326  1.1  mrg   return (invariant_ptr_t) *entry_ptr;
   5327  1.1  mrg }
   5328  1.1  mrg 
/* Initiate the invariant table, its allocation pool, and the
   invariant vector.  Called once per function via
   lra_constraints_init.  */
static void
initiate_invariants (void)
{
  invariants.create (100);
  invariants_pool
    = new object_allocator<lra_invariant> ("Inheritance invariants");
  invariant_table = htab_create (100, invariant_hash, invariant_eq_p, NULL);
}
   5338  1.1  mrg 
/* Finish the invariant table: free the hash table, the allocation
   pool, and the invariant vector.  Called once per function via
   lra_constraints_finish.  */
static void
finish_invariants (void)
{
  htab_delete (invariant_table);
  delete invariants_pool;
  invariants.release ();
}
   5347  1.1  mrg 
/* Make the invariant table empty, keeping the table, pool, and
   vector allocated for reuse.  */
static void
clear_invariants (void)
{
  htab_empty (invariant_table);
  invariants_pool->release ();
  invariants.truncate (0);
}
   5356  1.1  mrg 
   5357  1.1  mrg 
   5358  1.1  mrg 
   5360  1.1  mrg /* This page contains code to do inheritance/split
   5361  1.1  mrg    transformations.  */
   5362  1.1  mrg 
/* Number of reloads passed so far in current EBB.  */
static int reloads_num;

/* Number of calls passed so far in current EBB.  */
static int calls_num;

/* Index ID is the CALLS_NUM associated with the last call we saw
   with ABI identifier ID.  */
static int last_call_for_abi[NUM_ABI_IDS];

/* Which registers have been fully or partially clobbered by a call
   since they were last used.  */
static HARD_REG_SET full_and_partial_call_clobbers;

/* Current reload pseudo check for validity of elements in
   USAGE_INSNS.	 */
static int curr_usage_insns_check;
   5380  1.1  mrg 
/* Info about last usage of registers in EBB to do inheritance/split
   transformation.  Inheritance transformation is done from a spilled
   pseudo and split transformations from a hard register or a pseudo
   assigned to a hard register.	 */
struct usage_insns
{
  /* If the value is equal to CURR_USAGE_INSNS_CHECK, then the member
     value INSNS is valid.  The insns is chain of optional debug insns
     and a finishing non-debug insn using the corresponding reg.  The
     value is also used to mark the registers which are set up in the
     current insn.  The negated insn uid is used for this.  */
  int check;
  /* Value of global reloads_num at the last insn in INSNS.  */
  int reloads_num;
  /* Value of global calls_num at the last insn in INSNS.  */
  int calls_num;
  /* It can be true only for splitting.	 And it means that the restore
     insn should be put after insn given by the following member.  */
  bool after_p;
  /* Next insns in the current EBB which use the original reg and the
     original reg value is not changed between the current insn and
     the next insns.  In other words, e.g. for inheritance, if we need
     to use the original reg value again in the next insns we can try
     to use the value in a hard register from a reload insn of the
     current insn.  */
  rtx insns;
};

/* Map: regno -> corresponding pseudo usage insns.  */
static struct usage_insns *usage_insns;
   5411  1.1  mrg 
   5412  1.1  mrg static void
   5413  1.1  mrg setup_next_usage_insn (int regno, rtx insn, int reloads_num, bool after_p)
   5414  1.1  mrg {
   5415  1.1  mrg   usage_insns[regno].check = curr_usage_insns_check;
   5416  1.1  mrg   usage_insns[regno].insns = insn;
   5417  1.1  mrg   usage_insns[regno].reloads_num = reloads_num;
   5418  1.1  mrg   usage_insns[regno].calls_num = calls_num;
   5419  1.1  mrg   usage_insns[regno].after_p = after_p;
   5420  1.1  mrg   if (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0)
   5421  1.1  mrg     remove_from_hard_reg_set (&full_and_partial_call_clobbers,
   5422  1.1  mrg 			      PSEUDO_REGNO_MODE (regno),
   5423  1.1  mrg 			      reg_renumber[regno]);
   5424  1.1  mrg }
   5425  1.1  mrg 
   5426  1.1  mrg /* The function is used to form list REGNO usages which consists of
   5427  1.1  mrg    optional debug insns finished by a non-debug insn using REGNO.
   5428  1.1  mrg    RELOADS_NUM is current number of reload insns processed so far.  */
   5429  1.1  mrg static void
   5430  1.1  mrg add_next_usage_insn (int regno, rtx_insn *insn, int reloads_num)
   5431  1.1  mrg {
   5432  1.1  mrg   rtx next_usage_insns;
   5433  1.1  mrg 
   5434  1.1  mrg   if (usage_insns[regno].check == curr_usage_insns_check
   5435  1.1  mrg       && (next_usage_insns = usage_insns[regno].insns) != NULL_RTX
   5436  1.1  mrg       && DEBUG_INSN_P (insn))
   5437  1.1  mrg     {
   5438  1.1  mrg       /* Check that we did not add the debug insn yet.	*/
   5439  1.1  mrg       if (next_usage_insns != insn
   5440  1.1  mrg 	  && (GET_CODE (next_usage_insns) != INSN_LIST
   5441  1.1  mrg 	      || XEXP (next_usage_insns, 0) != insn))
   5442  1.1  mrg 	usage_insns[regno].insns = gen_rtx_INSN_LIST (VOIDmode, insn,
   5443  1.1  mrg 						      next_usage_insns);
   5444  1.1  mrg     }
   5445  1.1  mrg   else if (NONDEBUG_INSN_P (insn))
   5446  1.1  mrg     setup_next_usage_insn (regno, insn, reloads_num, false);
   5447  1.1  mrg   else
   5448  1.1  mrg     usage_insns[regno].check = 0;
   5449  1.1  mrg }
   5450  1.1  mrg 
   5451  1.1  mrg /* Return first non-debug insn in list USAGE_INSNS.  */
   5452  1.1  mrg static rtx_insn *
   5453  1.1  mrg skip_usage_debug_insns (rtx usage_insns)
   5454  1.1  mrg {
   5455  1.1  mrg   rtx insn;
   5456  1.1  mrg 
   5457  1.1  mrg   /* Skip debug insns.  */
   5458  1.1  mrg   for (insn = usage_insns;
   5459  1.1  mrg        insn != NULL_RTX && GET_CODE (insn) == INSN_LIST;
   5460  1.1  mrg        insn = XEXP (insn, 1))
   5461  1.1  mrg     ;
   5462  1.1  mrg   return safe_as_a <rtx_insn *> (insn);
   5463  1.1  mrg }
   5464  1.1  mrg 
   5465  1.1  mrg /* Return true if we need secondary memory moves for insn in
   5466  1.1  mrg    USAGE_INSNS after inserting inherited pseudo of class INHER_CL
   5467  1.1  mrg    into the insn.  */
   5468  1.1  mrg static bool
   5469  1.1  mrg check_secondary_memory_needed_p (enum reg_class inher_cl ATTRIBUTE_UNUSED,
   5470  1.1  mrg 				 rtx usage_insns ATTRIBUTE_UNUSED)
   5471  1.1  mrg {
   5472  1.1  mrg   rtx_insn *insn;
   5473  1.1  mrg   rtx set, dest;
   5474  1.1  mrg   enum reg_class cl;
   5475  1.1  mrg 
   5476  1.1  mrg   if (inher_cl == ALL_REGS
   5477  1.1  mrg       || (insn = skip_usage_debug_insns (usage_insns)) == NULL_RTX)
   5478  1.1  mrg     return false;
   5479  1.1  mrg   lra_assert (INSN_P (insn));
   5480  1.1  mrg   if ((set = single_set (insn)) == NULL_RTX || ! REG_P (SET_DEST (set)))
   5481  1.1  mrg     return false;
   5482  1.1  mrg   dest = SET_DEST (set);
   5483  1.1  mrg   if (! REG_P (dest))
   5484  1.1  mrg     return false;
   5485  1.1  mrg   lra_assert (inher_cl != NO_REGS);
   5486  1.1  mrg   cl = get_reg_class (REGNO (dest));
   5487  1.1  mrg   return (cl != NO_REGS && cl != ALL_REGS
   5488  1.1  mrg 	  && targetm.secondary_memory_needed (GET_MODE (dest), inher_cl, cl));
   5489  1.1  mrg }
   5490  1.1  mrg 
   5491  1.1  mrg /* Registers involved in inheritance/split in the current EBB
   5492  1.1  mrg    (inheritance/split pseudos and original registers).	*/
   5493  1.1  mrg static bitmap_head check_only_regs;
   5494  1.1  mrg 
/* Reload pseudos cannot be involved in invariant inheritance in the
   current EBB.  */
   5497  1.1  mrg static bitmap_head invalid_invariant_regs;
   5498  1.1  mrg 
   5499  1.1  mrg /* Do inheritance transformations for insn INSN, which defines (if
   5500  1.1  mrg    DEF_P) or uses ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which
   5501  1.1  mrg    instruction in the EBB next uses ORIGINAL_REGNO; it has the same
   5502  1.1  mrg    form as the "insns" field of usage_insns.  Return true if we
   5503  1.1  mrg    succeed in such transformation.
   5504  1.1  mrg 
   5505  1.1  mrg    The transformations look like:
   5506  1.1  mrg 
   5507  1.1  mrg      p <- ...		  i <- ...
   5508  1.1  mrg      ...		  p <- i    (new insn)
   5509  1.1  mrg      ...	     =>
   5510  1.1  mrg      <- ... p ...	  <- ... i ...
   5511  1.1  mrg    or
   5512  1.1  mrg      ...		  i <- p    (new insn)
   5513  1.1  mrg      <- ... p ...	  <- ... i ...
   5514  1.1  mrg      ...	     =>
   5515  1.1  mrg      <- ... p ...	  <- ... i ...
   5516  1.1  mrg    where p is a spilled original pseudo and i is a new inheritance pseudo.
   5517  1.1  mrg 
   5518  1.1  mrg 
   5519  1.1  mrg    The inheritance pseudo has the smallest class of two classes CL and
   5520  1.1  mrg    class of ORIGINAL REGNO.  */
static bool
inherit_reload_reg (bool def_p, int original_regno,
		    enum reg_class cl, rtx_insn *insn, rtx next_usage_insns)
{
  /* Inheritance adds moves; not worth it when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  enum reg_class rclass = lra_get_allocno_class (original_regno);
  rtx original_reg = regno_reg_rtx[original_regno];
  rtx new_reg, usage_insn;
  rtx_insn *new_insns;

  lra_assert (! usage_insns[original_regno].after_p);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
  /* Disjoint classes mean an inheritance pseudo could satisfy at most
     one of the two uses -- reject.  */
  if (! ira_reg_classes_intersect_p[cl][rclass])
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for %d "
		   "because of disjoint classes %s and %s\n",
		   original_regno, reg_class_names[cl],
		   reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  if ((ira_class_subset_p[cl][rclass] && cl != rclass)
      /* We don't use a subset of two classes because it can be
	 NO_REGS.  This transformation is still profitable in most
	 cases even if the classes are not intersected as register
	 move is probably cheaper than a memory load.  */
      || ira_class_hard_regs_num[cl] < ira_class_hard_regs_num[rclass])
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "    Use smallest class of %s and %s\n",
		 reg_class_names[cl], reg_class_names[rclass]);

      rclass = cl;
    }
  if (check_secondary_memory_needed_p (rclass, next_usage_insns))
    {
      /* Reject inheritance resulting in secondary memory moves.
	 Otherwise, there is a danger in LRA cycling.  Also such
	 transformation will be unprofitable.  */
      if (lra_dump_file != NULL)
	{
	  rtx_insn *insn = skip_usage_debug_insns (next_usage_insns);
	  rtx set = single_set (insn);

	  lra_assert (set != NULL_RTX);

	  rtx dest = SET_DEST (set);

	  lra_assert (REG_P (dest));
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for insn %d(%s)<-%d(%s) "
		   "as secondary mem is needed\n",
		   REGNO (dest), reg_class_names[get_reg_class (REGNO (dest))],
		   original_regno, reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  /* Create the inheritance pseudo and the single connecting move
     (direction depends on whether INSN defines or uses the
     original).  */
  new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
				rclass, NULL, "inheritance");
  start_sequence ();
  if (def_p)
    lra_emit_move (original_reg, new_reg);
  else
    lra_emit_move (new_reg, original_reg);
  new_insns = get_insns ();
  end_sequence ();
  /* The connecting move must be a single insn, otherwise the
     transformation is not profitable.  */
  if (NEXT_INSN (new_insns) != NULL_RTX)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance %d->%d "
		   "as it results in 2 or more insns:\n",
		   original_regno, REGNO (new_reg));
	  dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  lra_substitute_pseudo_within_insn (insn, original_regno, new_reg, false);
  lra_update_insn_regno_info (insn);
  if (! def_p)
    /* We now have a new usage insn for original regno.  */
    setup_next_usage_insn (original_regno, new_insns, reloads_num, false);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "    Original reg change %d->%d (bb%d):\n",
	     original_regno, REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
  /* Record how to undo this inheritance and mark both registers for
     the undo pass.  */
  lra_reg_info[REGNO (new_reg)].restore_rtx = regno_reg_rtx[original_regno];
  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
  bitmap_set_bit (&check_only_regs, original_regno);
  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
  if (def_p)
    lra_process_new_insns (insn, NULL, new_insns,
			   "Add original<-inheritance");
  else
    lra_process_new_insns (insn, new_insns, NULL,
			   "Add inheritance<-original");
  /* Replace the original pseudo with the inheritance pseudo in all
     remembered next usage insns: optional INSN_LIST debug insns
     followed by a final non-debug insn.  */
  while (next_usage_insns != NULL_RTX)
    {
      if (GET_CODE (next_usage_insns) != INSN_LIST)
	{
	  usage_insn = next_usage_insns;
	  lra_assert (NONDEBUG_INSN_P (usage_insn));
	  next_usage_insns = NULL;
	}
      else
	{
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	}
      lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
			     DEBUG_INSN_P (usage_insn));
      lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
      if (lra_dump_file != NULL)
	{
	  basic_block bb = BLOCK_FOR_INSN (usage_insn);
	  fprintf (lra_dump_file,
		   "    Inheritance reuse change %d->%d (bb%d):\n",
		   original_regno, REGNO (new_reg),
		   bb ? bb->index : -1);
	  dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
	}
    }
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
  return true;
}
   5662  1.1  mrg 
   5663  1.1  mrg /* Return true if we need a caller save/restore for pseudo REGNO which
   5664  1.1  mrg    was assigned to a hard register.  */
   5665  1.1  mrg static inline bool
   5666  1.1  mrg need_for_call_save_p (int regno)
   5667  1.1  mrg {
   5668  1.1  mrg   lra_assert (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0);
   5669  1.1  mrg   if (usage_insns[regno].calls_num < calls_num)
   5670  1.1  mrg     {
   5671  1.1  mrg       unsigned int abis = 0;
   5672  1.1  mrg       for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
   5673  1.1  mrg 	if (last_call_for_abi[i] > usage_insns[regno].calls_num)
   5674  1.1  mrg 	  abis |= 1 << i;
   5675  1.1  mrg       gcc_assert (abis);
   5676  1.1  mrg       if (call_clobbered_in_region_p (abis, full_and_partial_call_clobbers,
   5677  1.1  mrg 				      PSEUDO_REGNO_MODE (regno),
   5678  1.1  mrg 				      reg_renumber[regno]))
   5679  1.1  mrg 	return true;
   5680  1.1  mrg     }
   5681  1.1  mrg   return false;
   5682  1.1  mrg }
   5683  1.1  mrg 
   5684  1.1  mrg /* Global registers occurring in the current EBB.  */
   5685  1.1  mrg static bitmap_head ebb_global_regs;
   5686  1.1  mrg 
   5687  1.1  mrg /* Return true if we need a split for hard register REGNO or pseudo
   5688  1.1  mrg    REGNO which was assigned to a hard register.
   5689  1.1  mrg    POTENTIAL_RELOAD_HARD_REGS contains hard registers which might be
   5690  1.1  mrg    used for reloads since the EBB end.	It is an approximation of the
   5691  1.1  mrg    used hard registers in the split range.  The exact value would
   5692  1.1  mrg    require expensive calculations.  If we were aggressive with
   5693  1.1  mrg    splitting because of the approximation, the split pseudo will save
   5694  1.1  mrg    the same hard register assignment and will be removed in the undo
   5695  1.1  mrg    pass.  We still need the approximation because too aggressive
   5696  1.1  mrg    splitting would result in too inaccurate cost calculation in the
   5697  1.1  mrg    assignment pass because of too many generated moves which will be
   5698  1.1  mrg    probably removed in the undo pass.  */
   5699  1.1  mrg static inline bool
   5700  1.1  mrg need_for_split_p (HARD_REG_SET potential_reload_hard_regs, int regno)
   5701  1.1  mrg {
   5702  1.1  mrg   int hard_regno = regno < FIRST_PSEUDO_REGISTER ? regno : reg_renumber[regno];
   5703  1.1  mrg 
   5704  1.1  mrg   lra_assert (hard_regno >= 0);
   5705  1.1  mrg   return ((TEST_HARD_REG_BIT (potential_reload_hard_regs, hard_regno)
   5706  1.1  mrg 	   /* Don't split eliminable hard registers, otherwise we can
   5707  1.1  mrg 	      split hard registers like hard frame pointer, which
   5708  1.1  mrg 	      lives on BB start/end according to DF-infrastructure,
   5709  1.1  mrg 	      when there is a pseudo assigned to the register and
   5710  1.1  mrg 	      living in the same BB.  */
   5711  1.1  mrg 	   && (regno >= FIRST_PSEUDO_REGISTER
   5712  1.1  mrg 	       || ! TEST_HARD_REG_BIT (eliminable_regset, hard_regno))
   5713  1.1  mrg 	   && ! TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno)
   5714  1.1  mrg 	   /* Don't split call clobbered hard regs living through
   5715  1.1  mrg 	      calls, otherwise we might have a check problem in the
   5716  1.1  mrg 	      assign sub-pass as in the most cases (exception is a
   5717  1.1  mrg 	      situation when check_and_force_assignment_correctness_p value is
   5718  1.1  mrg 	      true) the assign pass assumes that all pseudos living
   5719  1.1  mrg 	      through calls are assigned to call saved hard regs.  */
   5720  1.1  mrg 	   && (regno >= FIRST_PSEUDO_REGISTER
   5721  1.1  mrg 	       || !TEST_HARD_REG_BIT (full_and_partial_call_clobbers, regno))
   5722  1.1  mrg 	   /* We need at least 2 reloads to make pseudo splitting
   5723  1.1  mrg 	      profitable.  We should provide hard regno splitting in
   5724  1.1  mrg 	      any case to solve 1st insn scheduling problem when
   5725  1.1  mrg 	      moving hard register definition up might result in
   5726  1.1  mrg 	      impossibility to find hard register for reload pseudo of
   5727  1.1  mrg 	      small register class.  */
   5728  1.1  mrg 	   && (usage_insns[regno].reloads_num
   5729  1.1  mrg 	       + (regno < FIRST_PSEUDO_REGISTER ? 0 : 3) < reloads_num)
   5730  1.1  mrg 	   && (regno < FIRST_PSEUDO_REGISTER
   5731  1.1  mrg 	       /* For short living pseudos, spilling + inheritance can
   5732  1.1  mrg 		  be considered a substitution for splitting.
   5733  1.1  mrg 		  Therefore we do not splitting for local pseudos.  It
   5734  1.1  mrg 		  decreases also aggressiveness of splitting.  The
   5735  1.1  mrg 		  minimal number of references is chosen taking into
   5736  1.1  mrg 		  account that for 2 references splitting has no sense
   5737  1.1  mrg 		  as we can just spill the pseudo.  */
   5738  1.1  mrg 	       || (regno >= FIRST_PSEUDO_REGISTER
   5739  1.1  mrg 		   && lra_reg_info[regno].nrefs > 3
   5740  1.1  mrg 		   && bitmap_bit_p (&ebb_global_regs, regno))))
   5741  1.1  mrg 	  || (regno >= FIRST_PSEUDO_REGISTER && need_for_call_save_p (regno)));
   5742  1.1  mrg }
   5743  1.1  mrg 
   5744  1.1  mrg /* Return class for the split pseudo created from original pseudo with
   5745  1.1  mrg    ALLOCNO_CLASS and MODE which got a hard register HARD_REGNO.	 We
   5746  1.1  mrg    choose subclass of ALLOCNO_CLASS which contains HARD_REGNO and
   5747  1.1  mrg    results in no secondary memory movements.  */
   5748  1.1  mrg static enum reg_class
   5749  1.1  mrg choose_split_class (enum reg_class allocno_class,
   5750  1.1  mrg 		    int hard_regno ATTRIBUTE_UNUSED,
   5751  1.1  mrg 		    machine_mode mode ATTRIBUTE_UNUSED)
   5752  1.1  mrg {
   5753  1.1  mrg   int i;
   5754  1.1  mrg   enum reg_class cl, best_cl = NO_REGS;
   5755  1.1  mrg   enum reg_class hard_reg_class ATTRIBUTE_UNUSED
   5756  1.1  mrg     = REGNO_REG_CLASS (hard_regno);
   5757  1.1  mrg 
   5758  1.1  mrg   if (! targetm.secondary_memory_needed (mode, allocno_class, allocno_class)
   5759  1.1  mrg       && TEST_HARD_REG_BIT (reg_class_contents[allocno_class], hard_regno))
   5760  1.1  mrg     return allocno_class;
   5761  1.1  mrg   for (i = 0;
   5762  1.1  mrg        (cl = reg_class_subclasses[allocno_class][i]) != LIM_REG_CLASSES;
   5763  1.1  mrg        i++)
   5764  1.1  mrg     if (! targetm.secondary_memory_needed (mode, cl, hard_reg_class)
   5765  1.1  mrg 	&& ! targetm.secondary_memory_needed (mode, hard_reg_class, cl)
   5766  1.1  mrg 	&& TEST_HARD_REG_BIT (reg_class_contents[cl], hard_regno)
   5767  1.1  mrg 	&& (best_cl == NO_REGS
   5768  1.1  mrg 	    || ira_class_hard_regs_num[best_cl] < ira_class_hard_regs_num[cl]))
   5769  1.1  mrg       best_cl = cl;
   5770  1.1  mrg   return best_cl;
   5771  1.1  mrg }
   5772  1.1  mrg 
   5773  1.1  mrg /* Copy any equivalence information from ORIGINAL_REGNO to NEW_REGNO.
   5774  1.1  mrg    It only makes sense to call this function if NEW_REGNO is always
   5775  1.1  mrg    equal to ORIGINAL_REGNO.  */
   5776  1.1  mrg 
   5777  1.1  mrg static void
   5778  1.1  mrg lra_copy_reg_equiv (unsigned int new_regno, unsigned int original_regno)
   5779  1.1  mrg {
   5780  1.1  mrg   if (!ira_reg_equiv[original_regno].defined_p)
   5781  1.1  mrg     return;
   5782  1.1  mrg 
   5783  1.1  mrg   ira_expand_reg_equiv ();
   5784  1.1  mrg   ira_reg_equiv[new_regno].defined_p = true;
   5785  1.1  mrg   if (ira_reg_equiv[original_regno].memory)
   5786  1.1  mrg     ira_reg_equiv[new_regno].memory
   5787  1.1  mrg       = copy_rtx (ira_reg_equiv[original_regno].memory);
   5788  1.1  mrg   if (ira_reg_equiv[original_regno].constant)
   5789  1.1  mrg     ira_reg_equiv[new_regno].constant
   5790  1.1  mrg       = copy_rtx (ira_reg_equiv[original_regno].constant);
   5791  1.1  mrg   if (ira_reg_equiv[original_regno].invariant)
   5792  1.1  mrg     ira_reg_equiv[new_regno].invariant
   5793  1.1  mrg       = copy_rtx (ira_reg_equiv[original_regno].invariant);
   5794  1.1  mrg }
   5795  1.1  mrg 
/* Do split transformations for insn INSN, which defines or uses
   ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which instruction in
   the EBB next uses ORIGINAL_REGNO; it has the same form as the
   "insns" field of usage_insns.  If TO is not NULL, we don't use
   usage_insns, we put restore insns after TO insn.  It is a case when
   we call it from lra_split_hard_reg_for, outside the inheritance
   pass.

   The transformations look like:

     p <- ...		  p <- ...
     ...		  s <- p    (new insn -- save)
     ...	     =>
     ...		  p <- s    (new insn -- restore)
     <- ... p ...	  <- ... p ...
   or
     <- ... p ...	  <- ... p ...
     ...		  s <- p    (new insn -- save)
     ...	     =>
     ...		  p <- s    (new insn -- restore)
     <- ... p ...	  <- ... p ...

   where p is an original pseudo got a hard register or a hard
   register and s is a new split pseudo.  The save is put before INSN
   if BEFORE_P is true.	 Return true if we succeed in such
   transformation.  */
static bool
split_reg (bool before_p, int original_regno, rtx_insn *insn,
	   rtx next_usage_insns, rtx_insn *to)
{
  enum reg_class rclass;
  rtx original_reg;
  int hard_regno, nregs;
  rtx new_reg, usage_insn;
  rtx_insn *restore, *save;
  bool after_p;
  bool call_save_p;
  machine_mode mode;

  if (original_regno < FIRST_PSEUDO_REGISTER)
    {
      /* Splitting a hard register: derive the allocno class from the
	 register's own class and pick a suitable access mode.  */
      rclass = ira_allocno_class_translate[REGNO_REG_CLASS (original_regno)];
      hard_regno = original_regno;
      call_save_p = false;
      nregs = 1;
      mode = lra_reg_info[hard_regno].biggest_mode;
      machine_mode reg_rtx_mode = GET_MODE (regno_reg_rtx[hard_regno]);
      /* A reg can have a biggest_mode of VOIDmode if it was only ever seen as
	 part of a multi-word register.  In that case, just use the reg_rtx
	 mode.  Do the same also if the biggest mode was larger than a register
	 or we can not compare the modes.  Otherwise, limit the size to that of
	 the biggest access in the function or to the natural mode at least.  */
      if (mode == VOIDmode
	  || !ordered_p (GET_MODE_PRECISION (mode),
			 GET_MODE_PRECISION (reg_rtx_mode))
	  || paradoxical_subreg_p (mode, reg_rtx_mode)
	  || maybe_gt (GET_MODE_PRECISION (reg_rtx_mode), GET_MODE_PRECISION (mode)))
	{
	  original_reg = regno_reg_rtx[hard_regno];
	  mode = reg_rtx_mode;
	}
      else
	original_reg = gen_rtx_REG (mode, hard_regno);
    }
  else
    {
      /* Splitting a pseudo which already got a hard register.  */
      mode = PSEUDO_REGNO_MODE (original_regno);
      hard_regno = reg_renumber[original_regno];
      nregs = hard_regno_nregs (hard_regno, mode);
      rclass = lra_get_allocno_class (original_regno);
      original_reg = regno_reg_rtx[original_regno];
      call_save_p = need_for_call_save_p (original_regno);
    }
  lra_assert (hard_regno >= 0);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  ((((((((((((((((((((((((((((((((((((((((((((((((\n");

  if (call_save_p)
    {
      /* For a caller save, use the mode the target prefers for saving
	 HARD_REGNO around calls; the save pseudo gets class NO_REGS.  */
      mode = HARD_REGNO_CALLER_SAVE_MODE (hard_regno,
					  hard_regno_nregs (hard_regno, mode),
					  mode);
      new_reg = lra_create_new_reg (mode, NULL_RTX, NO_REGS, NULL, "save");
    }
  else
    {
      rclass = choose_split_class (rclass, hard_regno, mode);
      if (rclass == NO_REGS)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Rejecting split of %d(%s): "
		       "no good reg class for %d(%s)\n",
		       original_regno,
		       reg_class_names[lra_get_allocno_class (original_regno)],
		       hard_regno,
		       reg_class_names[REGNO_REG_CLASS (hard_regno)]);
	      fprintf
		(lra_dump_file,
		 "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
	    }
	  return false;
	}
      /* Split_if_necessary can split hard registers used as part of a
	 multi-register mode but splits each register individually.  The
	 mode used for each independent register may not be supported
	 so reject the split.  Splitting the wider mode should theoretically
	 be possible but is not implemented.  */
      if (!targetm.hard_regno_mode_ok (hard_regno, mode))
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Rejecting split of %d(%s): unsuitable mode %s\n",
		       original_regno,
		       reg_class_names[lra_get_allocno_class (original_regno)],
		       GET_MODE_NAME (mode));
	      fprintf
		(lra_dump_file,
		 "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
	    }
	  return false;
	}
      /* The split pseudo keeps the original's hard register.  */
      new_reg = lra_create_new_reg (mode, original_reg, rclass, NULL, "split");
      reg_renumber[REGNO (new_reg)] = hard_regno;
    }
  int new_regno = REGNO (new_reg);
  /* Generate the save (s <- p) and restore (p <- s) moves.  A
     multi-insn sequence (NEXT_INSN set) is only acceptable for caller
     saves; otherwise reject the split as unprofitable.  */
  save = emit_spill_move (true, new_reg, original_reg);
  if (NEXT_INSN (save) != NULL_RTX && !call_save_p)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf
	    (lra_dump_file,
	     "	  Rejecting split %d->%d resulting in > 2 save insns:\n",
	     original_regno, new_regno);
	  dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	))))))))))))))))))))))))))))))))))))))))))))))))\n");
	}
      return false;
    }
  restore = emit_spill_move (false, new_reg, original_reg);
  if (NEXT_INSN (restore) != NULL_RTX && !call_save_p)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "	Rejecting split %d->%d "
		   "resulting in > 2 restore insns:\n",
		   original_regno, new_regno);
	  dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	))))))))))))))))))))))))))))))))))))))))))))))))\n");
	}
      return false;
    }
  /* Transfer equivalence information to the spill register, so that
     if we fail to allocate the spill register, we have the option of
     rematerializing the original value instead of spilling to the stack.  */
  if (!HARD_REGISTER_NUM_P (original_regno)
      && mode == PSEUDO_REGNO_MODE (original_regno))
    lra_copy_reg_equiv (new_regno, original_regno);
  lra_reg_info[new_regno].restore_rtx = regno_reg_rtx[original_regno];
  bitmap_set_bit (&lra_split_regs, new_regno);
  if (to != NULL)
    {
      /* Called from outside the inheritance pass: the restore simply
	 goes after TO.  */
      lra_assert (next_usage_insns == NULL);
      usage_insn = to;
      after_p = TRUE;
    }
  else
    {
      /* We need check_only_regs only inside the inheritance pass.  */
      bitmap_set_bit (&check_only_regs, new_regno);
      bitmap_set_bit (&check_only_regs, original_regno);
      after_p = usage_insns[original_regno].after_p;
      /* Walk the usage chain: its INSN_LIST elements are debug insns,
	 which we rewrite to use the split pseudo; the final non-list
	 element is the next real usage insn (or a note).  */
      for (;;)
	{
	  if (GET_CODE (next_usage_insns) != INSN_LIST)
	    {
	      usage_insn = next_usage_insns;
	      break;
	    }
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	  lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
				 true);
	  lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "    Split reuse change %d->%d:\n",
		       original_regno, new_regno);
	      dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
	    }
	}
    }
  lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
  lra_assert (usage_insn != insn || (after_p && before_p));
  /* Place the restore around the next usage and the save around INSN,
     on the sides requested by AFTER_P/BEFORE_P.  */
  lra_process_new_insns (as_a <rtx_insn *> (usage_insn),
			 after_p ? NULL : restore,
			 after_p ? restore : NULL,
			 call_save_p
			 ?  "Add reg<-save" : "Add reg<-split");
  lra_process_new_insns (insn, before_p ? save : NULL,
			 before_p ? NULL : save,
			 call_save_p
			 ?  "Add save<-reg" : "Add split<-reg");
  if (nregs > 1 || original_regno < FIRST_PSEUDO_REGISTER)
    /* If we are trying to split multi-register.  We should check
       conflicts on the next assignment sub-pass.  IRA can allocate on
       sub-register levels, LRA do this on pseudos level right now and
       this discrepancy may create allocation conflicts after
       splitting.

       If we are trying to split hard register we should also check conflicts
       as such splitting can create artificial conflict of the hard register
       with another pseudo because of simplified conflict calculation in
       LRA.  */
    check_and_force_assignment_correctness_p = true;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  ))))))))))))))))))))))))))))))))))))))))))))))))\n");
  return true;
}
   6024  1.1  mrg 
/* Split a hard reg for reload pseudo REGNO having RCLASS and living
   in the range [FROM, TO].  Return true if did a split.  Otherwise,
   return false.  */
bool
spill_hard_reg_in_range (int regno, enum reg_class rclass, rtx_insn *from, rtx_insn *to)
{
  int i, hard_regno;
  int rclass_size;
  rtx_insn *insn;
  unsigned int uid;
  bitmap_iterator bi;
  HARD_REG_SET ignore;

  lra_assert (from != NULL && to != NULL);
  /* Build the set of hard registers that must not be split: the
     non-allocatable ones plus every hard register mentioned
     (explicitly or implicitly) by an insn referencing REGNO.  */
  ignore = lra_no_alloc_regs;
  EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
    {
      lra_insn_recog_data_t id = lra_insn_recog_data[uid];
      struct lra_static_insn_data *static_id = id->insn_static_data;
      struct lra_insn_reg *reg;

      for (reg = id->regs; reg != NULL; reg = reg->next)
	if (reg->regno < FIRST_PSEUDO_REGISTER)
	  SET_HARD_REG_BIT (ignore, reg->regno);
      for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
	SET_HARD_REG_BIT (ignore, reg->regno);
    }
  rclass_size = ira_class_hard_regs_num[rclass];
  for (i = 0; i < rclass_size; i++)
    {
      /* Consider only registers of RCLASS that conflict with REGNO
	 (presumably those are the ones whose splitting can free a
	 register for it -- confirm against lra_split_hard_reg_for)
	 and are not in the IGNORE set.  */
      hard_regno = ira_class_hard_regs[rclass][i];
      if (! TEST_HARD_REG_BIT (lra_reg_info[regno].conflict_hard_regs, hard_regno)
	  || TEST_HARD_REG_BIT (ignore, hard_regno))
	continue;
      /* Scan [FROM, TO]; exit early if any insn there references the
	 candidate HARD_REGNO, either directly or as an implicit
	 clobber/use recorded in the static insn data.  */
      for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
	{
	  struct lra_static_insn_data *static_id;
	  struct lra_insn_reg *reg;

	  if (!INSN_P (insn))
	      continue;
	  if (bitmap_bit_p (&lra_reg_info[hard_regno].insn_bitmap,
			    INSN_UID (insn)))
	    break;
	  static_id = lra_get_insn_recog_data (insn)->insn_static_data;
	  for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
	    if (reg->regno == hard_regno)
	      break;
	  if (reg != NULL)
	    break;
	}
      /* An early exit from the scan means HARD_REGNO is used
	 somewhere in the range -- try the next candidate.  */
      if (insn != NEXT_INSN (to))
	continue;
      if (split_reg (TRUE, hard_regno, from, NULL, to))
	return true;
    }
  return false;
}
   6083  1.1  mrg 
/* Recognize that we need a split transformation for insn INSN, which
   defines or uses REGNO in its insn biggest MODE (we use it only if
   REGNO is a hard register).  POTENTIAL_RELOAD_HARD_REGS contains
   hard registers which might be used for reloads since the EBB end.
   Put the save before INSN if BEFORE_P is true.  MAX_UID is the
   maximal uid before starting INSN processing.  Return true if we
   succeed in such transformation.  */
static bool
split_if_necessary (int regno, machine_mode mode,
		    HARD_REG_SET potential_reload_hard_regs,
		    bool before_p, rtx_insn *insn, int max_uid)
{
  bool res = false;
  int i, nregs = 1;
  rtx next_usage_insns;

  /* For a hard register, each constituent register of the MODE-wide
     access is considered for splitting individually.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    nregs = hard_regno_nregs (regno, mode);
  for (i = 0; i < nregs; i++)
    if (usage_insns[regno + i].check == curr_usage_insns_check
	&& (next_usage_insns = usage_insns[regno + i].insns) != NULL_RTX
	/* To avoid processing the register twice or more.  */
	&& ((GET_CODE (next_usage_insns) != INSN_LIST
	     && INSN_UID (next_usage_insns) < max_uid)
	    || (GET_CODE (next_usage_insns) == INSN_LIST
		&& (INSN_UID (XEXP (next_usage_insns, 0)) < max_uid)))
	&& need_for_split_p (potential_reload_hard_regs, regno + i)
	&& split_reg (before_p, regno + i, insn, next_usage_insns, NULL))
    res = true;
  return res;
}
   6115  1.1  mrg 
   6116  1.1  mrg /* Return TRUE if rtx X is considered as an invariant for
   6117  1.1  mrg    inheritance.  */
   6118  1.1  mrg static bool
   6119  1.1  mrg invariant_p (const_rtx x)
   6120  1.1  mrg {
   6121  1.1  mrg   machine_mode mode;
   6122  1.1  mrg   const char *fmt;
   6123  1.1  mrg   enum rtx_code code;
   6124  1.1  mrg   int i, j;
   6125  1.1  mrg 
   6126  1.1  mrg   if (side_effects_p (x))
   6127  1.1  mrg     return false;
   6128  1.1  mrg 
   6129  1.1  mrg   code = GET_CODE (x);
   6130  1.1  mrg   mode = GET_MODE (x);
   6131  1.1  mrg   if (code == SUBREG)
   6132  1.1  mrg     {
   6133  1.1  mrg       x = SUBREG_REG (x);
   6134  1.1  mrg       code = GET_CODE (x);
   6135  1.1  mrg       mode = wider_subreg_mode (mode, GET_MODE (x));
   6136  1.1  mrg     }
   6137  1.1  mrg 
   6138  1.1  mrg   if (MEM_P (x))
   6139  1.1  mrg     return false;
   6140  1.1  mrg 
   6141  1.1  mrg   if (REG_P (x))
   6142  1.1  mrg     {
   6143  1.1  mrg       int i, nregs, regno = REGNO (x);
   6144  1.1  mrg 
   6145  1.1  mrg       if (regno >= FIRST_PSEUDO_REGISTER || regno == STACK_POINTER_REGNUM
   6146  1.1  mrg 	  || TEST_HARD_REG_BIT (eliminable_regset, regno)
   6147  1.1  mrg 	  || GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
   6148  1.1  mrg 	return false;
   6149  1.1  mrg       nregs = hard_regno_nregs (regno, mode);
   6150  1.1  mrg       for (i = 0; i < nregs; i++)
   6151  1.1  mrg 	if (! fixed_regs[regno + i]
   6152  1.1  mrg 	    /* A hard register may be clobbered in the current insn
   6153  1.1  mrg 	       but we can ignore this case because if the hard
   6154  1.1  mrg 	       register is used it should be set somewhere after the
   6155  1.1  mrg 	       clobber.  */
   6156  1.1  mrg 	    || bitmap_bit_p (&invalid_invariant_regs, regno + i))
   6157  1.1  mrg 	  return false;
   6158  1.1  mrg     }
   6159  1.1  mrg   fmt = GET_RTX_FORMAT (code);
   6160  1.1  mrg   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
   6161  1.1  mrg     {
   6162  1.1  mrg       if (fmt[i] == 'e')
   6163  1.1  mrg 	{
   6164  1.1  mrg 	  if (! invariant_p (XEXP (x, i)))
   6165  1.1  mrg 	    return false;
   6166  1.1  mrg 	}
   6167  1.1  mrg       else if (fmt[i] == 'E')
   6168  1.1  mrg 	{
   6169  1.1  mrg 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
   6170  1.1  mrg 	    if (! invariant_p (XVECEXP (x, i, j)))
   6171  1.1  mrg 	      return false;
   6172  1.1  mrg 	}
   6173  1.1  mrg     }
   6174  1.1  mrg   return true;
   6175  1.1  mrg }
   6176  1.1  mrg 
/* We have 'dest_reg <- invariant'.  Let us try to make an invariant
   inheritance transformation (using dest_reg instead invariant in a
   subsequent insn).  Return true if the transformation was made.  */
static bool
process_invariant_for_inheritance (rtx dst_reg, rtx invariant_rtx)
{
  invariant_ptr_t invariant_ptr;
  rtx_insn *insn, *new_insns;
  rtx insn_set, insn_reg, new_reg;
  int insn_regno;
  bool succ_p = false;
  int dst_regno = REGNO (dst_reg);
  machine_mode dst_mode = GET_MODE (dst_reg);
  enum reg_class cl = lra_get_allocno_class (dst_regno), insn_reg_cl;

  /* Look up (or record) INVARIANT_RTX in the invariant table.  */
  invariant_ptr = insert_invariant (invariant_rtx);
  if ((insn = invariant_ptr->insn) != NULL_RTX)
    {
      /* We have a subsequent insn using the invariant.  */
      insn_set = single_set (insn);
      lra_assert (insn_set != NULL);
      insn_reg = SET_DEST (insn_set);
      lra_assert (REG_P (insn_reg));
      insn_regno = REGNO (insn_reg);
      insn_reg_cl = lra_get_allocno_class (insn_regno);

      if (dst_mode == GET_MODE (insn_reg)
	  /* We should consider only result move reg insns which are
	     cheap.  NOTE(review): 2 appears to be the default cost of
	     a single register-to-register move -- confirm against the
	     target's register_move_cost hook.  */
	  && targetm.register_move_cost (dst_mode, cl, insn_reg_cl) == 2
	  && targetm.register_move_cost (dst_mode, cl, cl) == 2)
	{
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file,
		     "    [[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[\n");
	  /* Create the inheritance pseudo; its restore_rtx remembers
	     the original invariant computation for the undo pass.  */
	  new_reg = lra_create_new_reg (dst_mode, dst_reg, cl, NULL,
					"invariant inheritance");
	  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
	  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
	  lra_reg_info[REGNO (new_reg)].restore_rtx = PATTERN (insn);
	  /* Emit 'new_reg <- dst_reg' after the current insn ...  */
	  start_sequence ();
	  lra_emit_move (new_reg, dst_reg);
	  new_insns = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (curr_insn, NULL, new_insns,
				 "Add invariant inheritance<-original");
	  /* ... and '<dest of INSN> <- new_reg' after INSN, which
	     makes the original invariant computation in INSN
	     redundant, so delete it.  */
	  start_sequence ();
	  lra_emit_move (SET_DEST (insn_set), new_reg);
	  new_insns = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (insn, NULL, new_insns,
				 "Changing reload<-inheritance");
	  lra_set_insn_deleted (insn);
	  succ_p = true;
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Invariant inheritance reuse change %d (bb%d):\n",
		       REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
	      dump_insn_slim (lra_dump_file, insn);
	      fprintf (lra_dump_file,
		       "	  ]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]\n");
	    }
	}
    }
  /* Record the current insn as the latest one computing the
     invariant.  */
  invariant_ptr->insn = curr_insn;
  return succ_p;
}
   6245  1.1  mrg 
/* Subset of check_only_regs that are live at the current program
   point in the current EBB.  */
static bitmap_head live_regs;
   6249  1.1  mrg 
   6250  1.1  mrg /* Update live info in EBB given by its HEAD and TAIL insns after
   6251  1.1  mrg    inheritance/split transformation.  The function removes dead moves
   6252  1.1  mrg    too.	 */
   6253  1.1  mrg static void
   6254  1.1  mrg update_ebb_live_info (rtx_insn *head, rtx_insn *tail)
   6255  1.1  mrg {
   6256  1.1  mrg   unsigned int j;
   6257  1.1  mrg   int i, regno;
   6258  1.1  mrg   bool live_p;
   6259  1.1  mrg   rtx_insn *prev_insn;
   6260  1.1  mrg   rtx set;
   6261  1.1  mrg   bool remove_p;
   6262  1.1  mrg   basic_block last_bb, prev_bb, curr_bb;
   6263  1.1  mrg   bitmap_iterator bi;
   6264  1.1  mrg   struct lra_insn_reg *reg;
   6265  1.1  mrg   edge e;
   6266  1.1  mrg   edge_iterator ei;
   6267  1.1  mrg 
   6268  1.1  mrg   last_bb = BLOCK_FOR_INSN (tail);
   6269  1.1  mrg   prev_bb = NULL;
   6270  1.1  mrg   for (curr_insn = tail;
   6271  1.1  mrg        curr_insn != PREV_INSN (head);
   6272  1.1  mrg        curr_insn = prev_insn)
   6273  1.1  mrg     {
   6274  1.1  mrg       prev_insn = PREV_INSN (curr_insn);
   6275  1.1  mrg       /* We need to process empty blocks too.  They contain
   6276  1.1  mrg 	 NOTE_INSN_BASIC_BLOCK referring for the basic block.  */
   6277  1.1  mrg       if (NOTE_P (curr_insn) && NOTE_KIND (curr_insn) != NOTE_INSN_BASIC_BLOCK)
   6278  1.1  mrg 	continue;
   6279  1.1  mrg       curr_bb = BLOCK_FOR_INSN (curr_insn);
   6280  1.1  mrg       if (curr_bb != prev_bb)
   6281  1.1  mrg 	{
   6282  1.1  mrg 	  if (prev_bb != NULL)
   6283  1.1  mrg 	    {
   6284  1.1  mrg 	      /* Update df_get_live_in (prev_bb):  */
   6285  1.1  mrg 	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
   6286  1.1  mrg 		if (bitmap_bit_p (&live_regs, j))
   6287  1.1  mrg 		  bitmap_set_bit (df_get_live_in (prev_bb), j);
   6288  1.1  mrg 		else
   6289  1.1  mrg 		  bitmap_clear_bit (df_get_live_in (prev_bb), j);
   6290  1.1  mrg 	    }
   6291  1.1  mrg 	  if (curr_bb != last_bb)
   6292  1.1  mrg 	    {
   6293  1.1  mrg 	      /* Update df_get_live_out (curr_bb):  */
   6294  1.1  mrg 	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
   6295  1.1  mrg 		{
   6296  1.1  mrg 		  live_p = bitmap_bit_p (&live_regs, j);
   6297  1.1  mrg 		  if (! live_p)
   6298  1.1  mrg 		    FOR_EACH_EDGE (e, ei, curr_bb->succs)
   6299  1.1  mrg 		      if (bitmap_bit_p (df_get_live_in (e->dest), j))
   6300  1.1  mrg 			{
   6301  1.1  mrg 			  live_p = true;
   6302  1.1  mrg 			  break;
   6303  1.1  mrg 			}
   6304  1.1  mrg 		  if (live_p)
   6305  1.1  mrg 		    bitmap_set_bit (df_get_live_out (curr_bb), j);
   6306  1.1  mrg 		  else
   6307  1.1  mrg 		    bitmap_clear_bit (df_get_live_out (curr_bb), j);
   6308  1.1  mrg 		}
   6309  1.1  mrg 	    }
   6310  1.1  mrg 	  prev_bb = curr_bb;
   6311  1.1  mrg 	  bitmap_and (&live_regs, &check_only_regs, df_get_live_out (curr_bb));
   6312  1.1  mrg 	}
   6313  1.1  mrg       if (! NONDEBUG_INSN_P (curr_insn))
   6314  1.1  mrg 	continue;
   6315  1.1  mrg       curr_id = lra_get_insn_recog_data (curr_insn);
   6316  1.1  mrg       curr_static_id = curr_id->insn_static_data;
   6317  1.1  mrg       remove_p = false;
   6318  1.1  mrg       if ((set = single_set (curr_insn)) != NULL_RTX
   6319  1.1  mrg 	  && REG_P (SET_DEST (set))
   6320  1.1  mrg 	  && (regno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
   6321  1.1  mrg 	  && SET_DEST (set) != pic_offset_table_rtx
   6322  1.1  mrg 	  && bitmap_bit_p (&check_only_regs, regno)
   6323  1.1  mrg 	  && ! bitmap_bit_p (&live_regs, regno))
   6324  1.1  mrg 	remove_p = true;
   6325  1.1  mrg       /* See which defined values die here.  */
   6326  1.1  mrg       for (reg = curr_id->regs; reg != NULL; reg = reg->next)
   6327  1.1  mrg 	if (reg->type == OP_OUT && ! reg->subreg_p)
   6328  1.1  mrg 	  bitmap_clear_bit (&live_regs, reg->regno);
   6329  1.1  mrg       for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
   6330  1.1  mrg 	if (reg->type == OP_OUT && ! reg->subreg_p)
   6331  1.1  mrg 	  bitmap_clear_bit (&live_regs, reg->regno);
   6332  1.1  mrg       if (curr_id->arg_hard_regs != NULL)
   6333  1.1  mrg 	/* Make clobbered argument hard registers die.  */
   6334  1.1  mrg 	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
   6335  1.1  mrg 	  if (regno >= FIRST_PSEUDO_REGISTER)
   6336  1.1  mrg 	    bitmap_clear_bit (&live_regs, regno - FIRST_PSEUDO_REGISTER);
   6337  1.1  mrg       /* Mark each used value as live.  */
   6338  1.1  mrg       for (reg = curr_id->regs; reg != NULL; reg = reg->next)
   6339  1.1  mrg 	if (reg->type != OP_OUT
   6340  1.1  mrg 	    && bitmap_bit_p (&check_only_regs, reg->regno))
   6341  1.1  mrg 	  bitmap_set_bit (&live_regs, reg->regno);
   6342  1.1  mrg       for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
   6343  1.1  mrg 	if (reg->type != OP_OUT
   6344  1.1  mrg 	    && bitmap_bit_p (&check_only_regs, reg->regno))
   6345  1.1  mrg 	  bitmap_set_bit (&live_regs, reg->regno);
   6346  1.1  mrg       if (curr_id->arg_hard_regs != NULL)
   6347  1.1  mrg 	/* Make used argument hard registers live.  */
   6348  1.1  mrg 	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
   6349  1.1  mrg 	  if (regno < FIRST_PSEUDO_REGISTER
   6350  1.1  mrg 	      && bitmap_bit_p (&check_only_regs, regno))
   6351  1.1  mrg 	    bitmap_set_bit (&live_regs, regno);
   6352  1.1  mrg       /* It is quite important to remove dead move insns because it
   6353  1.1  mrg 	 means removing dead store.  We don't need to process them for
   6354  1.1  mrg 	 constraints.  */
   6355  1.1  mrg       if (remove_p)
   6356  1.1  mrg 	{
   6357  1.1  mrg 	  if (lra_dump_file != NULL)
   6358  1.1  mrg 	    {
   6359  1.1  mrg 	      fprintf (lra_dump_file, "	    Removing dead insn:\n ");
   6360  1.1  mrg 	      dump_insn_slim (lra_dump_file, curr_insn);
   6361  1.1  mrg 	    }
   6362  1.1  mrg 	  lra_set_insn_deleted (curr_insn);
   6363  1.1  mrg 	}
   6364  1.1  mrg     }
   6365  1.1  mrg }
   6366  1.1  mrg 
   6367  1.1  mrg /* The structure describes info to do an inheritance for the current
   6368  1.1  mrg    insn.  We need to collect such info first before doing the
   6369  1.1  mrg    transformations because the transformations change the insn
   6370  1.1  mrg    internal representation.  */
   6371  1.1  mrg struct to_inherit
   6372  1.1  mrg {
   6373  1.1  mrg   /* Original regno.  */
   6374  1.1  mrg   int regno;
   6375  1.1  mrg   /* Subsequent insns which can inherit original reg value.  */
   6376  1.1  mrg   rtx insns;
   6377  1.1  mrg };
   6378  1.1  mrg 
   6379  1.1  mrg /* Array containing all info for doing inheritance from the current
   6380  1.1  mrg    insn.  */
   6381  1.1  mrg static struct to_inherit to_inherit[LRA_MAX_INSN_RELOADS];
   6382  1.1  mrg 
   6383  1.1  mrg /* Number elements in the previous array.  */
   6384  1.1  mrg static int to_inherit_num;
   6385  1.1  mrg 
   6386  1.1  mrg /* Add inheritance info REGNO and INSNS. Their meaning is described in
   6387  1.1  mrg    structure to_inherit.  */
   6388  1.1  mrg static void
   6389  1.1  mrg add_to_inherit (int regno, rtx insns)
   6390  1.1  mrg {
   6391  1.1  mrg   int i;
   6392  1.1  mrg 
   6393  1.1  mrg   for (i = 0; i < to_inherit_num; i++)
   6394  1.1  mrg     if (to_inherit[i].regno == regno)
   6395  1.1  mrg       return;
   6396  1.1  mrg   lra_assert (to_inherit_num < LRA_MAX_INSN_RELOADS);
   6397  1.1  mrg   to_inherit[to_inherit_num].regno = regno;
   6398  1.1  mrg   to_inherit[to_inherit_num++].insns = insns;
   6399  1.1  mrg }
   6400  1.1  mrg 
   6401  1.1  mrg /* Return the last non-debug insn in basic block BB, or the block begin
   6402  1.1  mrg    note if none.  */
   6403  1.1  mrg static rtx_insn *
   6404  1.1  mrg get_last_insertion_point (basic_block bb)
   6405  1.1  mrg {
   6406  1.1  mrg   rtx_insn *insn;
   6407  1.1  mrg 
   6408  1.1  mrg   FOR_BB_INSNS_REVERSE (bb, insn)
   6409  1.1  mrg     if (NONDEBUG_INSN_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
   6410  1.1  mrg       return insn;
   6411  1.1  mrg   gcc_unreachable ();
   6412  1.1  mrg }
   6413  1.1  mrg 
   6414  1.1  mrg /* Set up RES by registers living on edges FROM except the edge (FROM,
   6415  1.1  mrg    TO) or by registers set up in a jump insn in BB FROM.  */
   6416  1.1  mrg static void
   6417  1.1  mrg get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
   6418  1.1  mrg {
   6419  1.1  mrg   rtx_insn *last;
   6420  1.1  mrg   struct lra_insn_reg *reg;
   6421  1.1  mrg   edge e;
   6422  1.1  mrg   edge_iterator ei;
   6423  1.1  mrg 
   6424  1.1  mrg   lra_assert (to != NULL);
   6425  1.1  mrg   bitmap_clear (res);
   6426  1.1  mrg   FOR_EACH_EDGE (e, ei, from->succs)
   6427  1.1  mrg     if (e->dest != to)
   6428  1.1  mrg       bitmap_ior_into (res, df_get_live_in (e->dest));
   6429  1.1  mrg   last = get_last_insertion_point (from);
   6430  1.1  mrg   if (! JUMP_P (last))
   6431  1.1  mrg     return;
   6432  1.1  mrg   curr_id = lra_get_insn_recog_data (last);
   6433  1.1  mrg   for (reg = curr_id->regs; reg != NULL; reg = reg->next)
   6434  1.1  mrg     if (reg->type != OP_IN)
   6435  1.1  mrg       bitmap_set_bit (res, reg->regno);
   6436  1.1  mrg }
   6437  1.1  mrg 
   6438  1.1  mrg /* Used as a temporary results of some bitmap calculations.  */
   6439  1.1  mrg static bitmap_head temp_bitmap;
   6440  1.1  mrg 
   6441  1.1  mrg /* We split for reloads of small class of hard regs.  The following
   6442  1.1  mrg    defines how many hard regs the class should have to be qualified as
   6443  1.1  mrg    small.  The code is mostly oriented to x86/x86-64 architecture
   6444  1.1  mrg    where some insns need to use only specific register or pair of
   6445  1.1  mrg    registers and these register can live in RTL explicitly, e.g. for
   6446  1.1  mrg    parameter passing.  */
   6447  1.1  mrg static const int max_small_class_regs_num = 2;
   6448  1.1  mrg 
   6449  1.1  mrg /* Do inheritance/split transformations in EBB starting with HEAD and
   6450  1.1  mrg    finishing on TAIL.  We process EBB insns in the reverse order.
   6451  1.1  mrg    Return true if we did any inheritance/split transformation in the
   6452  1.1  mrg    EBB.
   6453  1.1  mrg 
   6454  1.1  mrg    We should avoid excessive splitting which results in worse code
   6455  1.1  mrg    because of inaccurate cost calculations for spilling new split
   6456  1.1  mrg    pseudos in such case.  To achieve this we do splitting only if
   6457  1.1  mrg    register pressure is high in given basic block and there are reload
   6458  1.1  mrg    pseudos requiring hard registers.  We could do more register
   6459  1.1  mrg    pressure calculations at any given program point to avoid necessary
   6460  1.1  mrg    splitting even more but it is to expensive and the current approach
   6461  1.1  mrg    works well enough.  */
   6462  1.1  mrg static bool
   6463  1.1  mrg inherit_in_ebb (rtx_insn *head, rtx_insn *tail)
   6464  1.1  mrg {
   6465  1.1  mrg   int i, src_regno, dst_regno, nregs;
   6466  1.1  mrg   bool change_p, succ_p, update_reloads_num_p;
   6467  1.1  mrg   rtx_insn *prev_insn, *last_insn;
   6468  1.1  mrg   rtx next_usage_insns, curr_set;
   6469  1.1  mrg   enum reg_class cl;
   6470  1.1  mrg   struct lra_insn_reg *reg;
   6471  1.1  mrg   basic_block last_processed_bb, curr_bb = NULL;
   6472  1.1  mrg   HARD_REG_SET potential_reload_hard_regs, live_hard_regs;
   6473  1.1  mrg   bitmap to_process;
   6474  1.1  mrg   unsigned int j;
   6475  1.1  mrg   bitmap_iterator bi;
   6476  1.1  mrg   bool head_p, after_p;
   6477  1.1  mrg 
   6478  1.1  mrg   change_p = false;
   6479  1.1  mrg   curr_usage_insns_check++;
   6480  1.1  mrg   clear_invariants ();
   6481  1.1  mrg   reloads_num = calls_num = 0;
   6482  1.1  mrg   for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
   6483  1.1  mrg     last_call_for_abi[i] = 0;
   6484  1.1  mrg   CLEAR_HARD_REG_SET (full_and_partial_call_clobbers);
   6485  1.1  mrg   bitmap_clear (&check_only_regs);
   6486  1.1  mrg   bitmap_clear (&invalid_invariant_regs);
   6487  1.1  mrg   last_processed_bb = NULL;
   6488  1.1  mrg   CLEAR_HARD_REG_SET (potential_reload_hard_regs);
   6489  1.1  mrg   live_hard_regs = eliminable_regset | lra_no_alloc_regs;
   6490  1.1  mrg   /* We don't process new insns generated in the loop.	*/
   6491  1.1  mrg   for (curr_insn = tail; curr_insn != PREV_INSN (head); curr_insn = prev_insn)
   6492  1.1  mrg     {
   6493  1.1  mrg       prev_insn = PREV_INSN (curr_insn);
   6494  1.1  mrg       if (BLOCK_FOR_INSN (curr_insn) != NULL)
   6495  1.1  mrg 	curr_bb = BLOCK_FOR_INSN (curr_insn);
   6496  1.1  mrg       if (last_processed_bb != curr_bb)
   6497  1.1  mrg 	{
   6498  1.1  mrg 	  /* We are at the end of BB.  Add qualified living
   6499  1.1  mrg 	     pseudos for potential splitting.  */
   6500  1.1  mrg 	  to_process = df_get_live_out (curr_bb);
   6501  1.1  mrg 	  if (last_processed_bb != NULL)
   6502  1.1  mrg 	    {
   6503  1.1  mrg 	      /* We are somewhere in the middle of EBB.	 */
   6504  1.1  mrg 	      get_live_on_other_edges (curr_bb, last_processed_bb,
   6505  1.1  mrg 				       &temp_bitmap);
   6506  1.1  mrg 	      to_process = &temp_bitmap;
   6507  1.1  mrg 	    }
   6508  1.1  mrg 	  last_processed_bb = curr_bb;
   6509  1.1  mrg 	  last_insn = get_last_insertion_point (curr_bb);
   6510  1.1  mrg 	  after_p = (! JUMP_P (last_insn)
   6511  1.1  mrg 		     && (! CALL_P (last_insn)
   6512  1.1  mrg 			 || (find_reg_note (last_insn,
   6513  1.1  mrg 					   REG_NORETURN, NULL_RTX) == NULL_RTX
   6514  1.1  mrg 			     && ! SIBLING_CALL_P (last_insn))));
   6515  1.1  mrg 	  CLEAR_HARD_REG_SET (potential_reload_hard_regs);
   6516  1.1  mrg 	  EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
   6517  1.1  mrg 	    {
   6518  1.1  mrg 	      if ((int) j >= lra_constraint_new_regno_start)
   6519  1.1  mrg 		break;
   6520  1.1  mrg 	      if (j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
   6521  1.1  mrg 		{
   6522  1.1  mrg 		  if (j < FIRST_PSEUDO_REGISTER)
   6523  1.1  mrg 		    SET_HARD_REG_BIT (live_hard_regs, j);
   6524  1.1  mrg 		  else
   6525  1.1  mrg 		    add_to_hard_reg_set (&live_hard_regs,
   6526  1.1  mrg 					 PSEUDO_REGNO_MODE (j),
   6527  1.1  mrg 					 reg_renumber[j]);
   6528  1.1  mrg 		  setup_next_usage_insn (j, last_insn, reloads_num, after_p);
   6529  1.1  mrg 		}
   6530  1.1  mrg 	    }
   6531  1.1  mrg 	}
   6532  1.1  mrg       src_regno = dst_regno = -1;
   6533  1.1  mrg       curr_set = single_set (curr_insn);
   6534  1.1  mrg       if (curr_set != NULL_RTX && REG_P (SET_DEST (curr_set)))
   6535  1.1  mrg 	dst_regno = REGNO (SET_DEST (curr_set));
   6536  1.1  mrg       if (curr_set != NULL_RTX && REG_P (SET_SRC (curr_set)))
   6537  1.1  mrg 	src_regno = REGNO (SET_SRC (curr_set));
   6538  1.1  mrg       update_reloads_num_p = true;
   6539  1.1  mrg       if (src_regno < lra_constraint_new_regno_start
   6540  1.1  mrg 	  && src_regno >= FIRST_PSEUDO_REGISTER
   6541  1.1  mrg 	  && reg_renumber[src_regno] < 0
   6542  1.1  mrg 	  && dst_regno >= lra_constraint_new_regno_start
   6543  1.1  mrg 	  && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS)
   6544  1.1  mrg 	{
   6545  1.1  mrg 	  /* 'reload_pseudo <- original_pseudo'.  */
   6546  1.1  mrg 	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
   6547  1.1  mrg 	    reloads_num++;
   6548  1.1  mrg 	  update_reloads_num_p = false;
   6549  1.1  mrg 	  succ_p = false;
   6550  1.1  mrg 	  if (usage_insns[src_regno].check == curr_usage_insns_check
   6551  1.1  mrg 	      && (next_usage_insns = usage_insns[src_regno].insns) != NULL_RTX)
   6552  1.1  mrg 	    succ_p = inherit_reload_reg (false, src_regno, cl,
   6553  1.1  mrg 					 curr_insn, next_usage_insns);
   6554  1.1  mrg 	  if (succ_p)
   6555  1.1  mrg 	    change_p = true;
   6556  1.1  mrg 	  else
   6557  1.1  mrg 	    setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
   6558  1.1  mrg 	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
   6559  1.1  mrg 	    potential_reload_hard_regs |= reg_class_contents[cl];
   6560  1.1  mrg 	}
   6561  1.1  mrg       else if (src_regno < 0
   6562  1.1  mrg 	       && dst_regno >= lra_constraint_new_regno_start
   6563  1.1  mrg 	       && invariant_p (SET_SRC (curr_set))
   6564  1.1  mrg 	       && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS
   6565  1.1  mrg 	       && ! bitmap_bit_p (&invalid_invariant_regs, dst_regno)
   6566  1.1  mrg 	       && ! bitmap_bit_p (&invalid_invariant_regs,
   6567  1.1  mrg 				  ORIGINAL_REGNO(regno_reg_rtx[dst_regno])))
   6568  1.1  mrg 	{
   6569  1.1  mrg 	  /* 'reload_pseudo <- invariant'.  */
   6570  1.1  mrg 	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
   6571  1.1  mrg 	    reloads_num++;
   6572  1.1  mrg 	  update_reloads_num_p = false;
   6573  1.1  mrg 	  if (process_invariant_for_inheritance (SET_DEST (curr_set), SET_SRC (curr_set)))
   6574  1.1  mrg 	    change_p = true;
   6575  1.1  mrg 	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
   6576  1.1  mrg 	    potential_reload_hard_regs |= reg_class_contents[cl];
   6577  1.1  mrg 	}
   6578  1.1  mrg       else if (src_regno >= lra_constraint_new_regno_start
   6579  1.1  mrg 	       && dst_regno < lra_constraint_new_regno_start
   6580  1.1  mrg 	       && dst_regno >= FIRST_PSEUDO_REGISTER
   6581  1.1  mrg 	       && reg_renumber[dst_regno] < 0
   6582  1.1  mrg 	       && (cl = lra_get_allocno_class (src_regno)) != NO_REGS
   6583  1.1  mrg 	       && usage_insns[dst_regno].check == curr_usage_insns_check
   6584  1.1  mrg 	       && (next_usage_insns
   6585  1.1  mrg 		   = usage_insns[dst_regno].insns) != NULL_RTX)
   6586  1.1  mrg 	{
   6587  1.1  mrg 	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
   6588  1.1  mrg 	    reloads_num++;
   6589  1.1  mrg 	  update_reloads_num_p = false;
   6590  1.1  mrg 	  /* 'original_pseudo <- reload_pseudo'.  */
   6591  1.1  mrg 	  if (! JUMP_P (curr_insn)
   6592  1.1  mrg 	      && inherit_reload_reg (true, dst_regno, cl,
   6593  1.1  mrg 				     curr_insn, next_usage_insns))
   6594  1.1  mrg 	    change_p = true;
   6595  1.1  mrg 	  /* Invalidate.  */
   6596  1.1  mrg 	  usage_insns[dst_regno].check = 0;
   6597  1.1  mrg 	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
   6598  1.1  mrg 	    potential_reload_hard_regs |= reg_class_contents[cl];
   6599  1.1  mrg 	}
   6600  1.1  mrg       else if (INSN_P (curr_insn))
   6601  1.1  mrg 	{
   6602  1.1  mrg 	  int iter;
   6603  1.1  mrg 	  int max_uid = get_max_uid ();
   6604  1.1  mrg 
   6605  1.1  mrg 	  curr_id = lra_get_insn_recog_data (curr_insn);
   6606  1.1  mrg 	  curr_static_id = curr_id->insn_static_data;
   6607  1.1  mrg 	  to_inherit_num = 0;
   6608  1.1  mrg 	  /* Process insn definitions.	*/
   6609  1.1  mrg 	  for (iter = 0; iter < 2; iter++)
   6610  1.1  mrg 	    for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
   6611  1.1  mrg 		 reg != NULL;
   6612  1.1  mrg 		 reg = reg->next)
   6613  1.1  mrg 	      if (reg->type != OP_IN
   6614  1.1  mrg 		  && (dst_regno = reg->regno) < lra_constraint_new_regno_start)
   6615  1.1  mrg 		{
   6616  1.1  mrg 		  if (dst_regno >= FIRST_PSEUDO_REGISTER && reg->type == OP_OUT
   6617  1.1  mrg 		      && reg_renumber[dst_regno] < 0 && ! reg->subreg_p
   6618  1.1  mrg 		      && usage_insns[dst_regno].check == curr_usage_insns_check
   6619  1.1  mrg 		      && (next_usage_insns
   6620  1.1  mrg 			  = usage_insns[dst_regno].insns) != NULL_RTX)
   6621  1.1  mrg 		    {
   6622  1.1  mrg 		      struct lra_insn_reg *r;
   6623  1.1  mrg 
   6624  1.1  mrg 		      for (r = curr_id->regs; r != NULL; r = r->next)
   6625  1.1  mrg 			if (r->type != OP_OUT && r->regno == dst_regno)
   6626  1.1  mrg 			  break;
   6627  1.1  mrg 		      /* Don't do inheritance if the pseudo is also
   6628  1.1  mrg 			 used in the insn.  */
   6629  1.1  mrg 		      if (r == NULL)
   6630  1.1  mrg 			/* We cannot do inheritance right now
   6631  1.1  mrg 			   because the current insn reg info (chain
   6632  1.1  mrg 			   regs) can change after that.  */
   6633  1.1  mrg 			add_to_inherit (dst_regno, next_usage_insns);
   6634  1.1  mrg 		    }
   6635  1.1  mrg 		  /* We cannot process one reg twice here because of
   6636  1.1  mrg 		     usage_insns invalidation.  */
   6637  1.1  mrg 		  if ((dst_regno < FIRST_PSEUDO_REGISTER
   6638  1.1  mrg 		       || reg_renumber[dst_regno] >= 0)
   6639  1.1  mrg 		      && ! reg->subreg_p && reg->type != OP_IN)
   6640  1.1  mrg 		    {
   6641  1.1  mrg 		      HARD_REG_SET s;
   6642  1.1  mrg 
   6643  1.1  mrg 		      if (split_if_necessary (dst_regno, reg->biggest_mode,
   6644  1.1  mrg 					      potential_reload_hard_regs,
   6645  1.1  mrg 					      false, curr_insn, max_uid))
   6646  1.1  mrg 			change_p = true;
   6647  1.1  mrg 		      CLEAR_HARD_REG_SET (s);
   6648  1.1  mrg 		      if (dst_regno < FIRST_PSEUDO_REGISTER)
   6649  1.1  mrg 			add_to_hard_reg_set (&s, reg->biggest_mode, dst_regno);
   6650  1.1  mrg 		      else
   6651  1.1  mrg 			add_to_hard_reg_set (&s, PSEUDO_REGNO_MODE (dst_regno),
   6652  1.1  mrg 					     reg_renumber[dst_regno]);
   6653  1.1  mrg 		      live_hard_regs &= ~s;
   6654  1.1  mrg 		      potential_reload_hard_regs &= ~s;
   6655  1.1  mrg 		    }
   6656  1.1  mrg 		  /* We should invalidate potential inheritance or
   6657  1.1  mrg 		     splitting for the current insn usages to the next
   6658  1.1  mrg 		     usage insns (see code below) as the output pseudo
   6659  1.1  mrg 		     prevents this.  */
   6660  1.1  mrg 		  if ((dst_regno >= FIRST_PSEUDO_REGISTER
   6661  1.1  mrg 		       && reg_renumber[dst_regno] < 0)
   6662  1.1  mrg 		      || (reg->type == OP_OUT && ! reg->subreg_p
   6663  1.1  mrg 			  && (dst_regno < FIRST_PSEUDO_REGISTER
   6664  1.1  mrg 			      || reg_renumber[dst_regno] >= 0)))
   6665  1.1  mrg 		    {
   6666  1.1  mrg 		      /* Invalidate and mark definitions.  */
   6667  1.1  mrg 		      if (dst_regno >= FIRST_PSEUDO_REGISTER)
   6668  1.1  mrg 			usage_insns[dst_regno].check = -(int) INSN_UID (curr_insn);
   6669  1.1  mrg 		      else
   6670  1.1  mrg 			{
   6671  1.1  mrg 			  nregs = hard_regno_nregs (dst_regno,
   6672  1.1  mrg 						    reg->biggest_mode);
   6673  1.1  mrg 			  for (i = 0; i < nregs; i++)
   6674  1.1  mrg 			    usage_insns[dst_regno + i].check
   6675  1.1  mrg 			      = -(int) INSN_UID (curr_insn);
   6676  1.1  mrg 			}
   6677  1.1  mrg 		    }
   6678  1.1  mrg 		}
   6679  1.1  mrg 	  /* Process clobbered call regs.  */
   6680  1.1  mrg 	  if (curr_id->arg_hard_regs != NULL)
   6681  1.1  mrg 	    for (i = 0; (dst_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
   6682  1.1  mrg 	      if (dst_regno >= FIRST_PSEUDO_REGISTER)
   6683  1.1  mrg 		usage_insns[dst_regno - FIRST_PSEUDO_REGISTER].check
   6684  1.1  mrg 		  = -(int) INSN_UID (curr_insn);
   6685  1.1  mrg 	  if (! JUMP_P (curr_insn))
   6686  1.1  mrg 	    for (i = 0; i < to_inherit_num; i++)
   6687  1.1  mrg 	      if (inherit_reload_reg (true, to_inherit[i].regno,
   6688  1.1  mrg 				      ALL_REGS, curr_insn,
   6689  1.1  mrg 				      to_inherit[i].insns))
   6690  1.1  mrg 	      change_p = true;
   6691  1.1  mrg 	  if (CALL_P (curr_insn))
   6692  1.1  mrg 	    {
   6693  1.1  mrg 	      rtx cheap, pat, dest;
   6694  1.1  mrg 	      rtx_insn *restore;
   6695  1.1  mrg 	      int regno, hard_regno;
   6696  1.1  mrg 
   6697  1.1  mrg 	      calls_num++;
   6698  1.1  mrg 	      function_abi callee_abi = insn_callee_abi (curr_insn);
   6699  1.1  mrg 	      last_call_for_abi[callee_abi.id ()] = calls_num;
   6700  1.1  mrg 	      full_and_partial_call_clobbers
   6701  1.1  mrg 		|= callee_abi.full_and_partial_reg_clobbers ();
   6702  1.1  mrg 	      if ((cheap = find_reg_note (curr_insn,
   6703  1.1  mrg 					  REG_RETURNED, NULL_RTX)) != NULL_RTX
   6704  1.1  mrg 		  && ((cheap = XEXP (cheap, 0)), true)
   6705  1.1  mrg 		  && (regno = REGNO (cheap)) >= FIRST_PSEUDO_REGISTER
   6706  1.1  mrg 		  && (hard_regno = reg_renumber[regno]) >= 0
   6707  1.1  mrg 		  && usage_insns[regno].check == curr_usage_insns_check
   6708  1.1  mrg 		  /* If there are pending saves/restores, the
   6709  1.1  mrg 		     optimization is not worth.	 */
   6710  1.1  mrg 		  && usage_insns[regno].calls_num == calls_num - 1
   6711  1.1  mrg 		  && callee_abi.clobbers_reg_p (GET_MODE (cheap), hard_regno))
   6712  1.1  mrg 		{
   6713  1.1  mrg 		  /* Restore the pseudo from the call result as
   6714  1.1  mrg 		     REG_RETURNED note says that the pseudo value is
   6715  1.1  mrg 		     in the call result and the pseudo is an argument
   6716  1.1  mrg 		     of the call.  */
   6717  1.1  mrg 		  pat = PATTERN (curr_insn);
   6718  1.1  mrg 		  if (GET_CODE (pat) == PARALLEL)
   6719  1.1  mrg 		    pat = XVECEXP (pat, 0, 0);
   6720  1.1  mrg 		  dest = SET_DEST (pat);
   6721  1.1  mrg 		  /* For multiple return values dest is PARALLEL.
   6722  1.1  mrg 		     Currently we handle only single return value case.  */
   6723  1.1  mrg 		  if (REG_P (dest))
   6724  1.1  mrg 		    {
   6725  1.1  mrg 		      start_sequence ();
   6726  1.1  mrg 		      emit_move_insn (cheap, copy_rtx (dest));
   6727  1.1  mrg 		      restore = get_insns ();
   6728  1.1  mrg 		      end_sequence ();
   6729  1.1  mrg 		      lra_process_new_insns (curr_insn, NULL, restore,
   6730  1.1  mrg 					     "Inserting call parameter restore");
   6731  1.1  mrg 		      /* We don't need to save/restore of the pseudo from
   6732  1.1  mrg 			 this call.	 */
   6733  1.1  mrg 		      usage_insns[regno].calls_num = calls_num;
   6734  1.1  mrg 		      remove_from_hard_reg_set
   6735  1.1  mrg 			(&full_and_partial_call_clobbers,
   6736  1.1  mrg 			 GET_MODE (cheap), hard_regno);
   6737  1.1  mrg 		      bitmap_set_bit (&check_only_regs, regno);
   6738  1.1  mrg 		    }
   6739  1.1  mrg 		}
   6740  1.1  mrg 	    }
   6741  1.1  mrg 	  to_inherit_num = 0;
   6742  1.1  mrg 	  /* Process insn usages.  */
   6743  1.1  mrg 	  for (iter = 0; iter < 2; iter++)
   6744  1.1  mrg 	    for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
   6745  1.1  mrg 		 reg != NULL;
   6746  1.1  mrg 		 reg = reg->next)
   6747  1.1  mrg 	      if ((reg->type != OP_OUT
   6748  1.1  mrg 		   || (reg->type == OP_OUT && reg->subreg_p))
   6749  1.1  mrg 		  && (src_regno = reg->regno) < lra_constraint_new_regno_start)
   6750  1.1  mrg 		{
   6751  1.1  mrg 		  if (src_regno >= FIRST_PSEUDO_REGISTER
   6752  1.1  mrg 		      && reg_renumber[src_regno] < 0 && reg->type == OP_IN)
   6753  1.1  mrg 		    {
   6754  1.1  mrg 		      if (usage_insns[src_regno].check == curr_usage_insns_check
   6755  1.1  mrg 			  && (next_usage_insns
   6756  1.1  mrg 			      = usage_insns[src_regno].insns) != NULL_RTX
   6757  1.1  mrg 			  && NONDEBUG_INSN_P (curr_insn))
   6758  1.1  mrg 			add_to_inherit (src_regno, next_usage_insns);
   6759  1.1  mrg 		      else if (usage_insns[src_regno].check
   6760  1.1  mrg 			       != -(int) INSN_UID (curr_insn))
   6761  1.1  mrg 			/* Add usages but only if the reg is not set up
   6762  1.1  mrg 			   in the same insn.  */
   6763  1.1  mrg 			add_next_usage_insn (src_regno, curr_insn, reloads_num);
   6764  1.1  mrg 		    }
   6765  1.1  mrg 		  else if (src_regno < FIRST_PSEUDO_REGISTER
   6766  1.1  mrg 			   || reg_renumber[src_regno] >= 0)
   6767  1.1  mrg 		    {
   6768  1.1  mrg 		      bool before_p;
   6769  1.1  mrg 		      rtx_insn *use_insn = curr_insn;
   6770  1.1  mrg 
   6771  1.1  mrg 		      before_p = (JUMP_P (curr_insn)
   6772  1.1  mrg 				  || (CALL_P (curr_insn) && reg->type == OP_IN));
   6773  1.1  mrg 		      if (NONDEBUG_INSN_P (curr_insn)
   6774  1.1  mrg 			  && (! JUMP_P (curr_insn) || reg->type == OP_IN)
   6775  1.1  mrg 			  && split_if_necessary (src_regno, reg->biggest_mode,
   6776  1.1  mrg 						 potential_reload_hard_regs,
   6777  1.1  mrg 						 before_p, curr_insn, max_uid))
   6778  1.1  mrg 			{
   6779  1.1  mrg 			  if (reg->subreg_p)
   6780  1.1  mrg 			    check_and_force_assignment_correctness_p = true;
   6781  1.1  mrg 			  change_p = true;
   6782  1.1  mrg 			  /* Invalidate. */
   6783  1.1  mrg 			  usage_insns[src_regno].check = 0;
   6784  1.1  mrg 			  if (before_p)
   6785  1.1  mrg 			    use_insn = PREV_INSN (curr_insn);
   6786  1.1  mrg 			}
   6787  1.1  mrg 		      if (NONDEBUG_INSN_P (curr_insn))
   6788  1.1  mrg 			{
   6789  1.1  mrg 			  if (src_regno < FIRST_PSEUDO_REGISTER)
   6790  1.1  mrg 			    add_to_hard_reg_set (&live_hard_regs,
   6791  1.1  mrg 						 reg->biggest_mode, src_regno);
   6792  1.1  mrg 			  else
   6793  1.1  mrg 			    add_to_hard_reg_set (&live_hard_regs,
   6794  1.1  mrg 						 PSEUDO_REGNO_MODE (src_regno),
   6795  1.1  mrg 						 reg_renumber[src_regno]);
   6796  1.1  mrg 			}
   6797  1.1  mrg 		      if (src_regno >= FIRST_PSEUDO_REGISTER)
   6798  1.1  mrg 			add_next_usage_insn (src_regno, use_insn, reloads_num);
   6799  1.1  mrg 		      else
   6800  1.1  mrg 			{
   6801  1.1  mrg 			  for (i = 0; i < hard_regno_nregs (src_regno, reg->biggest_mode); i++)
   6802  1.1  mrg 			    add_next_usage_insn (src_regno + i, use_insn, reloads_num);
   6803  1.1  mrg 			}
   6804  1.1  mrg 		    }
   6805  1.1  mrg 		}
   6806  1.1  mrg 	  /* Process used call regs.  */
   6807  1.1  mrg 	  if (curr_id->arg_hard_regs != NULL)
   6808  1.1  mrg 	    for (i = 0; (src_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
   6809  1.1  mrg 	      if (src_regno < FIRST_PSEUDO_REGISTER)
   6810  1.1  mrg 		{
   6811  1.1  mrg 	           SET_HARD_REG_BIT (live_hard_regs, src_regno);
   6812  1.1  mrg 	           add_next_usage_insn (src_regno, curr_insn, reloads_num);
   6813  1.1  mrg 		}
   6814  1.1  mrg 	  for (i = 0; i < to_inherit_num; i++)
   6815  1.1  mrg 	    {
   6816  1.1  mrg 	      src_regno = to_inherit[i].regno;
   6817  1.1  mrg 	      if (inherit_reload_reg (false, src_regno, ALL_REGS,
   6818  1.1  mrg 				      curr_insn, to_inherit[i].insns))
   6819  1.1  mrg 		change_p = true;
   6820  1.1  mrg 	      else
   6821  1.1  mrg 		setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
   6822  1.1  mrg 	    }
   6823  1.1  mrg 	}
   6824  1.1  mrg       if (update_reloads_num_p
   6825  1.1  mrg 	  && NONDEBUG_INSN_P (curr_insn) && curr_set != NULL_RTX)
   6826  1.1  mrg 	{
   6827  1.1  mrg 	  int regno = -1;
   6828  1.1  mrg 	  if ((REG_P (SET_DEST (curr_set))
   6829  1.1  mrg 	       && (regno = REGNO (SET_DEST (curr_set))) >= lra_constraint_new_regno_start
   6830  1.1  mrg 	       && reg_renumber[regno] < 0
   6831  1.1  mrg 	       && (cl = lra_get_allocno_class (regno)) != NO_REGS)
   6832  1.1  mrg 	      || (REG_P (SET_SRC (curr_set))
   6833  1.1  mrg 	          && (regno = REGNO (SET_SRC (curr_set))) >= lra_constraint_new_regno_start
   6834  1.1  mrg 	          && reg_renumber[regno] < 0
   6835  1.1  mrg 	          && (cl = lra_get_allocno_class (regno)) != NO_REGS))
   6836  1.1  mrg 	    {
   6837  1.1  mrg 	      if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
   6838  1.1  mrg 		reloads_num++;
   6839  1.1  mrg 	      if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
   6840  1.1  mrg 		potential_reload_hard_regs |= reg_class_contents[cl];
   6841  1.1  mrg 	    }
   6842  1.1  mrg 	}
   6843  1.1  mrg       if (NONDEBUG_INSN_P (curr_insn))
   6844  1.1  mrg 	{
   6845  1.1  mrg 	  int regno;
   6846  1.1  mrg 
   6847  1.1  mrg 	  /* Invalidate invariants with changed regs.  */
   6848  1.1  mrg 	  curr_id = lra_get_insn_recog_data (curr_insn);
   6849  1.1  mrg 	  for (reg = curr_id->regs; reg != NULL; reg = reg->next)
   6850  1.1  mrg 	    if (reg->type != OP_IN)
   6851  1.1  mrg 	      {
   6852  1.1  mrg 		bitmap_set_bit (&invalid_invariant_regs, reg->regno);
   6853  1.1  mrg 		bitmap_set_bit (&invalid_invariant_regs,
   6854  1.1  mrg 				ORIGINAL_REGNO (regno_reg_rtx[reg->regno]));
   6855  1.1  mrg 	      }
   6856  1.1  mrg 	  curr_static_id = curr_id->insn_static_data;
   6857  1.1  mrg 	  for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
   6858  1.1  mrg 	    if (reg->type != OP_IN)
   6859  1.1  mrg 	      bitmap_set_bit (&invalid_invariant_regs, reg->regno);
   6860  1.1  mrg 	  if (curr_id->arg_hard_regs != NULL)
   6861  1.1  mrg 	    for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
   6862  1.1  mrg 	      if (regno >= FIRST_PSEUDO_REGISTER)
   6863  1.1  mrg 		bitmap_set_bit (&invalid_invariant_regs,
   6864  1.1  mrg 				regno - FIRST_PSEUDO_REGISTER);
   6865  1.1  mrg 	}
   6866  1.1  mrg       /* We reached the start of the current basic block.  */
   6867  1.1  mrg       if (prev_insn == NULL_RTX || prev_insn == PREV_INSN (head)
   6868  1.1  mrg 	  || BLOCK_FOR_INSN (prev_insn) != curr_bb)
   6869  1.1  mrg 	{
   6870  1.1  mrg 	  /* We reached the beginning of the current block -- do
   6871  1.1  mrg 	     rest of spliting in the current BB.  */
   6872  1.1  mrg 	  to_process = df_get_live_in (curr_bb);
   6873  1.1  mrg 	  if (BLOCK_FOR_INSN (head) != curr_bb)
   6874  1.1  mrg 	    {
   6875  1.1  mrg 	      /* We are somewhere in the middle of EBB.	 */
   6876  1.1  mrg 	      get_live_on_other_edges (EDGE_PRED (curr_bb, 0)->src,
   6877  1.1  mrg 				       curr_bb, &temp_bitmap);
   6878  1.1  mrg 	      to_process = &temp_bitmap;
   6879  1.1  mrg 	    }
   6880  1.1  mrg 	  head_p = true;
   6881  1.1  mrg 	  EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
   6882  1.1  mrg 	    {
   6883  1.1  mrg 	      if ((int) j >= lra_constraint_new_regno_start)
   6884  1.1  mrg 		break;
   6885  1.1  mrg 	      if (((int) j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
   6886  1.1  mrg 		  && usage_insns[j].check == curr_usage_insns_check
   6887  1.1  mrg 		  && (next_usage_insns = usage_insns[j].insns) != NULL_RTX)
   6888  1.1  mrg 		{
   6889  1.1  mrg 		  if (need_for_split_p (potential_reload_hard_regs, j))
   6890  1.1  mrg 		    {
   6891  1.1  mrg 		      if (lra_dump_file != NULL && head_p)
   6892  1.1  mrg 			{
   6893  1.1  mrg 			  fprintf (lra_dump_file,
   6894  1.1  mrg 				   "  ----------------------------------\n");
   6895  1.1  mrg 			  head_p = false;
   6896  1.1  mrg 			}
   6897  1.1  mrg 		      if (split_reg (false, j, bb_note (curr_bb),
   6898  1.1  mrg 				     next_usage_insns, NULL))
   6899  1.1  mrg 			change_p = true;
   6900  1.1  mrg 		    }
   6901  1.1  mrg 		  usage_insns[j].check = 0;
   6902  1.1  mrg 		}
   6903  1.1  mrg 	    }
   6904  1.1  mrg 	}
   6905  1.1  mrg     }
   6906  1.1  mrg   return change_p;
   6907  1.1  mrg }
   6908  1.1  mrg 
/* This value affects EBB forming.  If probability of edge from EBB to
   a BB is not greater than the following value, we don't add the BB
   to EBB.  The cutoff is the percentage given by
   param_lra_inheritance_ebb_probability_cutoff, rescaled to the
   REG_BR_PROB_BASE fixed-point probability base.  */
#define EBB_PROBABILITY_CUTOFF \
  ((REG_BR_PROB_BASE * param_lra_inheritance_ebb_probability_cutoff) / 100)

/* Current number of inheritance/split iteration.  Incremented on each
   call of lra_inheritance, which bails out once it exceeds
   LRA_MAX_INHERITANCE_PASSES.  */
int lra_inheritance_iter;
   6917  1.1  mrg 
/* Entry function for inheritance/split pass.  Forms extended basic
   blocks (EBBs) from fall-through chains and runs inherit_in_ebb on
   each of them, updating live info afterwards when anything
   changed.  */
void
lra_inheritance (void)
{
  int i;
  basic_block bb, start_bb;
  edge e;

  /* Guarantee termination: give up after a bounded number of
     inheritance passes.  */
  lra_inheritance_iter++;
  if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
    return;
  timevar_push (TV_LRA_INHERITANCE);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "\n********** Inheritance #%d: **********\n\n",
	     lra_inheritance_iter);
  curr_usage_insns_check = 0;
  /* One usage_insns entry per pre-existing (non-reload) regno; a zero
     check field marks the entry as invalid/unused.  */
  usage_insns = XNEWVEC (struct usage_insns, lra_constraint_new_regno_start);
  for (i = 0; i < lra_constraint_new_regno_start; i++)
    usage_insns[i].check = 0;
  bitmap_initialize (&check_only_regs, &reg_obstack);
  bitmap_initialize (&invalid_invariant_regs, &reg_obstack);
  bitmap_initialize (&live_regs, &reg_obstack);
  bitmap_initialize (&temp_bitmap, &reg_obstack);
  bitmap_initialize (&ebb_global_regs, &reg_obstack);
  FOR_EACH_BB_FN (bb, cfun)
    {
      start_bb = bb;
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "EBB");
      /* Form an EBB starting with BB.  */
      bitmap_clear (&ebb_global_regs);
      bitmap_ior_into (&ebb_global_regs, df_get_live_in (bb));
      for (;;)
	{
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, " %d", bb->index);
	  /* Stop extending the EBB at the exit block or when the next
	     block starts with a label.  */
	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	      || LABEL_P (BB_HEAD (bb->next_bb)))
	    break;
	  e = find_fallthru_edge (bb->succs);
	  if (! e)
	    break;
	  /* Don't extend the EBB through an unlikely fall-through
	     edge (see EBB_PROBABILITY_CUTOFF).  */
	  if (e->probability.initialized_p ()
	      && e->probability.to_reg_br_prob_base () < EBB_PROBABILITY_CUTOFF)
	    break;
	  bb = bb->next_bb;
	}
      bitmap_ior_into (&ebb_global_regs, df_get_live_out (bb));
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "\n");
      if (inherit_in_ebb (BB_HEAD (start_bb), BB_END (bb)))
	/* Remember that the EBB head and tail can change in
	   inherit_in_ebb.  */
	update_ebb_live_info (BB_HEAD (start_bb), BB_END (bb));
    }
  bitmap_release (&ebb_global_regs);
  bitmap_release (&temp_bitmap);
  bitmap_release (&live_regs);
  bitmap_release (&invalid_invariant_regs);
  bitmap_release (&check_only_regs);
  free (usage_insns);

  timevar_pop (TV_LRA_INHERITANCE);
}
   6982  1.1  mrg 
   6983  1.1  mrg 
   6984  1.1  mrg 
/* This page contains code to undo failed inheritance/split
   transformations.  */

/* Current number of iteration undoing inheritance/split.  Incremented
   by lra_undo_inheritance, which does nothing once it exceeds
   LRA_MAX_INHERITANCE_PASSES.  */
int lra_undo_inheritance_iter;
   6991  1.1  mrg 
   6992  1.1  mrg /* Fix BB live info LIVE after removing pseudos created on pass doing
   6993  1.1  mrg    inheritance/split which are REMOVED_PSEUDOS.	 */
   6994  1.1  mrg static void
   6995  1.1  mrg fix_bb_live_info (bitmap live, bitmap removed_pseudos)
   6996  1.1  mrg {
   6997  1.1  mrg   unsigned int regno;
   6998  1.1  mrg   bitmap_iterator bi;
   6999  1.1  mrg 
   7000  1.1  mrg   EXECUTE_IF_SET_IN_BITMAP (removed_pseudos, 0, regno, bi)
   7001  1.1  mrg     if (bitmap_clear_bit (live, regno)
   7002  1.1  mrg 	&& REG_P (lra_reg_info[regno].restore_rtx))
   7003  1.1  mrg       bitmap_set_bit (live, REGNO (lra_reg_info[regno].restore_rtx));
   7004  1.1  mrg }
   7005  1.1  mrg 
   7006  1.1  mrg /* Return regno of the (subreg of) REG. Otherwise, return a negative
   7007  1.1  mrg    number.  */
   7008  1.1  mrg static int
   7009  1.1  mrg get_regno (rtx reg)
   7010  1.1  mrg {
   7011  1.1  mrg   if (GET_CODE (reg) == SUBREG)
   7012  1.1  mrg     reg = SUBREG_REG (reg);
   7013  1.1  mrg   if (REG_P (reg))
   7014  1.1  mrg     return REGNO (reg);
   7015  1.1  mrg   return -1;
   7016  1.1  mrg }
   7017  1.1  mrg 
   7018  1.1  mrg /* Delete a move INSN with destination reg DREGNO and a previous
   7019  1.1  mrg    clobber insn with the same regno.  The inheritance/split code can
   7020  1.1  mrg    generate moves with preceding clobber and when we delete such moves
   7021  1.1  mrg    we should delete the clobber insn too to keep the correct life
   7022  1.1  mrg    info.  */
   7023  1.1  mrg static void
   7024  1.1  mrg delete_move_and_clobber (rtx_insn *insn, int dregno)
   7025  1.1  mrg {
   7026  1.1  mrg   rtx_insn *prev_insn = PREV_INSN (insn);
   7027  1.1  mrg 
   7028  1.1  mrg   lra_set_insn_deleted (insn);
   7029  1.1  mrg   lra_assert (dregno >= 0);
   7030  1.1  mrg   if (prev_insn != NULL && NONDEBUG_INSN_P (prev_insn)
   7031  1.1  mrg       && GET_CODE (PATTERN (prev_insn)) == CLOBBER
   7032  1.1  mrg       && dregno == get_regno (XEXP (PATTERN (prev_insn), 0)))
   7033  1.1  mrg     lra_set_insn_deleted (prev_insn);
   7034  1.1  mrg }
   7035  1.1  mrg 
/* Remove inheritance/split pseudos which are in REMOVE_PSEUDOS and
   return true if we did any change.  The undo transformations for
   inheritance looks like
      i <- i2
      p <- i	  =>   p <- i2
   or removing
      p <- i, i <- p, and i <- i3
   where p is original pseudo from which inheritance pseudo i was
   created, i and i3 are removed inheritance pseudos, i2 is another
   not removed inheritance pseudo.  All split pseudos or other
   occurrences of removed inheritance pseudos are changed on the
   corresponding original pseudos.

   The function also schedules insns changed and created during
   inheritance/split pass for processing by the subsequent constraint
   pass.  */
static bool
remove_inheritance_pseudos (bitmap remove_pseudos)
{
  basic_block bb;
  int regno, sregno, prev_sregno, dregno;
  rtx restore_rtx;
  rtx set, prev_set;
  rtx_insn *prev_insn;
  bool change_p, done_p;

  change_p = ! bitmap_empty_p (remove_pseudos);
  /* We cannot finish the function right away if CHANGE_P is true
     because we need to mark insns affected by previous
     inheritance/split pass for processing by the subsequent
     constraint pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      fix_bb_live_info (df_get_live_in (bb), remove_pseudos);
      fix_bb_live_info (df_get_live_out (bb), remove_pseudos);
      FOR_BB_INSNS_REVERSE (bb, curr_insn)
	{
	  if (! INSN_P (curr_insn))
	    continue;
	  /* DONE_P becomes true once the insn has been deleted or
	     rewritten by one of the special cases below; otherwise the
	     generic restore loop at the end handles it.  */
	  done_p = false;
	  sregno = dregno = -1;
	  if (change_p && NONDEBUG_INSN_P (curr_insn)
	      && (set = single_set (curr_insn)) != NULL_RTX)
	    {
	      dregno = get_regno (SET_DEST (set));
	      sregno = get_regno (SET_SRC (set));
	    }

	  if (sregno >= 0 && dregno >= 0)
	    {
	      /* A non-REG restore_rtx marks an invariant inheritance
		 pseudo (its value is an invariant expression, not an
		 original register).  */
	      if (bitmap_bit_p (remove_pseudos, dregno)
		  && ! REG_P (lra_reg_info[dregno].restore_rtx))
		{
		  /* invariant inheritance pseudo <- original pseudo */
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "	   Removing invariant inheritance:\n");
		      dump_insn_slim (lra_dump_file, curr_insn);
		      fprintf (lra_dump_file, "\n");
		    }
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if (bitmap_bit_p (remove_pseudos, sregno)
		       && ! REG_P (lra_reg_info[sregno].restore_rtx))
		{
		  /* reload pseudo <- invariant inheritance pseudo */
		  start_sequence ();
		  /* We cannot just change the source.  It might be
		     an insn different from the move.  */
		  emit_insn (lra_reg_info[sregno].restore_rtx);
		  rtx_insn *new_insns = get_insns ();
		  end_sequence ();
		  lra_assert (single_set (new_insns) != NULL
			      && SET_DEST (set) == SET_DEST (single_set (new_insns)));
		  lra_process_new_insns (curr_insn, NULL, new_insns,
					 "Changing reload<-invariant inheritance");
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if ((bitmap_bit_p (remove_pseudos, sregno)
			&& (get_regno (lra_reg_info[sregno].restore_rtx) == dregno
			    || (bitmap_bit_p (remove_pseudos, dregno)
				&& get_regno (lra_reg_info[sregno].restore_rtx) >= 0
				&& (get_regno (lra_reg_info[sregno].restore_rtx)
				    == get_regno (lra_reg_info[dregno].restore_rtx)))))
		       || (bitmap_bit_p (remove_pseudos, dregno)
			   && get_regno (lra_reg_info[dregno].restore_rtx) == sregno))
		/* One of the following cases:
		     original <- removed inheritance pseudo
		     removed inherit pseudo <- another removed inherit pseudo
		     removed inherit pseudo <- original pseudo
		   Or
		     removed_split_pseudo <- original_reg
		     original_reg <- removed_split_pseudo */
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "	   Removing %s:\n",
			       bitmap_bit_p (&lra_split_regs, sregno)
			       || bitmap_bit_p (&lra_split_regs, dregno)
			       ? "split" : "inheritance");
		      dump_insn_slim (lra_dump_file, curr_insn);
		    }
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if (bitmap_bit_p (remove_pseudos, sregno)
		       && bitmap_bit_p (&lra_inheritance_pseudos, sregno))
		{
		  /* Search the following pattern:
		       inherit_or_split_pseudo1 <- inherit_or_split_pseudo2
		       original_pseudo <- inherit_or_split_pseudo1
		    where the 2nd insn is the current insn and
		    inherit_or_split_pseudo2 is not removed.  If it is found,
		    change the current insn onto:
		       original_pseudo <- inherit_or_split_pseudo2.  */
		  for (prev_insn = PREV_INSN (curr_insn);
		       prev_insn != NULL_RTX && ! NONDEBUG_INSN_P (prev_insn);
		       prev_insn = PREV_INSN (prev_insn))
		    ;
		  if (prev_insn != NULL_RTX && BLOCK_FOR_INSN (prev_insn) == bb
		      && (prev_set = single_set (prev_insn)) != NULL_RTX
		      /* There should be no subregs in insn we are
			 searching because only the original reg might
			 be in subreg when we changed the mode of
			 load/store for splitting.  */
		      && REG_P (SET_DEST (prev_set))
		      && REG_P (SET_SRC (prev_set))
		      && (int) REGNO (SET_DEST (prev_set)) == sregno
		      && ((prev_sregno = REGNO (SET_SRC (prev_set)))
			  >= FIRST_PSEUDO_REGISTER)
		      && (lra_reg_info[prev_sregno].restore_rtx == NULL_RTX
			  ||
			  /* As we consider chain of inheritance or
			     splitting described in above comment we should
			     check that sregno and prev_sregno were
			     inheritance/split pseudos created from the
			     same original regno.  */
			  (get_regno (lra_reg_info[sregno].restore_rtx) >= 0
			   && (get_regno (lra_reg_info[sregno].restore_rtx)
			       == get_regno (lra_reg_info[prev_sregno].restore_rtx))))
		      && ! bitmap_bit_p (remove_pseudos, prev_sregno))
		    {
		      lra_assert (GET_MODE (SET_SRC (prev_set))
				  == GET_MODE (regno_reg_rtx[sregno]));
		      /* Although we have a single set, the insn can
			 contain more than one sregno register occurrence
			 as a source.  Change all occurrences.  */
		      lra_substitute_pseudo_within_insn (curr_insn, sregno,
							 SET_SRC (prev_set),
							 false);
		      /* As we are finishing with processing the insn
			 here, check the destination too as it might be
			 an inheritance pseudo for another pseudo.  */
		      if (bitmap_bit_p (remove_pseudos, dregno)
			  && bitmap_bit_p (&lra_inheritance_pseudos, dregno)
			  && (restore_rtx
			      = lra_reg_info[dregno].restore_rtx) != NULL_RTX)
			{
			  if (GET_CODE (SET_DEST (set)) == SUBREG)
			    SUBREG_REG (SET_DEST (set)) = restore_rtx;
			  else
			    SET_DEST (set) = restore_rtx;
			}
		      lra_push_insn_and_update_insn_regno_info (curr_insn);
		      lra_set_used_insn_alternative_by_uid
			(INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
		      done_p = true;
		      if (lra_dump_file != NULL)
			{
			  fprintf (lra_dump_file, "    Change reload insn:\n");
			  dump_insn_slim (lra_dump_file, curr_insn);
			}
		    }
		}
	    }
	  if (! done_p)
	    {
	      struct lra_insn_reg *reg;
	      bool restored_regs_p = false;
	      bool kept_regs_p = false;

	      /* Generic case: substitute every removed pseudo occurring
		 in the insn by its restore_rtx; remember whether any
		 restorable pseudo was kept instead.  */
	      curr_id = lra_get_insn_recog_data (curr_insn);
	      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
		{
		  regno = reg->regno;
		  restore_rtx = lra_reg_info[regno].restore_rtx;
		  if (restore_rtx != NULL_RTX)
		    {
		      if (change_p && bitmap_bit_p (remove_pseudos, regno))
			{
			  lra_substitute_pseudo_within_insn
			    (curr_insn, regno, restore_rtx, false);
			  restored_regs_p = true;
			}
		      else
			kept_regs_p = true;
		    }
		}
	      if (NONDEBUG_INSN_P (curr_insn) && kept_regs_p)
		{
		  /* The instruction has changed since the previous
		     constraints pass.  */
		  lra_push_insn_and_update_insn_regno_info (curr_insn);
		  lra_set_used_insn_alternative_by_uid
		    (INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
		}
	      else if (restored_regs_p)
		/* The instruction has been restored to the form that
		   it had during the previous constraints pass.  */
		lra_update_insn_regno_info (curr_insn);
	      if (restored_regs_p && lra_dump_file != NULL)
		{
		  fprintf (lra_dump_file, "   Insn after restoring regs:\n");
		  dump_insn_slim (lra_dump_file, curr_insn);
		}
	    }
	}
    }
  return change_p;
}
   7258  1.1  mrg 
/* If optional reload pseudos failed to get a hard register or were not
   inherited, it is better to remove optional reloads.  We do this
   transformation after undoing inheritance to figure out necessity to
   remove optional reloads easier.  Return true if we do any
   change.  */
static bool
undo_optional_reloads (void)
{
  bool change_p, keep_p;
  unsigned int regno, uid;
  bitmap_iterator bi, bi2;
  rtx_insn *insn;
  rtx set, src, dest;
  auto_bitmap removed_optional_reload_pseudos (&reg_obstack);

  /* Phase 1: decide for each optional reload pseudo whether to keep
     it; the ones not kept remain in
     removed_optional_reload_pseudos.  */
  bitmap_copy (removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    {
      keep_p = false;
      /* Keep optional reloads from previous subpasses.  */
      if (lra_reg_info[regno].restore_rtx == NULL_RTX
	  /* If the original pseudo changed its allocation, just
	     removing the optional pseudo is dangerous as the original
	     pseudo will have longer live range.  */
	  || reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] >= 0)
	keep_p = true;
      else if (reg_renumber[regno] >= 0)
	EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi2)
	  {
	    insn = lra_insn_recog_data[uid]->insn;
	    if ((set = single_set (insn)) == NULL_RTX)
	      continue;
	    src = SET_SRC (set);
	    dest = SET_DEST (set);
	    if ((! REG_P (src) && ! SUBREG_P (src))
		|| (! REG_P (dest) && ! SUBREG_P (dest)))
	      continue;
	    if (get_regno (dest) == (int) regno
		/* Ignore insn for optional reloads itself.  */
		&& (get_regno (lra_reg_info[regno].restore_rtx)
		    != get_regno (src))
		/* Check only inheritance on last inheritance pass.  */
		&& get_regno (src) >= new_regno_start
		/* Check that the optional reload was inherited.  */
		&& bitmap_bit_p (&lra_inheritance_pseudos, get_regno (src)))
	      {
		keep_p = true;
		break;
	      }
	  }
      if (keep_p)
	{
	  bitmap_clear_bit (removed_optional_reload_pseudos, regno);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
	}
    }
  change_p = ! bitmap_empty_p (removed_optional_reload_pseudos);
  auto_bitmap insn_bitmap (&reg_obstack);
  /* Phase 2: physically undo the optional reloads that were not kept:
     delete their moves (with clobbers) and substitute back the
     original pseudos elsewhere.  */
  EXECUTE_IF_SET_IN_BITMAP (removed_optional_reload_pseudos, 0, regno, bi)
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
      bitmap_copy (insn_bitmap, &lra_reg_info[regno].insn_bitmap);
      EXECUTE_IF_SET_IN_BITMAP (insn_bitmap, 0, uid, bi2)
	{
	  /* We may have already removed a clobber.  */
	  if (!lra_insn_recog_data[uid])
	    continue;
	  insn = lra_insn_recog_data[uid]->insn;
	  if ((set = single_set (insn)) != NULL_RTX)
	    {
	      src = SET_SRC (set);
	      dest = SET_DEST (set);
	      if ((REG_P (src) || SUBREG_P (src))
		  && (REG_P (dest) || SUBREG_P (dest))
		  && ((get_regno (src) == (int) regno
		       && (get_regno (lra_reg_info[regno].restore_rtx)
			   == get_regno (dest)))
		      || (get_regno (dest) == (int) regno
			  && (get_regno (lra_reg_info[regno].restore_rtx)
			      == get_regno (src)))))
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "  Deleting move %u\n",
			       INSN_UID (insn));
		      dump_insn_slim (lra_dump_file, insn);
		    }
		  delete_move_and_clobber (insn, get_regno (dest));
		  continue;
		}
	      /* We should not worry about generating memory-memory
		 moves here as if the corresponding inheritance did
		 not work (inheritance pseudo did not get a hard reg),
		 we remove the inheritance pseudo and the optional
		 reload.  */
	    }
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && REG_P (SET_DEST (insn))
	      && get_regno (SET_DEST (insn)) == (int) regno)
	    /* Refuse to remap clobbers to preexisting pseudos.  */
	    gcc_unreachable ();
	  lra_substitute_pseudo_within_insn
	    (insn, regno, lra_reg_info[regno].restore_rtx, false);
	  lra_update_insn_regno_info (insn);
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "  Restoring original insn:\n");
	      dump_insn_slim (lra_dump_file, insn);
	    }
	}
    }
  /* Clear restore_regnos.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    lra_reg_info[regno].restore_rtx = NULL_RTX;
  return change_p;
}
   7378  1.1  mrg 
/* Entry function for undoing inheritance/split transformation.	 Return true
   if we did any RTL change in this pass.  */
bool
lra_undo_inheritance (void)
{
  unsigned int regno;
  int hard_regno;
  int n_all_inherit, n_inherit, n_all_split, n_split;
  rtx restore_rtx;
  bitmap_iterator bi;
  bool change_p;

  /* Guarantee termination: do nothing after a bounded number of undo
     passes.  */
  lra_undo_inheritance_iter++;
  if (lra_undo_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
    return false;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "\n********** Undoing inheritance #%d: **********\n\n",
	     lra_undo_inheritance_iter);
  auto_bitmap remove_pseudos (&reg_obstack);
  n_inherit = n_all_inherit = 0;
  /* Collect inheritance pseudos whose transformation failed: those
     that got no hard register (unless the original pseudo itself got
     one, when undoing would be dangerous).  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
    if (lra_reg_info[regno].restore_rtx != NULL_RTX)
      {
	n_all_inherit++;
	if (reg_renumber[regno] < 0
	    /* If the original pseudo changed its allocation, just
	       removing inheritance is dangerous as for changing
	       allocation we used shorter live-ranges.  */
	    && (! REG_P (lra_reg_info[regno].restore_rtx)
		|| reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] < 0))
	  bitmap_set_bit (remove_pseudos, regno);
	else
	  n_inherit++;
      }
  if (lra_dump_file != NULL && n_all_inherit != 0)
    fprintf (lra_dump_file, "Inherit %d out of %d (%.2f%%)\n",
	     n_inherit, n_all_inherit,
	     (double) n_inherit / n_all_inherit * 100);
  n_split = n_all_split = 0;
  /* Collect split pseudos that gained nothing: the split pseudo got no
     hard register, or it got the same hard register as the original
     register it was split from.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
    if ((restore_rtx = lra_reg_info[regno].restore_rtx) != NULL_RTX)
      {
	int restore_regno = REGNO (restore_rtx);

	n_all_split++;
	hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
		      ? reg_renumber[restore_regno] : restore_regno);
	if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
	  bitmap_set_bit (remove_pseudos, regno);
	else
	  {
	    n_split++;
	    if (lra_dump_file != NULL)
	      fprintf (lra_dump_file, "	     Keep split r%d (orig=r%d)\n",
		       regno, restore_regno);
	  }
      }
  if (lra_dump_file != NULL && n_all_split != 0)
    fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
	     n_split, n_all_split,
	     (double) n_split / n_all_split * 100);
  change_p = remove_inheritance_pseudos (remove_pseudos);
  /* Clear restore_regnos.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
    lra_reg_info[regno].restore_rtx = NULL_RTX;
  EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
    lra_reg_info[regno].restore_rtx = NULL_RTX;
  change_p = undo_optional_reloads () || change_p;
  return change_p;
}
   7450