Home | History | Annotate | Line # | Download | only in gcc
      1 /* Optimize jump instructions, for GNU compiler.
      2    Copyright (C) 1987-2022 Free Software Foundation, Inc.
      3 
      4 This file is part of GCC.
      5 
      6 GCC is free software; you can redistribute it and/or modify it under
      7 the terms of the GNU General Public License as published by the Free
      8 Software Foundation; either version 3, or (at your option) any later
      9 version.
     10 
     11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
     13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     14 for more details.
     15 
     16 You should have received a copy of the GNU General Public License
     17 along with GCC; see the file COPYING3.  If not see
     18 <http://www.gnu.org/licenses/>.  */
     19 
     20 /* This is the pathetic reminder of old fame of the jump-optimization pass
     21    of the compiler.  Now it contains basically a set of utility functions to
     22    operate with jumps.
     23 
     24    Each CODE_LABEL has a count of the times it is used
     25    stored in the LABEL_NUSES internal field, and each JUMP_INSN
     26    has one label that it refers to stored in the
     27    JUMP_LABEL internal field.  With this we can detect labels that
     28    become unused because of the deletion of all the jumps that
     29    formerly used them.  The JUMP_LABEL info is sometimes looked
     30    at by later passes.  For return insns, it contains either a
     31    RETURN or a SIMPLE_RETURN rtx.
     32 
     33    The subroutines redirect_jump and invert_jump are used
     34    from other passes as well.  */
     35 
     36 #include "config.h"
     37 #include "system.h"
     38 #include "coretypes.h"
     39 #include "backend.h"
     40 #include "target.h"
     41 #include "rtl.h"
     42 #include "tree.h"
     43 #include "cfghooks.h"
     44 #include "tree-pass.h"
     45 #include "memmodel.h"
     46 #include "tm_p.h"
     47 #include "insn-config.h"
     48 #include "regs.h"
     49 #include "emit-rtl.h"
     50 #include "recog.h"
     51 #include "cfgrtl.h"
     52 #include "rtl-iter.h"
     53 
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

/* Forward declarations for static helpers defined later in this file.  */
static void init_label_info (rtx_insn *);
static void mark_all_labels (rtx_insn *);
static void mark_jump_label_1 (rtx, rtx_insn *, bool, bool);
static void mark_jump_label_asm (rtx, rtx_insn *);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx_insn *);
static int invert_exp_1 (rtx, rtx_insn *);
     67 
/* Worker for rebuild_jump_labels and rebuild_jump_labels_chain.
   Recompute LABEL_NUSES and JUMP_LABEL information for the insn chain
   starting at F.  When COUNT_FORCED is true, also count references to
   labels in the forced_labels vector, so that labels referenced only
   from static data keep a nonzero use count.  */
static void
rebuild_jump_labels_1 (rtx_insn *f, bool count_forced)
{
  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  if (count_forced)
    {
      rtx_insn *insn;
      unsigned int i;
      FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
	if (LABEL_P (insn))
	  LABEL_NUSES (insn)++;
    }
  timevar_pop (TV_REBUILD_JUMP);
}
     91 
/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).  F is the first insn of the main insn chain;
   labels kept alive via forced_labels are counted as well.  */
void
rebuild_jump_labels (rtx_insn *f)
{
  rebuild_jump_labels_1 (f, true);
}
    101 
/* This function is like rebuild_jump_labels, but doesn't run over
   forced_labels.  It can be used on insn chains that aren't the
   main function chain (e.g. sequences emitted on the side), where
   the forced_labels bookkeeping does not apply.  */
void
rebuild_jump_labels_chain (rtx_insn *chain)
{
  rebuild_jump_labels_1 (chain, false);
}
    110 
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  Returns 0 (no TODO flags).
 */
static unsigned int
cleanup_barriers (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (BARRIER_P (insn))
	{
	  /* Find the last real (non-note, non-debug) insn before this
	     barrier; a barrier with no such predecessor is left alone.  */
	  rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
	  if (!prev)
	    continue;

	  if (BARRIER_P (prev))
	    /* Two barriers in a row: the later one is redundant.  */
	    delete_insn (insn);
	  else if (prev != PREV_INSN (insn))
	    {
	      /* NOTEs and/or debug insns separate PREV from the barrier;
		 move the barrier so it directly follows PREV.  */
	      basic_block bb = BLOCK_FOR_INSN (prev);
	      rtx_insn *end = PREV_INSN (insn);
	      reorder_insns_nobb (insn, insn, prev);
	      if (bb)
		{
		  /* If the backend called in machine reorg compute_bb_for_insn
		     and didn't free_bb_for_insn again, preserve basic block
		     boundaries.  Move the end of basic block to PREV since
		     it is followed by a barrier now, and clear BLOCK_FOR_INSN
		     on the following notes.
		     ???  Maybe the proper solution for the targets that have
		     cfg around after machine reorg is not to run cleanup_barriers
		     pass at all.  */
		  BB_END (bb) = prev;
		  do
		    {
		      prev = NEXT_INSN (prev);
		      if (prev != insn && BLOCK_FOR_INSN (prev) == bb)
			BLOCK_FOR_INSN (prev) = NULL;
		    }
		  while (prev != end);
		}
	    }
	}
    }
  return 0;
}
    163 
namespace {

/* Pass metadata for the barrier-cleanup RTL pass ("barriers").  */
const pass_data pass_data_cleanup_barriers =
{
  RTL_PASS, /* type */
  "barriers", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Thin opt_pass wrapper whose execute method simply runs
   cleanup_barriers above.  */
class pass_cleanup_barriers : public rtl_opt_pass
{
public:
  pass_cleanup_barriers (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cleanup_barriers, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return cleanup_barriers (); }

}; // class pass_cleanup_barriers

} // anon namespace
    192 
/* Factory used by the pass manager to instantiate the barrier-cleanup
   pass.  The caller takes ownership of the returned object.  */
rtl_opt_pass *
make_pass_cleanup_barriers (gcc::context *ctxt)
{
  return new pass_cleanup_barriers (ctxt);
}
    198 
    199 
    200 /* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
    202    for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
    203    notes whose labels don't occur in the insn any more.  */
    204 
    205 static void
    206 init_label_info (rtx_insn *f)
    207 {
    208   rtx_insn *insn;
    209 
    210   for (insn = f; insn; insn = NEXT_INSN (insn))
    211     {
    212       if (LABEL_P (insn))
    213 	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    214 
    215       /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
    216 	 sticky and not reset here; that way we won't lose association
    217 	 with a label when e.g. the source for a target register
    218 	 disappears out of reach for targets that may use jump-target
    219 	 registers.  Jump transformations are supposed to transform
    220 	 any REG_LABEL_TARGET notes.  The target label reference in a
    221 	 branch may disappear from the branch (and from the
    222 	 instruction before it) for other reasons, like register
    223 	 allocation.  */
    224 
    225       if (INSN_P (insn))
    226 	{
    227 	  rtx note, next;
    228 
    229 	  for (note = REG_NOTES (insn); note; note = next)
    230 	    {
    231 	      next = XEXP (note, 1);
    232 	      if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
    233 		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
    234 		remove_note (insn, note);
    235 	    }
    236 	}
    237     }
    238 }
    239 
/* A subroutine of mark_all_labels.  Trivially propagate a simple label
   load into a jump_insn that uses it: if PREV_NONJUMP_INSN loads a
   label into a register that JUMP_INSN branches through, make that
   label JUMP_INSN's primary JUMP_LABEL.  */

static void
maybe_propagate_label_ref (rtx_insn *jump_insn, rtx_insn *prev_nonjump_insn)
{
  rtx label_note, pc, pc_src;

  pc = pc_set (jump_insn);
  pc_src = pc != NULL ? SET_SRC (pc) : NULL;
  label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

  /* If the previous non-jump insn sets something to a label,
     something that this jump insn uses, make that label the primary
     target of this insn if we don't yet have any.  That previous
     insn must be a single_set and not refer to more than one label.
     The jump insn must not refer to other labels as jump targets
     and must be a plain (set (pc) ...), maybe in a parallel, and
     may refer to the item being set only directly or as one of the
     arms in an IF_THEN_ELSE.  */

  if (label_note != NULL && pc_src != NULL)
    {
      rtx label_set = single_set (prev_nonjump_insn);
      rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;

      if (label_set != NULL
	  /* The source must be the direct LABEL_REF, not a
	     PLUS, UNSPEC, IF_THEN_ELSE etc.  */
	  && GET_CODE (SET_SRC (label_set)) == LABEL_REF
	  /* The loaded register must be the jump target itself, or
	     one of the two arms of a conditional jump.  */
	  && (rtx_equal_p (label_dest, pc_src)
	      || (GET_CODE (pc_src) == IF_THEN_ELSE
		  && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
		      || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
	{
	  /* The CODE_LABEL referred to in the note must be the
	     CODE_LABEL in the LABEL_REF of the "set".  We can
	     conveniently use it for the marker function, which
	     requires a LABEL_REF wrapping.  */
	  gcc_assert (XEXP (label_note, 0) == label_ref_label (SET_SRC (label_set)));

	  mark_jump_label_1 (label_set, jump_insn, false, true);

	  gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
	}
    }
}
    287 
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  F is the
   first insn of the chain; it is only walked directly when not in
   cfglayout mode.  */

static void
mark_all_labels (rtx_insn *f)
{
  rtx_insn *insn;

  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	{
	  /* In cfglayout mode, we don't bother with trivial next-insn
	     propagation of LABEL_REFs into JUMP_LABEL.  This will be
	     handled by other optimizers using better algorithms.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      gcc_assert (! insn->deleted ());
	      if (NONDEBUG_INSN_P (insn))
	        mark_jump_label (PATTERN (insn), insn, 0);
	    }

	  /* In cfglayout mode, there may be non-insns between the
	     basic blocks.  If those non-insns represent tablejump data,
	     they contain label references that we must record.  */
	  for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	    if (JUMP_TABLE_DATA_P (insn))
	      mark_jump_label (PATTERN (insn), insn, 0);
	  for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	    if (JUMP_TABLE_DATA_P (insn))
	      mark_jump_label (PATTERN (insn), insn, 0);
	}
    }
  else
    {
      /* Plain sequential walk.  PREV_NONJUMP_INSN tracks the most
	 recent non-jump insn so that a jump lacking a JUMP_LABEL can
	 inherit a label loaded by the insn just before it (see
	 maybe_propagate_label_ref).  It is reset at labels, since a
	 label breaks the adjacency the propagation relies on.  */
      rtx_insn *prev_nonjump_insn = NULL;
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (insn->deleted ())
	    ;
	  else if (LABEL_P (insn))
	    prev_nonjump_insn = NULL;
	  else if (JUMP_TABLE_DATA_P (insn))
	    mark_jump_label (PATTERN (insn), insn, 0);
	  else if (NONDEBUG_INSN_P (insn))
	    {
	      mark_jump_label (PATTERN (insn), insn, 0);
	      if (JUMP_P (insn))
		{
		  if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
		    maybe_propagate_label_ref (insn, prev_nonjump_insn);
		}
	      else
		prev_nonjump_insn = insn;
	    }
	}
    }
}
    347 
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we are having CC_MODE compare and we don't
   know whether its source is floating point or integer comparison.  Machine
   description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
   to help this function avoid overhead in these cases.  INSN may be NULL,
   in which case no backward search for the originating compare is done.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
				const_rtx arg1, const rtx_insn *insn)
{
  machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if machine description supplies us way to reverse the
     comparison.  Give it priority over everything else to allow
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    return REVERSE_CONDITION (code, mode);

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    {
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with sane machine description it
         will be never used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* These CONST_CAST's are okay because prev_nonnote_insn just
	 returns its argument and we assign it to a const_rtx
	 variable.  */
      for (rtx_insn *prev = prev_nonnote_insn (const_cast<rtx_insn *> (insn));
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (prev))
	{
	  const_rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      /* Found the originating COMPARE: recover the true
		 operand mode from its operands and stop searching.  */
	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for model
	         of i387 comparisons that first move flag registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If register is clobbered in some ununderstandable way,
	     give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (CONST_INT_P (arg0)
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
    463 
    464 /* A wrapper around the previous function to take COMPARISON as rtx
    465    expression.  This simplifies many callers.  */
    466 enum rtx_code
    467 reversed_comparison_code (const_rtx comparison, const rtx_insn *insn)
    468 {
    469   if (!COMPARISON_P (comparison))
    470     return UNKNOWN;
    471   return reversed_comparison_code_parts (GET_CODE (comparison),
    472 					 XEXP (comparison, 0),
    473 					 XEXP (comparison, 1), insn);
    474 }
    475 
    476 /* Return comparison with reversed code of EXP.
    477    Return NULL_RTX in case we fail to do the reversal.  */
    478 rtx
    479 reversed_comparison (const_rtx exp, machine_mode mode)
    480 {
    481   enum rtx_code reversed_code = reversed_comparison_code (exp, NULL);
    482   if (reversed_code == UNKNOWN)
    483     return NULL_RTX;
    484   else
    485     return simplify_gen_relational (reversed_code, mode, VOIDmode,
    486                                     XEXP (exp, 0), XEXP (exp, 1));
    487 }
    488 
    489 
    490 /* Given an rtx-code for a comparison, return the code for the negated
    492    comparison.  If no such code exists, return UNKNOWN.
    493 
    494    WATCH OUT!  reverse_condition is not safe to use on a jump that might
    495    be acting on the results of an IEEE floating point comparison, because
    496    of the special treatment of non-signaling nans in comparisons.
    497    Use reversed_comparison_code instead.  */
    498 
    499 enum rtx_code
    500 reverse_condition (enum rtx_code code)
    501 {
    502   switch (code)
    503     {
    504     case EQ:
    505       return NE;
    506     case NE:
    507       return EQ;
    508     case GT:
    509       return LE;
    510     case GE:
    511       return LT;
    512     case LT:
    513       return GE;
    514     case LE:
    515       return GT;
    516     case GTU:
    517       return LEU;
    518     case GEU:
    519       return LTU;
    520     case LTU:
    521       return GEU;
    522     case LEU:
    523       return GTU;
    524     case UNORDERED:
    525       return ORDERED;
    526     case ORDERED:
    527       return UNORDERED;
    528 
    529     case UNLT:
    530     case UNLE:
    531     case UNGT:
    532     case UNGE:
    533     case UNEQ:
    534     case LTGT:
    535       return UNKNOWN;
    536 
    537     default:
    538       gcc_unreachable ();
    539     }
    540 }
    541 
    542 /* Similar, but we're allowed to generate unordered comparisons, which
    543    makes it safe for IEEE floating-point.  Of course, we have to recognize
    544    that the target will support them too...  */
    545 
    546 enum rtx_code
    547 reverse_condition_maybe_unordered (enum rtx_code code)
    548 {
    549   switch (code)
    550     {
    551     case EQ:
    552       return NE;
    553     case NE:
    554       return EQ;
    555     case GT:
    556       return UNLE;
    557     case GE:
    558       return UNLT;
    559     case LT:
    560       return UNGE;
    561     case LE:
    562       return UNGT;
    563     case LTGT:
    564       return UNEQ;
    565     case UNORDERED:
    566       return ORDERED;
    567     case ORDERED:
    568       return UNORDERED;
    569     case UNLT:
    570       return GE;
    571     case UNLE:
    572       return GT;
    573     case UNGT:
    574       return LE;
    575     case UNGE:
    576       return LT;
    577     case UNEQ:
    578       return LTGT;
    579 
    580     default:
    581       gcc_unreachable ();
    582     }
    583 }
    584 
    585 /* Similar, but return the code when two operands of a comparison are swapped.
    586    This IS safe for IEEE floating-point.  */
    587 
    588 enum rtx_code
    589 swap_condition (enum rtx_code code)
    590 {
    591   switch (code)
    592     {
    593     case EQ:
    594     case NE:
    595     case UNORDERED:
    596     case ORDERED:
    597     case UNEQ:
    598     case LTGT:
    599       return code;
    600 
    601     case GT:
    602       return LT;
    603     case GE:
    604       return LE;
    605     case LT:
    606       return GT;
    607     case LE:
    608       return GE;
    609     case GTU:
    610       return LTU;
    611     case GEU:
    612       return LEU;
    613     case LTU:
    614       return GTU;
    615     case LEU:
    616       return GEU;
    617     case UNLT:
    618       return UNGT;
    619     case UNLE:
    620       return UNGE;
    621     case UNGT:
    622       return UNLT;
    623     case UNGE:
    624       return UNLE;
    625 
    626     default:
    627       gcc_unreachable ();
    628     }
    629 }
    630 
    631 /* Given a comparison CODE, return the corresponding unsigned comparison.
    632    If CODE is an equality comparison or already an unsigned comparison,
    633    CODE is returned.  */
    634 
    635 enum rtx_code
    636 unsigned_condition (enum rtx_code code)
    637 {
    638   switch (code)
    639     {
    640     case EQ:
    641     case NE:
    642     case GTU:
    643     case GEU:
    644     case LTU:
    645     case LEU:
    646       return code;
    647 
    648     case GT:
    649       return GTU;
    650     case GE:
    651       return GEU;
    652     case LT:
    653       return LTU;
    654     case LE:
    655       return LEU;
    656 
    657     default:
    658       gcc_unreachable ();
    659     }
    660 }
    661 
    662 /* Similarly, return the signed version of a comparison.  */
    663 
    664 enum rtx_code
    665 signed_condition (enum rtx_code code)
    666 {
    667   switch (code)
    668     {
    669     case EQ:
    670     case NE:
    671     case GT:
    672     case GE:
    673     case LT:
    674     case LE:
    675       return code;
    676 
    677     case GTU:
    678       return GT;
    679     case GEU:
    680       return GE;
    681     case LTU:
    682       return LT;
    683     case LEU:
    684       return LE;
    685 
    686     default:
    687       gcc_unreachable ();
    688     }
    689 }
    690 
    691 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
    693    truth of CODE1 implies the truth of CODE2.  */
    694 
    695 int
    696 comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
    697 {
    698   /* UNKNOWN comparison codes can happen as a result of trying to revert
    699      comparison codes.
    700      They can't match anything, so we have to reject them here.  */
    701   if (code1 == UNKNOWN || code2 == UNKNOWN)
    702     return 0;
    703 
    704   if (code1 == code2)
    705     return 1;
    706 
    707   switch (code1)
    708     {
    709     case UNEQ:
    710       if (code2 == UNLE || code2 == UNGE)
    711 	return 1;
    712       break;
    713 
    714     case EQ:
    715       if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
    716 	  || code2 == ORDERED)
    717 	return 1;
    718       break;
    719 
    720     case UNLT:
    721       if (code2 == UNLE || code2 == NE)
    722 	return 1;
    723       break;
    724 
    725     case LT:
    726       if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
    727 	return 1;
    728       break;
    729 
    730     case UNGT:
    731       if (code2 == UNGE || code2 == NE)
    732 	return 1;
    733       break;
    734 
    735     case GT:
    736       if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
    737 	return 1;
    738       break;
    739 
    740     case GE:
    741     case LE:
    742       if (code2 == ORDERED)
    743 	return 1;
    744       break;
    745 
    746     case LTGT:
    747       if (code2 == NE || code2 == ORDERED)
    748 	return 1;
    749       break;
    750 
    751     case LTU:
    752       if (code2 == LEU || code2 == NE)
    753 	return 1;
    754       break;
    755 
    756     case GTU:
    757       if (code2 == GEU || code2 == NE)
    758 	return 1;
    759       break;
    760 
    761     case UNORDERED:
    762       if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
    763 	  || code2 == UNGE || code2 == UNGT)
    764 	return 1;
    765       break;
    766 
    767     default:
    768       break;
    769     }
    770 
    771   return 0;
    772 }
    773 
    774 /* Return 1 if INSN is an unconditional jump and nothing else.  */
    776 
    777 int
    778 simplejump_p (const rtx_insn *insn)
    779 {
    780   return (JUMP_P (insn)
    781 	  && GET_CODE (PATTERN (insn)) == SET
    782 	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
    783 	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
    784 }
    785 
    786 /* Return nonzero if INSN is a (possibly) conditional jump
    787    and nothing more.
    788 
    789    Use of this function is deprecated, since we need to support combined
    790    branch and compare insns.  Use any_condjump_p instead whenever possible.  */
    791 
    792 int
    793 condjump_p (const rtx_insn *insn)
    794 {
    795   const_rtx x = PATTERN (insn);
    796 
    797   if (GET_CODE (x) != SET
    798       || GET_CODE (SET_DEST (x)) != PC)
    799     return 0;
    800 
    801   x = SET_SRC (x);
    802   if (GET_CODE (x) == LABEL_REF)
    803     return 1;
    804   else
    805     return (GET_CODE (x) == IF_THEN_ELSE
    806 	    && ((GET_CODE (XEXP (x, 2)) == PC
    807 		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
    808 		     || ANY_RETURN_P (XEXP (x, 1))))
    809 		|| (GET_CODE (XEXP (x, 1)) == PC
    810 		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
    811 			|| ANY_RETURN_P (XEXP (x, 2))))));
    812 }
    813 
    814 /* Return nonzero if INSN is a (possibly) conditional jump inside a
    815    PARALLEL.
    816 
    817    Use this function is deprecated, since we need to support combined
    818    branch and compare insns.  Use any_condjump_p instead whenever possible.  */
    819 
    820 int
    821 condjump_in_parallel_p (const rtx_insn *insn)
    822 {
    823   const_rtx x = PATTERN (insn);
    824 
    825   if (GET_CODE (x) != PARALLEL)
    826     return 0;
    827   else
    828     x = XVECEXP (x, 0, 0);
    829 
    830   if (GET_CODE (x) != SET)
    831     return 0;
    832   if (GET_CODE (SET_DEST (x)) != PC)
    833     return 0;
    834   if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    835     return 1;
    836   if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    837     return 0;
    838   if (XEXP (SET_SRC (x), 2) == pc_rtx
    839       && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
    840 	  || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
    841     return 1;
    842   if (XEXP (SET_SRC (x), 1) == pc_rtx
    843       && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
    844 	  || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
    845     return 1;
    846   return 0;
    847 }
    848 
    849 /* Return set of PC, otherwise NULL.  */
    850 
    851 rtx
    852 pc_set (const rtx_insn *insn)
    853 {
    854   rtx pat;
    855   if (!JUMP_P (insn))
    856     return NULL_RTX;
    857   pat = PATTERN (insn);
    858 
    859   /* The set is allowed to appear either as the insn pattern or
    860      the first set in a PARALLEL, UNSPEC or UNSPEC_VOLATILE.  */
    861   switch (GET_CODE (pat))
    862     {
    863     case PARALLEL:
    864     case UNSPEC:
    865     case UNSPEC_VOLATILE:
    866       pat = XVECEXP (pat, 0, 0);
    867       break;
    868     default:
    869       break;
    870     }
    871   if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    872     return pat;
    873 
    874   return NULL_RTX;
    875 }
    876 
    877 /* Return true when insn is an unconditional direct jump,
    878    possibly bundled inside a PARALLEL, UNSPEC or UNSPEC_VOLATILE.
    879    The instruction may have various other effects so before removing the jump
    880    you must verify onlyjump_p.  */
    881 
    882 int
    883 any_uncondjump_p (const rtx_insn *insn)
    884 {
    885   const_rtx x = pc_set (insn);
    886   if (!x)
    887     return 0;
    888   if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    889     return 0;
    890   if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    891     return 0;
    892   return 1;
    893 }
    894 
    895 /* Return true when insn is a conditional jump.  This function works for
    896    instructions containing PC sets in PARALLELs, UNSPECs or UNSPEC_VOLATILEs.
    897    The instruction may have various other effects so before removing the jump
    898    you must verify onlyjump_p.
    899 
    900    Note that unlike condjump_p it returns false for unconditional jumps.  */
    901 
    902 int
    903 any_condjump_p (const rtx_insn *insn)
    904 {
    905   const_rtx x = pc_set (insn);
    906   enum rtx_code a, b;
    907 
    908   if (!x)
    909     return 0;
    910   if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    911     return 0;
    912 
    913   a = GET_CODE (XEXP (SET_SRC (x), 1));
    914   b = GET_CODE (XEXP (SET_SRC (x), 2));
    915 
    916   return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
    917 	  || (a == PC
    918 	      && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
    919 }
    920 
    921 /* Return the label of a conditional jump.  */
    922 
    923 rtx
    924 condjump_label (const rtx_insn *insn)
    925 {
    926   rtx x = pc_set (insn);
    927 
    928   if (!x)
    929     return NULL_RTX;
    930   x = SET_SRC (x);
    931   if (GET_CODE (x) == LABEL_REF)
    932     return x;
    933   if (GET_CODE (x) != IF_THEN_ELSE)
    934     return NULL_RTX;
    935   if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    936     return XEXP (x, 1);
    937   if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    938     return XEXP (x, 2);
    939   return NULL_RTX;
    940 }
    941 
/* Return TRUE if INSN is a return jump: a jump whose pattern contains
   a RETURN, SIMPLE_RETURN or EH_RETURN rtx anywhere, or a SET marked
   with SET_IS_RETURN_P.  */

int
returnjump_p (const rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      /* Walk every sub-rtx of the pattern looking for a return form.  */
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
	{
	  const_rtx x = *iter;
	  switch (GET_CODE (x))
	    {
	    case RETURN:
	    case SIMPLE_RETURN:
	    case EH_RETURN:
	      return true;

	    case SET:
	      if (SET_IS_RETURN_P (x))
		return true;
	      break;

	    default:
	      break;
	    }
	}
    }
  return false;
}
    972 
    973 /* Return true if INSN is a (possibly conditional) return insn.  */
    974 
    975 int
    976 eh_returnjump_p (rtx_insn *insn)
    977 {
    978   if (JUMP_P (insn))
    979     {
    980       subrtx_iterator::array_type array;
    981       FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
    982 	if (GET_CODE (*iter) == EH_RETURN)
    983 	  return true;
    984     }
    985   return false;
    986 }
    987 
    988 /* Return true if INSN is a jump that only transfers control and
    989    nothing more.  */
    990 
    991 int
    992 onlyjump_p (const rtx_insn *insn)
    993 {
    994   rtx set;
    995 
    996   if (!JUMP_P (insn))
    997     return 0;
    998 
    999   set = single_set (insn);
   1000   if (set == NULL)
   1001     return 0;
   1002   if (GET_CODE (SET_DEST (set)) != PC)
   1003     return 0;
   1004   if (side_effects_p (SET_SRC (set)))
   1005     return 0;
   1006 
   1007   return 1;
   1008 }
   1009 
   1010 /* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
   1011    NULL or a return.  */
   1012 bool
   1013 jump_to_label_p (const rtx_insn *insn)
   1014 {
   1015   return (JUMP_P (insn)
   1016 	  && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
   1017 }
   1018 
   1019 /* Find all CODE_LABELs referred to in X, and increment their use
   1021    counts.  If INSN is a JUMP_INSN and there is at least one
   1022    CODE_LABEL referenced in INSN as a jump target, then store the last
   1023    one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
   1024    for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
   1025    notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
   1026    a JUMP_INSN, and there is at least one CODE_LABEL referenced in
   1027    INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
   1028    For returnjumps, the JUMP_LABEL will also be set as appropriate.
   1029 
   1030    Note that two labels separated by a loop-beginning note
   1031    must be kept distinct if we have not yet done loop-optimization,
   1032    because the gap between them is where loop-optimize
   1033    will want to move invariant code to.  CROSS_JUMP tells us
   1034    that loop-optimization is done with.  */
   1035 
   1036 void
   1037 mark_jump_label (rtx x, rtx_insn *insn, int in_mem)
   1038 {
   1039   rtx asmop = extract_asm_operands (x);
   1040   if (asmop)
   1041     mark_jump_label_asm (asmop, insn);
   1042   else
   1043     mark_jump_label_1 (x, insn, in_mem != 0,
   1044 		       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
   1045 }
   1046 
/* Worker function for mark_jump_label.  X is the rtx being scanned and
   INSN is the insn containing it (NULL while scanning dispatch-table
   entries).  IN_MEM is TRUE when X occurs within a (MEM ...).
   IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.  */

static void
mark_jump_label_1 (rtx x, rtx_insn *insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* These codes cannot contain a label reference; nothing to do.  */
    case PC:
    case REG:
    case CLOBBER:
    case CALL:
      return;

    case RETURN:
    case SIMPLE_RETURN:
      /* A return used as a jump target goes straight into JUMP_LABEL;
	 it must not overwrite a different, previously recorded target.  */
      if (is_target)
	{
	  gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
	  JUMP_LABEL (insn) = x;
	}
      return;

    case MEM:
      /* Remember we are inside a MEM for the SYMBOL_REF case below.  */
      in_mem = true;
      break;

    case SEQUENCE:
      {
	/* A delay-slot SEQUENCE: mark each member insn independently.  */
	rtx_sequence *seq = as_a <rtx_sequence *> (x);
	for (i = 0; i < seq->len (); i++)
	  mark_jump_label (PATTERN (seq->insn (i)),
			   seq->insn (i), 0);
      }
      return;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
	 non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
	break;
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
	rtx_insn *label = label_ref_label (x);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	set_label_ref_label (x, label);
	/* References from deleted insns do not count as uses.  */
	if (! insn || ! insn->deleted ())
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (is_target
		/* Do not change a previous setting of JUMP_LABEL.  If the
		   JUMP_LABEL slot is occupied by a different label,
		   create a note for this label.  */
		&& (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		enum reg_note kind
		  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

		/* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
		   for LABEL unless there already is one.  All uses of
		   a label, except for the primary target of a jump,
		   must have such a note.  */
		if (! find_reg_note (insn, kind, label))
		  add_reg_note (insn, kind, label);
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! insn->deleted ())
	{
	  /* Operand 0 of an ADDR_DIFF_VEC is the base label, not a use.  */
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL, in_mem,
			       is_target);
	}
      return;

    default:
      break;
    }

  /* Generic walk over the remaining codes' operands.  */
  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
			       is_target);
	}
    }
}
   1190 
   1191 /* Worker function for mark_jump_label.  Handle asm insns specially.
   1192    In particular, output operands need not be considered so we can
   1193    avoid re-scanning the replicated asm_operand.  Also, the asm_labels
   1194    need to be considered targets.  */
   1195 
   1196 static void
   1197 mark_jump_label_asm (rtx asmop, rtx_insn *insn)
   1198 {
   1199   int i;
   1200 
   1201   for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
   1202     mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);
   1203 
   1204   for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
   1205     mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
   1206 }
   1207 
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this instruction is deprecated.  Use delete_insn instead and
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx_insn *
delete_related_insns (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);

  /* Find the first not-yet-deleted successor; it is the return value
     candidate and also the insn the barrier check below looks at.  */
  while (next && next->deleted ())
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (insn->deleted ())
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (jump_to_label_p (insn))
    {
      rtx lab = JUMP_LABEL (insn);
      rtx_jump_table_data *lab_next;

      /* delete_insn above already dropped the label's use count.  */
      if (LABEL_NUSES (lab) == 0)
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec labels = table->get_labels ();
      int i;
      int len = GET_NUM_ELEM (labels);

      /* Drop every table entry whose label just became unused.  */
      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (RTVEC_ELT (labels, i), 0)) == 0)
	  delete_related_insns (XEXP (RTVEC_ELT (labels, i), 0));
      /* The recursive calls above may have deleted NEXT; re-skip.  */
      while (next && next->deleted ())
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
	   || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  /* Find the last surviving non-note predecessor, used below to decide
     whether INSN's successors just became unreachable.  */
  while (prev && (prev->deleted () || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && next->deleted ())
	    next = NEXT_INSN (next);
	  /* Keep the (use (insn))s created by dbr_schedule, which needs
	     them in order to track liveness relative to a previous
	     barrier.  */
	  else if (INSN_P (next)
		   && GET_CODE (PATTERN (next)) == USE
		   && INSN_P (XEXP (PATTERN (next), 0)))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && next->deleted ())
    next = NEXT_INSN (next);
  return next;
}
   1341 
   1342 /* Delete a range of insns from FROM to TO, inclusive.
   1344    This is for the sake of peephole optimization, so assume
   1345    that whatever these insns do will still be done by a new
   1346    peephole insn that will replace them.  */
   1347 
   1348 void
   1349 delete_for_peephole (rtx_insn *from, rtx_insn *to)
   1350 {
   1351   rtx_insn *insn = from;
   1352 
   1353   while (1)
   1354     {
   1355       rtx_insn *next = NEXT_INSN (insn);
   1356       rtx_insn *prev = PREV_INSN (insn);
   1357 
   1358       if (!NOTE_P (insn))
   1359 	{
   1360 	  insn->set_deleted();
   1361 
   1362 	  /* Patch this insn out of the chain.  */
   1363 	  /* We don't do this all at once, because we
   1364 	     must preserve all NOTEs.  */
   1365 	  if (prev)
   1366 	    SET_NEXT_INSN (prev) = next;
   1367 
   1368 	  if (next)
   1369 	    SET_PREV_INSN (next) = prev;
   1370 	}
   1371 
   1372       if (insn == to)
   1373 	break;
   1374       insn = next;
   1375     }
   1376 
   1377   /* Note that if TO is an unconditional jump
   1378      we *do not* delete the BARRIER that follows,
   1379      since the peephole that replaces this sequence
   1380      is also an unconditional jump in that case.  */
   1381 }
   1382 
   1383 /* A helper function for redirect_exp_1; examines its input X and returns
   1385    either a LABEL_REF around a label, or a RETURN if X was NULL.  */
   1386 static rtx
   1387 redirect_target (rtx x)
   1388 {
   1389   if (x == NULL_RTX)
   1390     return ret_rtx;
   1391   if (!ANY_RETURN_P (x))
   1392     return gen_rtx_LABEL_REF (Pmode, x);
   1393   return x;
   1394 }
   1395 
/* Throughout *LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group
   (validate_change in-group); the caller applies or cancels them.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx_insn *insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  /* A direct reference to OLABEL: either a LABEL_REF wrapping it, or
     OLABEL itself (e.g. a RETURN/SIMPLE_RETURN rtx).  */
  if ((code == LABEL_REF && label_ref_label (x) == olabel)
      || x == olabel)
    {
      x = redirect_target (nlabel);
      /* A bare LABEL_REF cannot stand as a whole insn pattern; wrap it
	 in a (set (pc) ...) to keep the insn well-formed.  */
      if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
 	x = gen_rtx_SET (pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* Redirecting (set (pc) (label_ref OLABEL)) to a return: the whole
     SET is replaced by the return rtx.  */
  if (code == SET && SET_DEST (x) == pc_rtx
      && ANY_RETURN_P (nlabel)
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && label_ref_label (SET_SRC (x)) == olabel)
    {
      validate_change (insn, loc, nlabel, 1);
      return;
    }

  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not eventual label comparisons.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

  /* Otherwise recurse into all sub-rtxes.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
   1448 
   1449 /* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   1450    the modifications into the change group.  Return false if we did
   1451    not see how to do that.  */
   1452 
   1453 int
   1454 redirect_jump_1 (rtx_insn *jump, rtx nlabel)
   1455 {
   1456   int ochanges = num_validated_changes ();
   1457   rtx *loc, asmop;
   1458 
   1459   gcc_assert (nlabel != NULL_RTX);
   1460   asmop = extract_asm_operands (PATTERN (jump));
   1461   if (asmop)
   1462     {
   1463       if (nlabel == NULL)
   1464 	return 0;
   1465       gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
   1466       loc = &ASM_OPERANDS_LABEL (asmop, 0);
   1467     }
   1468   else if (GET_CODE (PATTERN (jump)) == PARALLEL)
   1469     loc = &XVECEXP (PATTERN (jump), 0, 0);
   1470   else
   1471     loc = &PATTERN (jump);
   1472 
   1473   redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
   1474   return num_validated_changes () > ochanges;
   1475 }
   1476 
   1477 /* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   1478    jump target label is unused as a result, it and the code following
   1479    it may be deleted.
   1480 
   1481    Normally, NLABEL will be a label, but it may also be a RETURN rtx;
   1482    in that case we are to turn the jump into a (possibly conditional)
   1483    return insn.
   1484 
   1485    The return value will be 1 if the change was made, 0 if it wasn't
   1486    (this can only occur when trying to produce return insns).  */
   1487 
   1488 int
   1489 redirect_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
   1490 {
   1491   rtx olabel = jump->jump_label ();
   1492 
   1493   if (!nlabel)
   1494     {
   1495       /* If there is no label, we are asked to redirect to the EXIT block.
   1496 	 When before the epilogue is emitted, return/simple_return cannot be
   1497 	 created so we return 0 immediately.  After the epilogue is emitted,
   1498 	 we always expect a label, either a non-null label, or a
   1499 	 return/simple_return RTX.  */
   1500 
   1501       if (!epilogue_completed)
   1502 	return 0;
   1503       gcc_unreachable ();
   1504     }
   1505 
   1506   if (nlabel == olabel)
   1507     return 1;
   1508 
   1509   if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
   1510     return 0;
   1511 
   1512   redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
   1513   return 1;
   1514 }
   1515 
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
   count has dropped to zero.  INVERT is nonzero when the caller also
   inverted the branch condition (see invert_jump).  */
void
redirect_jump_2 (rtx_jump_insn *jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* A negative DELETE_UNUSED used to request special behavior when
     moving the FUNCTION_END note.  Sanity-check that no caller still
     relies on that.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  /* Returns are not labels and carry no use count.  */
  if (!ANY_RETURN_P (nlabel))
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      /* If the note cannot be kept accurate, drop it rather than leave
	 a stale equivalence.  */
      if (ANY_RETURN_P (nlabel)
	  || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  /* Handle the case where we had a conditional crossing jump to a return
     label and are now changing it into a direct conditional return.
     The jump is no longer crossing in that case.  */
  if (ANY_RETURN_P (nlabel))
    CROSSING_JUMP_P (jump) = 0;

  if (!ANY_RETURN_P (olabel)
      && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}
   1563 
   1564 /* Invert the jump condition X contained in jump insn INSN.  Accrue the
   1565    modifications into the change group.  Return nonzero for success.  */
   1566 static int
   1567 invert_exp_1 (rtx x, rtx_insn *insn)
   1568 {
   1569   RTX_CODE code = GET_CODE (x);
   1570 
   1571   if (code == IF_THEN_ELSE)
   1572     {
   1573       rtx comp = XEXP (x, 0);
   1574       rtx tem;
   1575       enum rtx_code reversed_code;
   1576 
   1577       /* We can do this in two ways:  The preferable way, which can only
   1578 	 be done if this is not an integer comparison, is to reverse
   1579 	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
   1580 	 of the IF_THEN_ELSE.  If we can't do either, fail.  */
   1581 
   1582       reversed_code = reversed_comparison_code (comp, insn);
   1583 
   1584       if (reversed_code != UNKNOWN)
   1585 	{
   1586 	  validate_change (insn, &XEXP (x, 0),
   1587 			   gen_rtx_fmt_ee (reversed_code,
   1588 					   GET_MODE (comp), XEXP (comp, 0),
   1589 					   XEXP (comp, 1)),
   1590 			   1);
   1591 	  return 1;
   1592 	}
   1593 
   1594       tem = XEXP (x, 1);
   1595       validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
   1596       validate_change (insn, &XEXP (x, 2), tem, 1);
   1597       return 1;
   1598     }
   1599   else
   1600     return 0;
   1601 }
   1602 
   1603 /* Invert the condition of the jump JUMP, and make it jump to label
   1604    NLABEL instead of where it jumps now.  Accrue changes into the
   1605    change group.  Return false if we didn't see how to perform the
   1606    inversion and redirection.  */
   1607 
   1608 int
   1609 invert_jump_1 (rtx_jump_insn *jump, rtx nlabel)
   1610 {
   1611   rtx x = pc_set (jump);
   1612   int ochanges;
   1613   int ok;
   1614 
   1615   ochanges = num_validated_changes ();
   1616   if (x == NULL)
   1617     return 0;
   1618   ok = invert_exp_1 (SET_SRC (x), jump);
   1619   gcc_assert (ok);
   1620 
   1621   if (num_validated_changes () == ochanges)
   1622     return 0;
   1623 
   1624   /* redirect_jump_1 will fail of nlabel == olabel, and the current use is
   1625      in Pmode, so checking this is not merely an optimization.  */
   1626   return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
   1627 }
   1628 
   1629 /* Invert the condition of the jump JUMP, and make it jump to label
   1630    NLABEL instead of where it jumps now.  Return true if successful.  */
   1631 
   1632 int
   1633 invert_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
   1634 {
   1635   rtx olabel = JUMP_LABEL (jump);
   1636 
   1637   if (invert_jump_1 (jump, nlabel) && apply_change_group ())
   1638     {
   1639       redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
   1640       return 1;
   1641     }
   1642   cancel_changes (0);
   1643   return 0;
   1644 }
   1645 
   1646 
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  /* Registers (possibly under SUBREGs) compare by their post-renumbering
     hard register numbers and subreg byte offsets.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
				  && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      poly_int64 byte_x = 0, byte_y = 0;
      struct subreg_info info;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      /* Fold the subreg offset into the hard register number
		 when the subreg is representable as one.  */
	      subreg_get_info (reg_renumber[reg_x],
			       GET_MODE (SUBREG_REG (x)), byte_x,
			       GET_MODE (x), &info);
	      if (!info.representable_p)
		return 0;
	      reg_x = info.offset;
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_y],
			       GET_MODE (SUBREG_REG (y)), byte_y,
			       GET_MODE (y), &info);
	      if (!info.representable_p)
		return 0;
	      reg_y = info.offset;
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && known_eq (byte_x, byte_y);
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    CASE_CONST_UNIQUE:
      /* Unique rtxes would have matched the X == Y test above.  */
      return 0;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      break;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return label_ref_label (x) == label_ref_label (y);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      else
	{
	  rtx_insn *xi = next_nonnote_nondebug_insn (label_ref_label (x));
	  rtx_insn *yi = next_nonnote_nondebug_insn (label_ref_label (y));
	  while (xi && LABEL_P (xi))
	    xi = next_nonnote_nondebug_insn (xi);
	  while (yi && LABEL_P (yi))
	    yi = next_nonnote_nondebug_insn (yi);
	  return xi == yi;
	}

    case SYMBOL_REF:
      /* Symbol strings are shared, so pointer comparison suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    {
	      /* NOTE(review): these operands appear to be the source
		 location fields of ASM_OPERANDS/ASM_INPUT, which need
		 not match — confirm against rtl.def.  */
	      if (((code == ASM_OPERANDS && i == 6)
		   || (code == ASM_INPUT && i == 1)))
		break;
	      return 0;
	    }
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return 0;
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
   1862 
   1863 /* If X is a hard register or equivalent to one or a subregister of one,
   1865    return the hard register number.  If X is a pseudo register that was not
   1866    assigned a hard register, return the pseudo register number.  Otherwise,
   1867    return -1.  Any rtx is valid for X.  */
   1868 
   1869 int
   1870 true_regnum (const_rtx x)
   1871 {
   1872   if (REG_P (x))
   1873     {
   1874       if (REGNO (x) >= FIRST_PSEUDO_REGISTER
   1875 	  && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
   1876 	return reg_renumber[REGNO (x)];
   1877       return REGNO (x);
   1878     }
   1879   if (GET_CODE (x) == SUBREG)
   1880     {
   1881       int base = true_regnum (SUBREG_REG (x));
   1882       if (base >= 0
   1883 	  && base < FIRST_PSEUDO_REGISTER)
   1884 	{
   1885 	  struct subreg_info info;
   1886 
   1887 	  subreg_get_info (lra_in_progress
   1888 			   ? (unsigned) base : REGNO (SUBREG_REG (x)),
   1889 			   GET_MODE (SUBREG_REG (x)),
   1890 			   SUBREG_BYTE (x), GET_MODE (x), &info);
   1891 
   1892 	  if (info.representable_p)
   1893 	    return base + info.offset;
   1894 	}
   1895     }
   1896   return -1;
   1897 }
   1898 
   1899 /* Return regno of the register REG and handle subregs too.  */
   1900 unsigned int
   1901 reg_or_subregno (const_rtx reg)
   1902 {
   1903   if (GET_CODE (reg) == SUBREG)
   1904     reg = SUBREG_REG (reg);
   1905   gcc_assert (REG_P (reg));
   1906   return REGNO (reg);
   1907 }
   1908