Home | History | Annotate | Line # | Download | only in gcc
combine-stack-adj.cc revision 1.1.1.1
      1 /* Combine stack adjustments.
      2    Copyright (C) 1987-2022 Free Software Foundation, Inc.
      3 
      4 This file is part of GCC.
      5 
      6 GCC is free software; you can redistribute it and/or modify it under
      7 the terms of the GNU General Public License as published by the Free
      8 Software Foundation; either version 3, or (at your option) any later
      9 version.
     10 
     11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
     13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     14 for more details.
     15 
     16 You should have received a copy of the GNU General Public License
     17 along with GCC; see the file COPYING3.  If not see
     18 <http://www.gnu.org/licenses/>.  */
     19 
     20 /* Track stack adjustments and stack memory references.  Attempt to
     21    reduce the number of stack adjustments by back-propagating across
     22    the memory references.
     23 
     24    This is intended primarily for use with targets that do not define
     25    ACCUMULATE_OUTGOING_ARGS.  It is of significantly more value to
     26    targets that define PREFERRED_STACK_BOUNDARY more aligned than
     27    STACK_BOUNDARY (e.g. x86), or if not all registers can be pushed
     28    (e.g. x86 fp regs) which would ordinarily have to be implemented
     29    as a sub/mov pair due to restrictions in calls.cc.
     30 
     31    Propagation stops when any of the insns that need adjusting are
     32    (a) no longer valid because we've exceeded their range, (b) a
     33    non-trivial push instruction, or (c) a call instruction.
     34 
     35    Restriction B is based on the assumption that push instructions
     36    are smaller or faster.  If a port really wants to remove all
     37    pushes, it should have defined ACCUMULATE_OUTGOING_ARGS.  The
     38    one exception that is made is for an add immediately followed
     39    by a push.  */
     40 
     41 #include "config.h"
     42 #include "system.h"
     43 #include "coretypes.h"
     44 #include "backend.h"
     45 #include "rtl.h"
     46 #include "df.h"
     47 #include "insn-config.h"
     48 #include "memmodel.h"
     49 #include "emit-rtl.h"
     50 #include "recog.h"
     51 #include "cfgrtl.h"
     52 #include "tree-pass.h"
     53 #include "rtl-iter.h"
     54 
     55 
/* This structure records two kinds of stack references between stack
   adjusting instructions: stack references in memory addresses for
   regular insns and all stack references for debug insns.  */

struct csa_reflist
{
  /* Constant offset from the stack pointer inside the recorded reference;
     zero when the reference is (mem (sp)) or the stack pointer itself.  */
  HOST_WIDE_INT sp_offset;
  /* The insn containing the reference.  */
  rtx_insn *insn;
  /* Address of the reference inside INSN's pattern, so it can be
     rewritten in place via validate_change.  */
  rtx *ref;
  /* Next element of the singly-linked list.  */
  struct csa_reflist *next;
};
     68 
/* Forward declarations for the static helpers defined below.  */
static int stack_memref_p (rtx);
static rtx single_set_for_csa (rtx_insn *);
static void free_csa_reflist (struct csa_reflist *);
static struct csa_reflist *record_one_stack_ref (rtx_insn *, rtx *,
						 struct csa_reflist *);
static bool try_apply_stack_adjustment (rtx_insn *, struct csa_reflist *,
					HOST_WIDE_INT, HOST_WIDE_INT,
					bitmap, rtx_insn *);
static void combine_stack_adjustments_for_block (basic_block, bitmap);
     78 
     79 
     80 /* Main entry point for stack adjustment combination.  */
     81 
     82 static void
     83 combine_stack_adjustments (void)
     84 {
     85   basic_block bb;
     86   bitmap live = BITMAP_ALLOC (&reg_obstack);
     87 
     88   FOR_EACH_BB_FN (bb, cfun)
     89     combine_stack_adjustments_for_block (bb, live);
     90 
     91   BITMAP_FREE (live);
     92 }
     93 
     94 /* Recognize a MEM of the form (sp) or (plus sp const).  */
     95 
     96 static int
     97 stack_memref_p (rtx x)
     98 {
     99   if (!MEM_P (x))
    100     return 0;
    101   x = XEXP (x, 0);
    102 
    103   if (x == stack_pointer_rtx)
    104     return 1;
    105   if (GET_CODE (x) == PLUS
    106       && XEXP (x, 0) == stack_pointer_rtx
    107       && CONST_INT_P (XEXP (x, 1)))
    108     return 1;
    109 
    110   return 0;
    111 }
    112 
    113 /* Recognize either normal single_set or the hack in i386.md for
    114    tying fp and sp adjustments.  */
    115 
    116 static rtx
    117 single_set_for_csa (rtx_insn *insn)
    118 {
    119   int i;
    120   rtx tmp = single_set (insn);
    121   if (tmp)
    122     return tmp;
    123 
    124   if (!NONJUMP_INSN_P (insn)
    125       || GET_CODE (PATTERN (insn)) != PARALLEL)
    126     return NULL_RTX;
    127 
    128   tmp = PATTERN (insn);
    129   if (GET_CODE (XVECEXP (tmp, 0, 0)) != SET)
    130     return NULL_RTX;
    131 
    132   for (i = 1; i < XVECLEN (tmp, 0); ++i)
    133     {
    134       rtx this_rtx = XVECEXP (tmp, 0, i);
    135 
    136       /* The special case is allowing a no-op set.  */
    137       if (GET_CODE (this_rtx) == SET
    138 	  && SET_SRC (this_rtx) == SET_DEST (this_rtx))
    139 	;
    140       else if (GET_CODE (this_rtx) != CLOBBER
    141 	       && GET_CODE (this_rtx) != USE)
    142 	return NULL_RTX;
    143     }
    144 
    145   return XVECEXP (tmp, 0, 0);
    146 }
    147 
    148 /* Free the list of csa_reflist nodes.  */
    149 
    150 static void
    151 free_csa_reflist (struct csa_reflist *reflist)
    152 {
    153   struct csa_reflist *next;
    154   for (; reflist ; reflist = next)
    155     {
    156       next = reflist->next;
    157       free (reflist);
    158     }
    159 }
    160 
    161 /* Create a new csa_reflist node from the given stack reference.
    162    It is already known that the reference is either a MEM satisfying the
    163    predicate stack_memref_p or a REG representing the stack pointer.  */
    164 
    165 static struct csa_reflist *
    166 record_one_stack_ref (rtx_insn *insn, rtx *ref, struct csa_reflist *next_reflist)
    167 {
    168   struct csa_reflist *ml;
    169 
    170   ml = XNEW (struct csa_reflist);
    171 
    172   if (REG_P (*ref) || XEXP (*ref, 0) == stack_pointer_rtx)
    173     ml->sp_offset = 0;
    174   else
    175     ml->sp_offset = INTVAL (XEXP (XEXP (*ref, 0), 1));
    176 
    177   ml->insn = insn;
    178   ml->ref = ref;
    179   ml->next = next_reflist;
    180 
    181   return ml;
    182 }
    183 
/* We only know how to adjust the CFA; no other frame-related changes
   may appear in any insn to be deleted.  Return true iff INSN is safe
   in that sense: either it is not frame-related at all, or its only
   CFA-related note is REG_CFA_ADJUST_CFA.  */

static bool
no_unhandled_cfa (rtx_insn *insn)
{
  if (!RTX_FRAME_RELATED_P (insn))
    return true;

  /* No CFA notes at all is a legacy interpretation like
     FRAME_RELATED_EXPR, and is context sensitive within
     the prologue state machine.  We can't handle that here.  */
  bool has_cfa_adjust = false;

  for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
    switch (REG_NOTE_KIND (link))
      {
      default:
        break;
      case REG_CFA_ADJUST_CFA:
	/* The one note kind we know how to rewrite.  */
	has_cfa_adjust = true;
	break;

      /* Any other CFA-related note carries frame information we do not
	 know how to update, so reject the insn outright.  */
      case REG_FRAME_RELATED_EXPR:
      case REG_CFA_DEF_CFA:
      case REG_CFA_OFFSET:
      case REG_CFA_REGISTER:
      case REG_CFA_EXPRESSION:
      case REG_CFA_RESTORE:
      case REG_CFA_SET_VDRAP:
      case REG_CFA_WINDOW_SAVE:
      case REG_CFA_FLUSH_QUEUE:
      case REG_CFA_TOGGLE_RA_MANGLE:
	return false;
      }

  return has_cfa_adjust;
}
    222 
/* Attempt to apply ADJUST to the stack adjusting insn INSN, as well
   as each of the memories and stack references in REFLIST.  Return true
   on success.

   NEW_ADJUST is the combined constant the sp addition in INSN should
   use afterwards; DELTA is the amount by which every recorded stack
   reference's offset must shift.  LIVE and OTHER_INSN are only used in
   the postreload "sp = reg" case: OTHER_INSN supplies the sp+const
   pattern to copy and LIVE is the register liveness at that point.
   All changes are queued with validate_change (in_group = 1) and
   committed or rolled back atomically by apply_change_group.  */

static bool
try_apply_stack_adjustment (rtx_insn *insn, struct csa_reflist *reflist,
			    HOST_WIDE_INT new_adjust, HOST_WIDE_INT delta,
			    bitmap live, rtx_insn *other_insn)
{
  struct csa_reflist *ml;
  rtx set;
  bool remove_equal = false;

  set = single_set_for_csa (insn);
  if (MEM_P (SET_DEST (set)))
    /* INSN is a push being turned into a plain store: retarget the
       destination address to the bare stack pointer.  */
    validate_change (insn, &SET_DEST (set),
		     replace_equiv_address (SET_DEST (set), stack_pointer_rtx),
		     1);
  else if (REG_P (SET_SRC (set)))
    {
      /* INSN is a postreload "sp = reg" copy; we can only adjust it by
	 replacing its whole pattern with OTHER_INSN's sp+const form.  */
      if (other_insn == NULL_RTX || live == NULL)
	return false;
      rtx other_set = single_set_for_csa (other_insn);
      if (SET_DEST (other_set) != stack_pointer_rtx
	  || GET_CODE (SET_SRC (other_set)) != PLUS
	  || XEXP (SET_SRC (other_set), 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (SET_SRC (other_set), 1)))
	return false;
      if (PATTERN (other_insn) != other_set)
	{
	  /* OTHER_INSN's pattern may be a PARALLEL; any extra elements
	     must be clobbers of hard registers that are dead here,
	     otherwise copying the pattern would change semantics.  */
	  if (GET_CODE (PATTERN (other_insn)) != PARALLEL)
	    return false;
	  int i;
	  rtx p = PATTERN (other_insn);
	  for (i = 0; i < XVECLEN (p, 0); ++i)
	    {
	      rtx this_rtx = XVECEXP (p, 0, i);
	      if (this_rtx == other_set)
		continue;
	      if (GET_CODE (this_rtx) != CLOBBER)
		return false;
	      if (!REG_P (XEXP (this_rtx, 0))
		  || !HARD_REGISTER_P (XEXP (this_rtx, 0)))
		return false;
	      unsigned int end_regno = END_REGNO (XEXP (this_rtx, 0));
	      for (unsigned int regno = REGNO (XEXP (this_rtx, 0));
		   regno < end_regno; ++regno)
		if (bitmap_bit_p (live, regno))
		  return false;
	    }
	}
      validate_change (insn, &PATTERN (insn), copy_rtx (PATTERN (other_insn)),
		       1);
      /* Re-fetch the SET from the freshly copied pattern before
	 patching in the combined constant.  */
      set = single_set_for_csa (insn);
      validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (new_adjust),
		       1);
      /* The old REG_EQUAL note describes the replaced copy and would
	 now be wrong.  */
      remove_equal = true;
    }
  else
    /* Ordinary sp = sp + const: just replace the constant.  */
    validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (new_adjust), 1);

  /* Shift every recorded stack reference by DELTA.  */
  for (ml = reflist; ml ; ml = ml->next)
    {
      rtx new_addr = plus_constant (Pmode, stack_pointer_rtx,
				    ml->sp_offset - delta);
      rtx new_val;

      if (MEM_P (*ml->ref))
	new_val = replace_equiv_address_nv (*ml->ref, new_addr);
      else if (GET_MODE (*ml->ref) == GET_MODE (stack_pointer_rtx))
	new_val = new_addr;
      else
	/* A debug reference to sp in a narrower mode.  */
	new_val = lowpart_subreg (GET_MODE (*ml->ref), new_addr,
				  GET_MODE (new_addr));
      validate_change (ml->insn, ml->ref, new_val, 1);
    }

  if (apply_change_group ())
    {
      /* Succeeded.  Update our knowledge of the stack references.  */
      for (ml = reflist; ml ; ml = ml->next)
	ml->sp_offset -= delta;

      if (remove_equal)
	remove_reg_equal_equiv_notes (insn);
      return true;
    }
  else
    return false;
}
    313 
/* For non-debug insns, record all stack memory references in INSN
   and return true if there were no other (unrecorded) references to the
   stack pointer.  For debug insns, record all stack references regardless
   of context and unconditionally return true.  New references are pushed
   onto *REFLIST.  */

static bool
record_stack_refs (rtx_insn *insn, struct csa_reflist **reflist)
{
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, &PATTERN (insn), NONCONST)
    {
      rtx *loc = *iter;
      rtx x = *loc;
      switch (GET_CODE (x))
	{
	case MEM:
	  if (!reg_mentioned_p (stack_pointer_rtx, x))
	    /* No sp anywhere inside: nothing to record below here.  */
	    iter.skip_subrtxes ();
	  /* We are not able to handle correctly all possible memrefs
	     containing stack pointer, so this check is necessary.  */
	  else if (stack_memref_p (x))
	    {
	      *reflist = record_one_stack_ref (insn, loc, *reflist);
	      iter.skip_subrtxes ();
	    }
	  /* Try harder for DEBUG_INSNs, handle e.g.
	     (mem (mem (sp + 16) + 4).  */
	  else if (!DEBUG_INSN_P (insn))
	    return false;
	  break;

	case REG:
	  /* ??? We want be able to handle non-memory stack pointer
	     references later.  For now just discard all insns referring to
	     stack pointer outside mem expressions.  We would probably
	     want to teach validate_replace to simplify expressions first.

	     We can't just compare with STACK_POINTER_RTX because the
	     reference to the stack pointer might be in some other mode.
	     In particular, an explicit clobber in an asm statement will
	     result in a QImode clobber.

	     In DEBUG_INSNs, we want to replace all occurrences, otherwise
	     they will cause -fcompare-debug failures.  */
	  if (REGNO (x) == STACK_POINTER_REGNUM)
	    {
	      if (!DEBUG_INSN_P (insn))
		return false;
	      *reflist = record_one_stack_ref (insn, loc, *reflist);
	    }
	  break;

	default:
	  break;
	}
    }
  return true;
}
    372 
    373 /* If INSN has a REG_ARGS_SIZE note, move it to LAST.
    374    AFTER is true iff LAST follows INSN in the instruction stream.  */
    375 
    376 static void
    377 maybe_move_args_size_note (rtx_insn *last, rtx_insn *insn, bool after)
    378 {
    379   rtx note, last_note;
    380 
    381   note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
    382   if (note == NULL)
    383     return;
    384 
    385   last_note = find_reg_note (last, REG_ARGS_SIZE, NULL_RTX);
    386   if (last_note)
    387     {
    388       /* The ARGS_SIZE notes are *not* cumulative.  They represent an
    389 	 absolute value, and the "most recent" note wins.  */
    390       if (!after)
    391         XEXP (last_note, 0) = XEXP (note, 0);
    392     }
    393   else
    394     add_reg_note (last, REG_ARGS_SIZE, XEXP (note, 0));
    395 }
    396 
/* Merge any REG_CFA_ADJUST_CFA note from SRC into DST.
   AFTER is true iff DST follows SRC in the instruction stream.
   When both insns carry a note, the earlier note's SET is substituted
   into the later one so that the surviving insn describes the combined
   CFA adjustment.  */

static void
maybe_merge_cfa_adjust (rtx_insn *dst, rtx_insn *src, bool after)
{
  rtx snote = NULL, dnote = NULL;
  rtx sexp, dexp;
  rtx exp1, exp2;

  if (RTX_FRAME_RELATED_P (src))
    snote = find_reg_note (src, REG_CFA_ADJUST_CFA, NULL_RTX);
  if (snote == NULL)
    /* SRC contributes nothing; DST's note (if any) already stands.  */
    return;
  sexp = XEXP (snote, 0);

  if (RTX_FRAME_RELATED_P (dst))
    dnote = find_reg_note (dst, REG_CFA_ADJUST_CFA, NULL_RTX);
  if (dnote == NULL)
    {
      /* DST had no note of its own: simply transfer SRC's.  */
      add_reg_note (dst, REG_CFA_ADJUST_CFA, sexp);
      return;
    }
  dexp = XEXP (dnote, 0);

  gcc_assert (GET_CODE (sexp) == SET);
  gcc_assert (GET_CODE (dexp) == SET);

  /* EXP1 is the later adjustment, EXP2 the earlier one.  */
  if (after)
    exp1 = dexp, exp2 = sexp;
  else
    exp1 = sexp, exp2 = dexp;

  /* Fold the earlier adjustment into the later one by substituting the
     earlier SET's source for its destination inside the later SET.  */
  SET_SRC (exp1) = simplify_replace_rtx (SET_SRC (exp1), SET_DEST (exp2),
					 SET_SRC (exp2));
  XEXP (dnote, 0) = exp1;
}
    434 
    435 /* Return the next (or previous) active insn within BB.  */
    436 
    437 static rtx_insn *
    438 prev_active_insn_bb (basic_block bb, rtx_insn *insn)
    439 {
    440   for (insn = PREV_INSN (insn);
    441        insn != PREV_INSN (BB_HEAD (bb));
    442        insn = PREV_INSN (insn))
    443     if (active_insn_p (insn))
    444       return insn;
    445   return NULL;
    446 }
    447 
    448 static rtx_insn *
    449 next_active_insn_bb (basic_block bb, rtx_insn *insn)
    450 {
    451   for (insn = NEXT_INSN (insn);
    452        insn != NEXT_INSN (BB_END (bb));
    453        insn = NEXT_INSN (insn))
    454     if (active_insn_p (insn))
    455       return insn;
    456   return NULL;
    457 }
    458 
/* If INSN has a REG_ARGS_SIZE note, if possible move it to PREV.  Otherwise
   search for a nearby candidate within BB where we can stick the note.
   The note must land somewhere, else the dwarf2 unwinder would lose track
   of the argument-area size; as a last resort a dummy clobber insn is
   emitted to carry it.  */

static void
force_move_args_size_note (basic_block bb, rtx_insn *prev, rtx_insn *insn)
{
  rtx note;
  rtx_insn *test, *next_candidate, *prev_candidate;

  /* If PREV exists, tail-call to the logic in the other function.  */
  if (prev)
    {
      maybe_move_args_size_note (prev, insn, false);
      return;
    }

  /* First, make sure there's anything that needs doing.  */
  note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
  if (note == NULL)
    return;

  /* We need to find a spot between the previous and next exception points
     where we can place the note and "properly" deallocate the arguments.  */
  next_candidate = prev_candidate = NULL;

  /* It is often the case that we have insns in the order:
	call
	add sp (previous deallocation)
	sub sp (align for next arglist)
	push arg
     and the add/sub cancel.  Therefore we begin by searching forward.  */

  test = insn;
  while ((test = next_active_insn_bb (bb, test)) != NULL)
    {
      /* Found an existing note: nothing to do.  */
      if (find_reg_note (test, REG_ARGS_SIZE, NULL_RTX))
        return;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
	break;
      if (next_candidate == NULL)
	next_candidate = test;
    }

  /* Now search backward for a home for the note.  */
  test = insn;
  while ((test = prev_active_insn_bb (bb, test)) != NULL)
    {
      rtx tnote;
      /* Found a place that seems logical to adjust the stack.  */
      tnote = find_reg_note (test, REG_ARGS_SIZE, NULL_RTX);
      if (tnote)
	{
	  /* Overwrite: INSN's note is the more recent absolute value.  */
	  XEXP (tnote, 0) = XEXP (note, 0);
	  return;
	}
      if (prev_candidate == NULL)
	prev_candidate = test;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
	break;
    }

  /* Prefer the earlier candidate, falling back to the later one.  */
  if (prev_candidate)
    test = prev_candidate;
  else if (next_candidate)
    test = next_candidate;
  else
    {
      /* ??? We *must* have a place, lest we ICE on the lost adjustment.
	 Options are: dummy clobber insn, nop, or prevent the removal of
	 the sp += 0 insn.  */
      /* TODO: Find another way to indicate to the dwarf2 code that we
	 have not in fact lost an adjustment.  */
      test = emit_insn_before (gen_rtx_CLOBBER (VOIDmode, const0_rtx), insn);
    }
  add_reg_note (test, REG_ARGS_SIZE, XEXP (note, 0));
}
    537 
/* Subroutine of combine_stack_adjustments, called for each basic block.
   Scans BB forwards tracking the most recent stack-pointer adjustment
   (LAST_SP_SET/LAST_SP_ADJUST) and all intervening stack references
   (REFLIST), merging adjacent adjustments and converting pushes where
   legal.  LIVE is a scratch bitmap simulated forwards through the block
   to track register liveness for the postreload "sp = reg" case.  */

static void
combine_stack_adjustments_for_block (basic_block bb, bitmap live)
{
  HOST_WIDE_INT last_sp_adjust = 0;
  rtx_insn *last_sp_set = NULL;
  rtx_insn *last2_sp_set = NULL;
  /* Liveness snapshot at LAST_SP_SET when it is a "sp = reg" copy;
     NULL otherwise.  */
  bitmap last_sp_live = NULL;
  struct csa_reflist *reflist = NULL;
  /* Lazily allocated backing storage for LAST_SP_LIVE.  */
  bitmap copy = NULL;
  rtx_insn *insn, *next;
  rtx set;
  bool end_of_block = false;

  bitmap_copy (live, DF_LR_IN (bb));
  df_simulate_initialize_forwards (bb, live);

  for (insn = BB_HEAD (bb); !end_of_block ; insn = next)
    {
      /* Compute the stop condition and successor up front, since INSN
	 may be deleted below.  */
      end_of_block = insn == BB_END (bb);
      next = NEXT_INSN (insn);

      if (! INSN_P (insn))
	continue;

      set = single_set_for_csa (insn);
      if (set && find_reg_note (insn, REG_STACK_CHECK, NULL_RTX))
	set = NULL_RTX;
      if (set)
	{
	  rtx dest = SET_DEST (set);
	  rtx src = SET_SRC (set);
	  HOST_WIDE_INT this_adjust = 0;

	  /* Find constant additions to the stack pointer.  */
	  if (dest == stack_pointer_rtx
	      && GET_CODE (src) == PLUS
	      && XEXP (src, 0) == stack_pointer_rtx
	      && CONST_INT_P (XEXP (src, 1)))
	    this_adjust = INTVAL (XEXP (src, 1));
	  /* Or such additions turned by postreload into a store of
	     equivalent register.  */
	  else if (dest == stack_pointer_rtx
		   && REG_P (src)
		   && REGNO (src) != STACK_POINTER_REGNUM)
	    if (rtx equal = find_reg_note (insn, REG_EQUAL, NULL_RTX))
	      if (GET_CODE (XEXP (equal, 0)) == PLUS
		  && XEXP (XEXP (equal, 0), 0) == stack_pointer_rtx
		  && CONST_INT_P (XEXP (XEXP (equal, 0), 1)))
		this_adjust = INTVAL (XEXP (XEXP (equal, 0), 1));

	  if (this_adjust)
	    {
	      /* If we've not seen an adjustment previously, record
		 it now and continue.  */
	      if (! last_sp_set)
		{
		  last_sp_set = insn;
		  last_sp_adjust = this_adjust;
		  /* For a register copy, snapshot liveness so a later
		     merge can validate clobbers in the copied pattern.  */
		  if (REG_P (src))
		    {
		      if (copy == NULL)
			copy = BITMAP_ALLOC (&reg_obstack);
		      last_sp_live = copy;
		      bitmap_copy (last_sp_live, live);
		    }
		  else
		    last_sp_live = NULL;
		  df_simulate_one_insn_forwards (bb, insn, live);
		  continue;
		}

	      /* If not all recorded refs can be adjusted, or the
		 adjustment is now too large for a constant addition,
		 we cannot merge the two stack adjustments.

		 Also we need to be careful to not move stack pointer
		 such that we create stack accesses outside the allocated
		 area.  We can combine an allocation into the first insn,
		 or a deallocation into the second insn.  We cannot
		 combine an allocation followed by a deallocation.

		 The only somewhat frequent occurrence of the later is when
		 a function allocates a stack frame but does not use it.
		 For this case, we would need to analyze rtl stream to be
		 sure that allocated area is really unused.  This means not
		 only checking the memory references, but also all registers
		 or global memory references possibly containing a stack
		 frame address.

		 Perhaps the best way to address this problem is to teach
		 gcc not to allocate stack for objects never used.  */

	      /* Combine an allocation into the first instruction.  */
	      if (STACK_GROWS_DOWNWARD ? this_adjust <= 0 : this_adjust >= 0)
		{
		  if (no_unhandled_cfa (insn)
		      && try_apply_stack_adjustment (last_sp_set, reflist,
						     last_sp_adjust
						     + this_adjust,
						     this_adjust,
						     last_sp_live,
						     insn))
		    {
		      /* It worked!  */
		      maybe_move_args_size_note (last_sp_set, insn, false);
		      maybe_merge_cfa_adjust (last_sp_set, insn, false);
		      delete_insn (insn);
		      last_sp_adjust += this_adjust;
		      last_sp_live = NULL;
		      continue;
		    }
		}

	      /* Otherwise we have a deallocation.  Do not combine with
		 a previous allocation.  Combine into the second insn.  */
	      else if (STACK_GROWS_DOWNWARD
		       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
		{
		  if (no_unhandled_cfa (last_sp_set)
		      && !REG_P (src)
		      && try_apply_stack_adjustment (insn, reflist,
						     last_sp_adjust
						     + this_adjust,
						     -last_sp_adjust,
						     NULL, NULL))
		    {
		      /* It worked!  */
		      maybe_move_args_size_note (insn, last_sp_set, true);
		      maybe_merge_cfa_adjust (insn, last_sp_set, true);
		      delete_insn (last_sp_set);
		      last_sp_set = insn;
		      last_sp_adjust += this_adjust;
		      last_sp_live = NULL;
		      free_csa_reflist (reflist);
		      reflist = NULL;
		      df_simulate_one_insn_forwards (bb, insn, live);
		      continue;
		    }
		}

	      /* Combination failed.  Restart processing from here.  If
		 deallocation+allocation conspired to cancel, we can
		 delete the old deallocation insn.  */
	      if (last_sp_set)
		{
		  if (last_sp_adjust == 0 && no_unhandled_cfa (last_sp_set))
		    {
		      maybe_move_args_size_note (insn, last_sp_set, true);
		      maybe_merge_cfa_adjust (insn, last_sp_set, true);
		      delete_insn (last_sp_set);
		    }
		  else
		    last2_sp_set = last_sp_set;
		}
	      free_csa_reflist (reflist);
	      reflist = NULL;
	      last_sp_set = insn;
	      last_sp_adjust = this_adjust;
	      if (REG_P (src))
		{
		  if (copy == NULL)
		    copy = BITMAP_ALLOC (&reg_obstack);
		  last_sp_live = copy;
		  bitmap_copy (last_sp_live, live);
		}
	      else
		last_sp_live = NULL;
	      df_simulate_one_insn_forwards (bb, insn, live);
	      continue;
	    }

	  /* Find a store with pre-(dec|inc)rement or pre-modify of exactly
	     the previous adjustment and turn it into a simple store.  This
	     is equivalent to anticipating the stack adjustment so this must
	     be an allocation.  */
	  if (MEM_P (dest)
	      && ((STACK_GROWS_DOWNWARD
		   ? (GET_CODE (XEXP (dest, 0)) == PRE_DEC
		      && known_eq (last_sp_adjust,
				   GET_MODE_SIZE (GET_MODE (dest))))
		   : (GET_CODE (XEXP (dest, 0)) == PRE_INC
		      && known_eq (-last_sp_adjust,
				   GET_MODE_SIZE (GET_MODE (dest)))))
		  || ((STACK_GROWS_DOWNWARD
		       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
		      && GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
		      && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
		      && XEXP (XEXP (XEXP (dest, 0), 1), 0)
			 == stack_pointer_rtx
		      && GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
		         == CONST_INT
		      && INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
		         == -last_sp_adjust))
	      && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
	      && !reg_mentioned_p (stack_pointer_rtx, src)
	      && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
	      && try_apply_stack_adjustment (insn, reflist, 0,
					     -last_sp_adjust,
					     NULL, NULL))
	    {
	      if (last2_sp_set)
		maybe_move_args_size_note (last2_sp_set, last_sp_set, false);
	      else
	        maybe_move_args_size_note (insn, last_sp_set, true);
	      delete_insn (last_sp_set);
	      free_csa_reflist (reflist);
	      reflist = NULL;
	      last_sp_set = NULL;
	      last_sp_adjust = 0;
	      last_sp_live = NULL;
	      df_simulate_one_insn_forwards (bb, insn, live);
	      continue;
	    }
	}

      /* INSN is not itself an adjustment; if it only touches the stack
	 through recordable references, keep collecting.  */
      if (!CALL_P (insn) && last_sp_set && record_stack_refs (insn, &reflist))
	{
	  df_simulate_one_insn_forwards (bb, insn, live);
	  continue;
	}

      /* Otherwise, we were not able to process the instruction.
	 Do not continue collecting data across such a one.  */
      if (last_sp_set
	  && (CALL_P (insn)
	      || reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
	{
	  if (last_sp_set && last_sp_adjust == 0)
	    {
	      force_move_args_size_note (bb, last2_sp_set, last_sp_set);
	      delete_insn (last_sp_set);
	    }
	  free_csa_reflist (reflist);
	  reflist = NULL;
	  last2_sp_set = NULL;
	  last_sp_set = NULL;
	  last_sp_adjust = 0;
	  last_sp_live = NULL;
	}

      df_simulate_one_insn_forwards (bb, insn, live);
    }

  /* A pending zero adjustment at the end of the block can be deleted
     outright, once its REG_ARGS_SIZE note is rehomed.  */
  if (last_sp_set && last_sp_adjust == 0)
    {
      force_move_args_size_note (bb, last2_sp_set, last_sp_set);
      delete_insn (last_sp_set);
    }

  if (reflist)
    free_csa_reflist (reflist);
  if (copy)
    BITMAP_FREE (copy);
}
    794 
/* Execute hook body: set up the dataflow problems the pass needs, run
   the analysis, then perform the combination.  Always returns 0 (no
   additional TODO flags).  */

static unsigned int
rest_of_handle_stack_adjustments (void)
{
  df_note_add_problem ();
  df_analyze ();
  combine_stack_adjustments ();
  return 0;
}
    804 
    805 namespace {
    806 
/* Static descriptor for the "csa" RTL pass; see tree-pass.h for the
   meaning of each field.  */

const pass_data pass_data_stack_adjustments =
{
  RTL_PASS, /* type */
  "csa", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_COMBINE_STACK_ADJUST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};
    819 
/* Pass object wrapping the stack-adjustment combination logic; gate is
   defined out of line below.  */

class pass_stack_adjustments : public rtl_opt_pass
{
public:
  pass_stack_adjustments (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_stack_adjustments, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_stack_adjustments ();
    }

}; // class pass_stack_adjustments
    835 
/* Gate: decide whether the pass runs for the current function.  */

bool
pass_stack_adjustments::gate (function *)
{
  /* This is kind of a heuristic.  We need to run combine_stack_adjustments
     even for machines with possibly nonzero TARGET_RETURN_POPS_ARGS
     and ACCUMULATE_OUTGOING_ARGS.  We expect that only ports having
     push instructions will have popping returns.  */
#ifndef PUSH_ROUNDING
  if (ACCUMULATE_OUTGOING_ARGS)
    return false;
#endif
  return flag_combine_stack_adjustments;
}
    849 
    850 } // anon namespace
    851 
/* Factory entry point used by passes.def to instantiate the pass.  */

rtl_opt_pass *
make_pass_stack_adjustments (gcc::context *ctxt)
{
  return new pass_stack_adjustments (ctxt);
}
    857