ipa-split.cc — GCC function splitting pass (annotated source listing, revision 1.1.1.1)
      1  1.1  mrg /* Function splitting pass
      2  1.1  mrg    Copyright (C) 2010-2022 Free Software Foundation, Inc.
      3  1.1  mrg    Contributed by Jan Hubicka  <jh (at) suse.cz>
      4  1.1  mrg 
      5  1.1  mrg This file is part of GCC.
      6  1.1  mrg 
      7  1.1  mrg GCC is free software; you can redistribute it and/or modify it under
      8  1.1  mrg the terms of the GNU General Public License as published by the Free
      9  1.1  mrg Software Foundation; either version 3, or (at your option) any later
     10  1.1  mrg version.
     11  1.1  mrg 
     12  1.1  mrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     13  1.1  mrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
     14  1.1  mrg FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     15  1.1  mrg for more details.
     16  1.1  mrg 
     17  1.1  mrg You should have received a copy of the GNU General Public License
     18  1.1  mrg along with GCC; see the file COPYING3.  If not see
     19  1.1  mrg <http://www.gnu.org/licenses/>.  */
     20  1.1  mrg 
     21  1.1  mrg /* The purpose of this pass is to split function bodies to improve
     22  1.1  mrg    inlining.  I.e. for function of the form:
     23  1.1  mrg 
     24  1.1  mrg    func (...)
     25  1.1  mrg      {
     26  1.1  mrg        if (cheap_test)
     27  1.1  mrg 	 something_small
     28  1.1  mrg        else
     29  1.1  mrg 	 something_big
     30  1.1  mrg      }
     31  1.1  mrg 
     32  1.1  mrg    Produce:
     33  1.1  mrg 
     34  1.1  mrg    func.part (...)
     35  1.1  mrg      {
     36  1.1  mrg 	something_big
     37  1.1  mrg      }
     38  1.1  mrg 
     39  1.1  mrg    func (...)
     40  1.1  mrg      {
     41  1.1  mrg        if (cheap_test)
     42  1.1  mrg 	 something_small
     43  1.1  mrg        else
     44  1.1  mrg 	 func.part (...);
     45  1.1  mrg      }
     46  1.1  mrg 
      47  1.1  mrg    When func becomes inlinable and when cheap_test is often true, inlining func,
      48  1.1  mrg    but not func.part, leads to a performance improvement similar to inlining the
      49  1.1  mrg    original func while the code size growth is smaller.
     50  1.1  mrg 
     51  1.1  mrg    The pass is organized in three stages:
     52  1.1  mrg    1) Collect local info about basic block into BB_INFO structure and
     53  1.1  mrg       compute function body estimated size and time.
      54  1.1  mrg    2) Via DFS walk find all possible basic blocks where we can split
      55  1.1  mrg       and choose the best one.
     56  1.1  mrg    3) If split point is found, split at the specified BB by creating a clone
     57  1.1  mrg       and updating function to call it.
     58  1.1  mrg 
     59  1.1  mrg    The decisions what functions to split are in execute_split_functions
     60  1.1  mrg    and consider_split.
     61  1.1  mrg 
     62  1.1  mrg    There are several possible future improvements for this pass including:
     63  1.1  mrg 
     64  1.1  mrg    1) Splitting to break up large functions
     65  1.1  mrg    2) Splitting to reduce stack frame usage
      66  1.1  mrg    3) Allow the split part of the function to use values computed in the header part.
      67  1.1  mrg       The values need to be passed to the split function, perhaps via the same
      68  1.1  mrg       interface as for nested functions or as arguments.
      69  1.1  mrg    4) Support for simple rematerialization.  I.e. when the split part uses a
      70  1.1  mrg       value computed in the header from a function parameter in a very cheap way,
      71  1.1  mrg       we can just recompute it.
     72  1.1  mrg    5) Support splitting of nested functions.
     73  1.1  mrg    6) Support non-SSA arguments.
     74  1.1  mrg    7) There is nothing preventing us from producing multiple parts of single function
     75  1.1  mrg       when needed or splitting also the parts.  */
     76  1.1  mrg 
     77  1.1  mrg #include "config.h"
     78  1.1  mrg #include "system.h"
     79  1.1  mrg #include "coretypes.h"
     80  1.1  mrg #include "backend.h"
     81  1.1  mrg #include "rtl.h"
     82  1.1  mrg #include "tree.h"
     83  1.1  mrg #include "gimple.h"
     84  1.1  mrg #include "cfghooks.h"
     85  1.1  mrg #include "alloc-pool.h"
     86  1.1  mrg #include "tree-pass.h"
     87  1.1  mrg #include "ssa.h"
     88  1.1  mrg #include "cgraph.h"
     89  1.1  mrg #include "diagnostic.h"
     90  1.1  mrg #include "fold-const.h"
     91  1.1  mrg #include "cfganal.h"
     92  1.1  mrg #include "calls.h"
     93  1.1  mrg #include "gimplify.h"
     94  1.1  mrg #include "gimple-iterator.h"
     95  1.1  mrg #include "gimplify-me.h"
     96  1.1  mrg #include "gimple-walk.h"
     97  1.1  mrg #include "symbol-summary.h"
     98  1.1  mrg #include "ipa-prop.h"
     99  1.1  mrg #include "tree-cfg.h"
    100  1.1  mrg #include "tree-into-ssa.h"
    101  1.1  mrg #include "tree-dfa.h"
    102  1.1  mrg #include "tree-inline.h"
    103  1.1  mrg #include "gimple-pretty-print.h"
    104  1.1  mrg #include "ipa-fnsummary.h"
    105  1.1  mrg #include "cfgloop.h"
    106  1.1  mrg #include "attribs.h"
    107  1.1  mrg 
/* Per basic block info collected in stage 1 of the pass (see the overview
   comment at the top of the file): the block's estimated size and time,
   used to cost candidate split points.  */

class split_bb_info
{
public:
  /* Estimated code size of the block.  */
  unsigned int size;
  /* Estimated execution time of the block.  */
  sreal time;
};
    116  1.1  mrg 
/* Per basic block info for the function currently being analyzed.  */
static vec<split_bb_info> bb_info_vec;
    118  1.1  mrg 
/* Description of a candidate split point.  */

class split_point
{
public:
  /* Estimated time and size of the two partitions (header kept in the
     original function, and the part split away).  */
  sreal header_time, split_time;
  unsigned int header_size, split_size;

  /* SSA names that need to be passed into the split function.  */
  bitmap ssa_names_to_pass;

  /* Basic block where we split (that will become the entry point of the
     new function).  */
  basic_block entry_bb;

  /* Count for entering the split part.
     This is not the count of entry_bb because entry_bb may be in a loop.  */
  profile_count count;

  /* Basic blocks we are splitting away.  */
  bitmap split_bbs;

  /* True when the return value is computed in the split part and thus it
     needs to be returned.  */
  bool split_part_set_retval;
};
    145  1.1  mrg 
/* Best split point found so far.  */

class split_point best_split_point;

/* Set of basic blocks that are not allowed to dominate a split point
   (populated by check_forbidden_calls).  */

static bitmap forbidden_dominators;

/* Forward declaration; defined later in this file.  */
static tree find_retval (basic_block return_bb);
    155  1.1  mrg 
    156  1.1  mrg /* Callback for walk_stmt_load_store_addr_ops.  If T is non-SSA automatic
    157  1.1  mrg    variable, check it if it is present in bitmap passed via DATA.  */
    158  1.1  mrg 
    159  1.1  mrg static bool
    160  1.1  mrg test_nonssa_use (gimple *, tree t, tree, void *data)
    161  1.1  mrg {
    162  1.1  mrg   t = get_base_address (t);
    163  1.1  mrg 
    164  1.1  mrg   if (!t || is_gimple_reg (t))
    165  1.1  mrg     return false;
    166  1.1  mrg 
    167  1.1  mrg   if (TREE_CODE (t) == PARM_DECL
    168  1.1  mrg       || (VAR_P (t)
    169  1.1  mrg 	  && auto_var_in_fn_p (t, current_function_decl))
    170  1.1  mrg       || TREE_CODE (t) == RESULT_DECL
    171  1.1  mrg 	 /* Normal labels are part of CFG and will be handled gratefully.
    172  1.1  mrg 	    Forced labels however can be used directly by statements and
    173  1.1  mrg 	    need to stay in one partition along with their uses.  */
    174  1.1  mrg       || (TREE_CODE (t) == LABEL_DECL
    175  1.1  mrg 	  && FORCED_LABEL (t)))
    176  1.1  mrg     return bitmap_bit_p ((bitmap)data, DECL_UID (t));
    177  1.1  mrg 
    178  1.1  mrg   /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
    179  1.1  mrg      to pretend that the value pointed to is actual result decl.  */
    180  1.1  mrg   if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
    181  1.1  mrg       && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
    182  1.1  mrg       && SSA_NAME_VAR (TREE_OPERAND (t, 0))
    183  1.1  mrg       && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
    184  1.1  mrg       && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    185  1.1  mrg     return
    186  1.1  mrg       bitmap_bit_p ((bitmap)data,
    187  1.1  mrg 		    DECL_UID (DECL_RESULT (current_function_decl)));
    188  1.1  mrg 
    189  1.1  mrg   return false;
    190  1.1  mrg }
    191  1.1  mrg 
    192  1.1  mrg /* Dump split point CURRENT.  */
    193  1.1  mrg 
    194  1.1  mrg static void
    195  1.1  mrg dump_split_point (FILE * file, class split_point *current)
    196  1.1  mrg {
    197  1.1  mrg   fprintf (file,
    198  1.1  mrg 	   "Split point at BB %i\n"
    199  1.1  mrg 	   "  header time: %f header size: %i\n"
    200  1.1  mrg 	   "  split time: %f split size: %i\n  bbs: ",
    201  1.1  mrg 	   current->entry_bb->index, current->header_time.to_double (),
    202  1.1  mrg 	   current->header_size, current->split_time.to_double (),
    203  1.1  mrg 	   current->split_size);
    204  1.1  mrg   dump_bitmap (file, current->split_bbs);
    205  1.1  mrg   fprintf (file, "  SSA names to pass: ");
    206  1.1  mrg   dump_bitmap (file, current->ssa_names_to_pass);
    207  1.1  mrg }
    208  1.1  mrg 
/* Look for all BBs in header that might lead to the split part and verify
   that they are not defining any non-SSA var used by the split part.
   Parameters are the same as for consider_split.

   Works by a reverse reachability walk: starting from the header-side
   predecessors of CURRENT->entry_bb, visit every header block that can
   reach the split entry and check each statement against NON_SSA_VARS
   via test_nonssa_use.  Returns true if the split is safe with respect
   to non-SSA variables.  */

static bool
verify_non_ssa_vars (class split_point *current, bitmap non_ssa_vars,
		     basic_block return_bb)
{
  bitmap seen = BITMAP_ALLOC (NULL);
  vec<basic_block> worklist = vNULL;
  edge e;
  edge_iterator ei;
  bool ok = true;
  basic_block bb;

  /* Seed the worklist with the predecessors of the split entry that lie
     in the header (i.e. not in the split part).  */
  FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	&& !bitmap_bit_p (current->split_bbs, e->src->index))
      {
        worklist.safe_push (e->src);
	bitmap_set_bit (seen, e->src->index);
      }

  while (!worklist.is_empty ())
    {
      bb = worklist.pop ();
      /* Propagate backwards; bitmap_set_bit returns true only when the bit
	 was newly set, so each block is pushed at most once.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	    && bitmap_set_bit (seen, e->src->index))
	  {
	    gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
					        e->src->index));
	    worklist.safe_push (e->src);
	  }
      /* Check every non-debug statement of BB for a forbidden non-SSA use,
	 including forced labels defined here.  */
      for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
	   gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  if (is_gimple_debug (stmt))
	    continue;
	  if (walk_stmt_load_store_addr_ops
	      (stmt, non_ssa_vars, test_nonssa_use, test_nonssa_use,
	       test_nonssa_use))
	    {
	      ok = false;
	      goto done;
	    }
	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	    if (test_nonssa_use (stmt, gimple_label_label (label_stmt),
				 NULL_TREE, non_ssa_vars))
	      {
		ok = false;
		goto done;
	      }
	}
      /* PHI nodes of BB may also reference non-SSA vars through their
	 arguments.  */
      for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
	   gsi_next (&bsi))
	{
	  if (walk_stmt_load_store_addr_ops
	      (gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
	       test_nonssa_use))
	    {
	      ok = false;
	      goto done;
	    }
	}
      /* If BB feeds RETURN_BB, the PHI arguments coming in along that edge
	 are effectively uses in the header; check them too.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest != return_bb)
	    continue;
	  for (gphi_iterator bsi = gsi_start_phis (return_bb);
	       !gsi_end_p (bsi);
	       gsi_next (&bsi))
	    {
	      gphi *stmt = bsi.phi ();
	      tree op = gimple_phi_arg_def (stmt, e->dest_idx);

	      if (virtual_operand_p (gimple_phi_result (stmt)))
		continue;
	      if (TREE_CODE (op) != SSA_NAME
		  && test_nonssa_use (stmt, op, op, non_ssa_vars))
		{
		  ok = false;
		  goto done;
		}
	    }
	}
    }

  /* Verify that the rest of function does not define any label
     used by the split part.  Only leading labels of a block need
     checking; the scan stops at the first non-label statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (!bitmap_bit_p (current->split_bbs, bb->index)
	&& !bitmap_bit_p (seen, bb->index))
      {
        gimple_stmt_iterator bsi;
        for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	  if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (bsi)))
	    {
	      if (test_nonssa_use (label_stmt,
				   gimple_label_label (label_stmt),
				   NULL_TREE, non_ssa_vars))
		{
		  ok = false;
		  goto done;
		}
	    }
	  else
	    break;
      }

done:
  BITMAP_FREE (seen);
  worklist.release ();
  return ok;
}
    325  1.1  mrg 
    326  1.1  mrg /* If STMT is a call, check the callee against a list of forbidden
    327  1.1  mrg    predicate functions.  If a match is found, look for uses of the
    328  1.1  mrg    call result in condition statements that compare against zero.
    329  1.1  mrg    For each such use, find the block targeted by the condition
    330  1.1  mrg    statement for the nonzero result, and set the bit for this block
    331  1.1  mrg    in the forbidden dominators bitmap.  The purpose of this is to avoid
    332  1.1  mrg    selecting a split point where we are likely to lose the chance
    333  1.1  mrg    to optimize away an unused function call.  */
    334  1.1  mrg 
    335  1.1  mrg static void
    336  1.1  mrg check_forbidden_calls (gimple *stmt)
    337  1.1  mrg {
    338  1.1  mrg   imm_use_iterator use_iter;
    339  1.1  mrg   use_operand_p use_p;
    340  1.1  mrg   tree lhs;
    341  1.1  mrg 
    342  1.1  mrg   /* At the moment, __builtin_constant_p is the only forbidden
    343  1.1  mrg      predicate function call (see PR49642).  */
    344  1.1  mrg   if (!gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
    345  1.1  mrg     return;
    346  1.1  mrg 
    347  1.1  mrg   lhs = gimple_call_lhs (stmt);
    348  1.1  mrg 
    349  1.1  mrg   if (!lhs || TREE_CODE (lhs) != SSA_NAME)
    350  1.1  mrg     return;
    351  1.1  mrg 
    352  1.1  mrg   FOR_EACH_IMM_USE_FAST (use_p, use_iter, lhs)
    353  1.1  mrg     {
    354  1.1  mrg       tree op1;
    355  1.1  mrg       basic_block use_bb, forbidden_bb;
    356  1.1  mrg       enum tree_code code;
    357  1.1  mrg       edge true_edge, false_edge;
    358  1.1  mrg       gcond *use_stmt;
    359  1.1  mrg 
    360  1.1  mrg       use_stmt = dyn_cast <gcond *> (USE_STMT (use_p));
    361  1.1  mrg       if (!use_stmt)
    362  1.1  mrg 	continue;
    363  1.1  mrg 
    364  1.1  mrg       /* Assuming canonical form for GIMPLE_COND here, with constant
    365  1.1  mrg 	 in second position.  */
    366  1.1  mrg       op1 = gimple_cond_rhs (use_stmt);
    367  1.1  mrg       code = gimple_cond_code (use_stmt);
    368  1.1  mrg       use_bb = gimple_bb (use_stmt);
    369  1.1  mrg 
    370  1.1  mrg       extract_true_false_edges_from_block (use_bb, &true_edge, &false_edge);
    371  1.1  mrg 
    372  1.1  mrg       /* We're only interested in comparisons that distinguish
    373  1.1  mrg 	 unambiguously from zero.  */
    374  1.1  mrg       if (!integer_zerop (op1) || code == LE_EXPR || code == GE_EXPR)
    375  1.1  mrg 	continue;
    376  1.1  mrg 
    377  1.1  mrg       if (code == EQ_EXPR)
    378  1.1  mrg 	forbidden_bb = false_edge->dest;
    379  1.1  mrg       else
    380  1.1  mrg 	forbidden_bb = true_edge->dest;
    381  1.1  mrg 
    382  1.1  mrg       bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
    383  1.1  mrg     }
    384  1.1  mrg }
    385  1.1  mrg 
    386  1.1  mrg /* If BB is dominated by any block in the forbidden dominators set,
    387  1.1  mrg    return TRUE; else FALSE.  */
    388  1.1  mrg 
    389  1.1  mrg static bool
    390  1.1  mrg dominated_by_forbidden (basic_block bb)
    391  1.1  mrg {
    392  1.1  mrg   unsigned dom_bb;
    393  1.1  mrg   bitmap_iterator bi;
    394  1.1  mrg 
    395  1.1  mrg   EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
    396  1.1  mrg     {
    397  1.1  mrg       if (dominated_by_p (CDI_DOMINATORS, bb,
    398  1.1  mrg 			  BASIC_BLOCK_FOR_FN (cfun, dom_bb)))
    399  1.1  mrg 	return true;
    400  1.1  mrg     }
    401  1.1  mrg 
    402  1.1  mrg   return false;
    403  1.1  mrg }
    404  1.1  mrg 
    405  1.1  mrg /* For give split point CURRENT and return block RETURN_BB return 1
    406  1.1  mrg    if ssa name VAL is set by split part and 0 otherwise.  */
    407  1.1  mrg static bool
    408  1.1  mrg split_part_set_ssa_name_p (tree val, class split_point *current,
    409  1.1  mrg 			   basic_block return_bb)
    410  1.1  mrg {
    411  1.1  mrg   if (TREE_CODE (val) != SSA_NAME)
    412  1.1  mrg     return false;
    413  1.1  mrg 
    414  1.1  mrg   return (!SSA_NAME_IS_DEFAULT_DEF (val)
    415  1.1  mrg 	  && (bitmap_bit_p (current->split_bbs,
    416  1.1  mrg 			    gimple_bb (SSA_NAME_DEF_STMT (val))->index)
    417  1.1  mrg 	      || gimple_bb (SSA_NAME_DEF_STMT (val)) == return_bb));
    418  1.1  mrg }
    419  1.1  mrg 
    420  1.1  mrg /* We found an split_point CURRENT.  NON_SSA_VARS is bitmap of all non ssa
    421  1.1  mrg    variables used and RETURN_BB is return basic block.
    422  1.1  mrg    See if we can split function here.  */
    423  1.1  mrg 
    424  1.1  mrg static void
    425  1.1  mrg consider_split (class split_point *current, bitmap non_ssa_vars,
    426  1.1  mrg 		basic_block return_bb)
    427  1.1  mrg {
    428  1.1  mrg   tree parm;
    429  1.1  mrg   unsigned int num_args = 0;
    430  1.1  mrg   unsigned int call_overhead;
    431  1.1  mrg   edge e;
    432  1.1  mrg   edge_iterator ei;
    433  1.1  mrg   gphi_iterator bsi;
    434  1.1  mrg   unsigned int i;
    435  1.1  mrg   tree retval;
    436  1.1  mrg   bool back_edge = false;
    437  1.1  mrg 
    438  1.1  mrg   if (dump_file && (dump_flags & TDF_DETAILS))
    439  1.1  mrg     dump_split_point (dump_file, current);
    440  1.1  mrg 
    441  1.1  mrg   current->count = profile_count::zero ();
    442  1.1  mrg   FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
    443  1.1  mrg     {
    444  1.1  mrg       if (e->flags & EDGE_DFS_BACK)
    445  1.1  mrg 	back_edge = true;
    446  1.1  mrg       if (!bitmap_bit_p (current->split_bbs, e->src->index))
    447  1.1  mrg 	current->count += e->count ();
    448  1.1  mrg     }
    449  1.1  mrg 
    450  1.1  mrg   /* Do not split when we would end up calling function anyway.
    451  1.1  mrg      Compares are three state, use !(...<...) to also give up when outcome
    452  1.1  mrg      is unknown.  */
    453  1.1  mrg   if (!(current->count
    454  1.1  mrg        < (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale
    455  1.1  mrg 	   (param_partial_inlining_entry_probability, 100))))
    456  1.1  mrg     {
    457  1.1  mrg       /* When profile is guessed, we cannot expect it to give us
    458  1.1  mrg 	 realistic estimate on likeliness of function taking the
    459  1.1  mrg 	 complex path.  As a special case, when tail of the function is
    460  1.1  mrg 	 a loop, enable splitting since inlining code skipping the loop
    461  1.1  mrg 	 is likely noticeable win.  */
    462  1.1  mrg       if (back_edge
    463  1.1  mrg 	  && profile_status_for_fn (cfun) != PROFILE_READ
    464  1.1  mrg 	  && current->count
    465  1.1  mrg 		 < ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
    466  1.1  mrg 	{
    467  1.1  mrg 	  if (dump_file && (dump_flags & TDF_DETAILS))
    468  1.1  mrg 	    {
    469  1.1  mrg 	      fprintf (dump_file,
    470  1.1  mrg 		       "  Split before loop, accepting despite low counts");
    471  1.1  mrg 	      current->count.dump (dump_file);
    472  1.1  mrg 	      fprintf (dump_file, " ");
    473  1.1  mrg 	      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.dump (dump_file);
    474  1.1  mrg 	    }
    475  1.1  mrg 	}
    476  1.1  mrg       else
    477  1.1  mrg 	{
    478  1.1  mrg 	  if (dump_file && (dump_flags & TDF_DETAILS))
    479  1.1  mrg 	    fprintf (dump_file,
    480  1.1  mrg 		     "  Refused: incoming frequency is too large.\n");
    481  1.1  mrg 	  return;
    482  1.1  mrg 	}
    483  1.1  mrg     }
    484  1.1  mrg 
    485  1.1  mrg   if (!current->header_size)
    486  1.1  mrg     {
    487  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    488  1.1  mrg 	fprintf (dump_file, "  Refused: header empty\n");
    489  1.1  mrg       return;
    490  1.1  mrg     }
    491  1.1  mrg 
    492  1.1  mrg   /* Verify that PHI args on entry are either virtual or all their operands
    493  1.1  mrg      incoming from header are the same.  */
    494  1.1  mrg   for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi); gsi_next (&bsi))
    495  1.1  mrg     {
    496  1.1  mrg       gphi *stmt = bsi.phi ();
    497  1.1  mrg       tree val = NULL;
    498  1.1  mrg 
    499  1.1  mrg       if (virtual_operand_p (gimple_phi_result (stmt)))
    500  1.1  mrg 	continue;
    501  1.1  mrg       for (i = 0; i < gimple_phi_num_args (stmt); i++)
    502  1.1  mrg 	{
    503  1.1  mrg 	  edge e = gimple_phi_arg_edge (stmt, i);
    504  1.1  mrg 	  if (!bitmap_bit_p (current->split_bbs, e->src->index))
    505  1.1  mrg 	    {
    506  1.1  mrg 	      tree edge_val = gimple_phi_arg_def (stmt, i);
    507  1.1  mrg 	      if (val && edge_val != val)
    508  1.1  mrg 	        {
    509  1.1  mrg 		  if (dump_file && (dump_flags & TDF_DETAILS))
    510  1.1  mrg 		    fprintf (dump_file,
    511  1.1  mrg 			     "  Refused: entry BB has PHI with multiple variants\n");
    512  1.1  mrg 		  return;
    513  1.1  mrg 	        }
    514  1.1  mrg 	      val = edge_val;
    515  1.1  mrg 	    }
    516  1.1  mrg 	}
    517  1.1  mrg     }
    518  1.1  mrg 
    519  1.1  mrg 
    520  1.1  mrg   /* See what argument we will pass to the split function and compute
    521  1.1  mrg      call overhead.  */
    522  1.1  mrg   call_overhead = eni_size_weights.call_cost;
    523  1.1  mrg   for (parm = DECL_ARGUMENTS (current_function_decl); parm;
    524  1.1  mrg        parm = DECL_CHAIN (parm))
    525  1.1  mrg     {
    526  1.1  mrg       if (!is_gimple_reg (parm))
    527  1.1  mrg 	{
    528  1.1  mrg 	  if (bitmap_bit_p (non_ssa_vars, DECL_UID (parm)))
    529  1.1  mrg 	    {
    530  1.1  mrg 	      if (dump_file && (dump_flags & TDF_DETAILS))
    531  1.1  mrg 		fprintf (dump_file,
    532  1.1  mrg 			 "  Refused: need to pass non-ssa param values\n");
    533  1.1  mrg 	      return;
    534  1.1  mrg 	    }
    535  1.1  mrg 	}
    536  1.1  mrg       else
    537  1.1  mrg 	{
    538  1.1  mrg 	  tree ddef = ssa_default_def (cfun, parm);
    539  1.1  mrg 	  if (ddef
    540  1.1  mrg 	      && bitmap_bit_p (current->ssa_names_to_pass,
    541  1.1  mrg 			       SSA_NAME_VERSION (ddef)))
    542  1.1  mrg 	    {
    543  1.1  mrg 	      if (!VOID_TYPE_P (TREE_TYPE (parm)))
    544  1.1  mrg 		call_overhead += estimate_move_cost (TREE_TYPE (parm), false);
    545  1.1  mrg 	      num_args++;
    546  1.1  mrg 	    }
    547  1.1  mrg 	}
    548  1.1  mrg     }
    549  1.1  mrg   if (!VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    550  1.1  mrg     call_overhead += estimate_move_cost (TREE_TYPE (TREE_TYPE
    551  1.1  mrg 						 (current_function_decl)),
    552  1.1  mrg 					 false);
    553  1.1  mrg 
    554  1.1  mrg   if (current->split_size <= call_overhead)
    555  1.1  mrg     {
    556  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    557  1.1  mrg 	fprintf (dump_file,
    558  1.1  mrg 		 "  Refused: split size is smaller than call overhead\n");
    559  1.1  mrg       return;
    560  1.1  mrg     }
    561  1.1  mrg   /* FIXME: The logic here is not very precise, because inliner does use
    562  1.1  mrg      inline predicates to reduce function body size.  We add 10 to anticipate
    563  1.1  mrg      that.  Next stage1 we should try to be more meaningful here.  */
    564  1.1  mrg   if (current->header_size + call_overhead
    565  1.1  mrg       >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl)
    566  1.1  mrg 			? param_max_inline_insns_single
    567  1.1  mrg 			: param_max_inline_insns_auto) + 10)
    568  1.1  mrg     {
    569  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    570  1.1  mrg 	fprintf (dump_file,
    571  1.1  mrg 		 "  Refused: header size is too large for inline candidate\n");
    572  1.1  mrg       return;
    573  1.1  mrg     }
    574  1.1  mrg 
    575  1.1  mrg   /* Splitting functions brings the target out of comdat group; this will
    576  1.1  mrg      lead to code duplication if the function is reused by other unit.
    577  1.1  mrg      Limit this duplication.  This is consistent with limit in tree-sra.cc
    578  1.1  mrg      FIXME: with LTO we ought to be able to do better!  */
    579  1.1  mrg   if (DECL_ONE_ONLY (current_function_decl)
    580  1.1  mrg       && current->split_size >= (unsigned int) param_max_inline_insns_auto + 10)
    581  1.1  mrg     {
    582  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    583  1.1  mrg 	fprintf (dump_file,
    584  1.1  mrg 		 "  Refused: function is COMDAT and tail is too large\n");
    585  1.1  mrg       return;
    586  1.1  mrg     }
    587  1.1  mrg   /* For comdat functions also reject very small tails; those will likely get
    588  1.1  mrg      inlined back and we do not want to risk the duplication overhead.
    589  1.1  mrg      FIXME: with LTO we ought to be able to do better!  */
    590  1.1  mrg   if (DECL_ONE_ONLY (current_function_decl)
    591  1.1  mrg       && current->split_size
    592  1.1  mrg 	 <= (unsigned int) param_early_inlining_insns / 2)
    593  1.1  mrg     {
    594  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    595  1.1  mrg 	fprintf (dump_file,
    596  1.1  mrg 		 "  Refused: function is COMDAT and tail is too small\n");
    597  1.1  mrg       return;
    598  1.1  mrg     }
    599  1.1  mrg 
    600  1.1  mrg   /* FIXME: we currently can pass only SSA function parameters to the split
    601  1.1  mrg      arguments.  Once parm_adjustment infrastructure is supported by cloning,
    602  1.1  mrg      we can pass more than that.  */
    603  1.1  mrg   if (num_args != bitmap_count_bits (current->ssa_names_to_pass))
    604  1.1  mrg     {
    605  1.1  mrg 
    606  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    607  1.1  mrg 	fprintf (dump_file,
    608  1.1  mrg 		 "  Refused: need to pass non-param values\n");
    609  1.1  mrg       return;
    610  1.1  mrg     }
    611  1.1  mrg 
    612  1.1  mrg   /* When there are non-ssa vars used in the split region, see if they
    613  1.1  mrg      are used in the header region.  If so, reject the split.
    614  1.1  mrg      FIXME: we can use nested function support to access both.  */
    615  1.1  mrg   if (!bitmap_empty_p (non_ssa_vars)
    616  1.1  mrg       && !verify_non_ssa_vars (current, non_ssa_vars, return_bb))
    617  1.1  mrg     {
    618  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    619  1.1  mrg 	fprintf (dump_file,
    620  1.1  mrg 		 "  Refused: split part has non-ssa uses\n");
    621  1.1  mrg       return;
    622  1.1  mrg     }
    623  1.1  mrg 
    624  1.1  mrg   /* If the split point is dominated by a forbidden block, reject
    625  1.1  mrg      the split.  */
    626  1.1  mrg   if (!bitmap_empty_p (forbidden_dominators)
    627  1.1  mrg       && dominated_by_forbidden (current->entry_bb))
    628  1.1  mrg     {
    629  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    630  1.1  mrg 	fprintf (dump_file,
    631  1.1  mrg 		 "  Refused: split point dominated by forbidden block\n");
    632  1.1  mrg       return;
    633  1.1  mrg     }
    634  1.1  mrg 
    635  1.1  mrg   /* See if retval used by return bb is computed by header or split part.
    636  1.1  mrg      When it is computed by split part, we need to produce return statement
    637  1.1  mrg      in the split part and add code to header to pass it around.
    638  1.1  mrg 
    639  1.1  mrg      This is bit tricky to test:
    640  1.1  mrg        1) When there is no return_bb or no return value, we always pass
    641  1.1  mrg           value around.
    642  1.1  mrg        2) Invariants are always computed by caller.
    643  1.1  mrg        3) For SSA we need to look if defining statement is in header or split part
    644  1.1  mrg        4) For non-SSA we need to look where the var is computed. */
    645  1.1  mrg   retval = find_retval (return_bb);
    646  1.1  mrg   if (!retval)
    647  1.1  mrg     {
    648  1.1  mrg       /* If there is a return_bb with no return value in function returning
    649  1.1  mrg 	 value by reference, also make the split part return void, otherwise
    650  1.1  mrg 	 we expansion would try to create a non-POD temporary, which is
    651  1.1  mrg 	 invalid.  */
    652  1.1  mrg       if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
    653  1.1  mrg 	  && DECL_RESULT (current_function_decl)
    654  1.1  mrg 	  && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    655  1.1  mrg 	current->split_part_set_retval = false;
    656  1.1  mrg       else
    657  1.1  mrg 	current->split_part_set_retval = true;
    658  1.1  mrg     }
    659  1.1  mrg   else if (is_gimple_min_invariant (retval))
    660  1.1  mrg     current->split_part_set_retval = false;
    661  1.1  mrg   /* Special case is value returned by reference we record as if it was non-ssa
    662  1.1  mrg      set to result_decl.  */
    663  1.1  mrg   else if (TREE_CODE (retval) == SSA_NAME
    664  1.1  mrg 	   && SSA_NAME_VAR (retval)
    665  1.1  mrg 	   && TREE_CODE (SSA_NAME_VAR (retval)) == RESULT_DECL
    666  1.1  mrg 	   && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    667  1.1  mrg     current->split_part_set_retval
    668  1.1  mrg        = bitmap_bit_p (non_ssa_vars, DECL_UID (SSA_NAME_VAR (retval)));
    669  1.1  mrg   else if (TREE_CODE (retval) == SSA_NAME)
    670  1.1  mrg     current->split_part_set_retval
    671  1.1  mrg       = split_part_set_ssa_name_p (retval, current, return_bb);
    672  1.1  mrg   else if (TREE_CODE (retval) == PARM_DECL)
    673  1.1  mrg     current->split_part_set_retval = false;
    674  1.1  mrg   else if (VAR_P (retval)
    675  1.1  mrg 	   || TREE_CODE (retval) == RESULT_DECL)
    676  1.1  mrg     current->split_part_set_retval
    677  1.1  mrg       = bitmap_bit_p (non_ssa_vars, DECL_UID (retval));
    678  1.1  mrg   else
    679  1.1  mrg     current->split_part_set_retval = true;
    680  1.1  mrg 
    681  1.1  mrg   /* split_function fixes up at most one PHI non-virtual PHI node in return_bb,
    682  1.1  mrg      for the return value.  If there are other PHIs, give up.  */
    683  1.1  mrg   if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    684  1.1  mrg     {
    685  1.1  mrg       gphi_iterator psi;
    686  1.1  mrg 
    687  1.1  mrg       for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi); gsi_next (&psi))
    688  1.1  mrg 	if (!virtual_operand_p (gimple_phi_result (psi.phi ()))
    689  1.1  mrg 	    && !(retval
    690  1.1  mrg 		 && current->split_part_set_retval
    691  1.1  mrg 		 && TREE_CODE (retval) == SSA_NAME
    692  1.1  mrg 		 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
    693  1.1  mrg 		 && SSA_NAME_DEF_STMT (retval) == psi.phi ()))
    694  1.1  mrg 	  {
    695  1.1  mrg 	    if (dump_file && (dump_flags & TDF_DETAILS))
    696  1.1  mrg 	      fprintf (dump_file,
    697  1.1  mrg 		       "  Refused: return bb has extra PHIs\n");
    698  1.1  mrg 	    return;
    699  1.1  mrg 	  }
    700  1.1  mrg     }
    701  1.1  mrg 
    702  1.1  mrg   if (dump_file && (dump_flags & TDF_DETAILS))
    703  1.1  mrg     fprintf (dump_file, "  Accepted!\n");
    704  1.1  mrg 
  /* At the moment choose the split point with the lowest count that leaves
     out the smallest size of header.
     In the future we might re-consider this heuristic.  */
    708  1.1  mrg   if (!best_split_point.split_bbs
    709  1.1  mrg       || best_split_point.count
    710  1.1  mrg 	 > current->count
    711  1.1  mrg       || (best_split_point.count == current->count
    712  1.1  mrg 	  && best_split_point.split_size < current->split_size))
    713  1.1  mrg 
    714  1.1  mrg     {
    715  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    716  1.1  mrg 	fprintf (dump_file, "  New best split point!\n");
    717  1.1  mrg       if (best_split_point.ssa_names_to_pass)
    718  1.1  mrg 	{
    719  1.1  mrg 	  BITMAP_FREE (best_split_point.ssa_names_to_pass);
    720  1.1  mrg 	  BITMAP_FREE (best_split_point.split_bbs);
    721  1.1  mrg 	}
    722  1.1  mrg       best_split_point = *current;
    723  1.1  mrg       best_split_point.ssa_names_to_pass = BITMAP_ALLOC (NULL);
    724  1.1  mrg       bitmap_copy (best_split_point.ssa_names_to_pass,
    725  1.1  mrg 		   current->ssa_names_to_pass);
    726  1.1  mrg       best_split_point.split_bbs = BITMAP_ALLOC (NULL);
    727  1.1  mrg       bitmap_copy (best_split_point.split_bbs, current->split_bbs);
    728  1.1  mrg     }
    729  1.1  mrg }
    730  1.1  mrg 
    731  1.1  mrg /* Return basic block containing RETURN statement.  We allow basic blocks
    732  1.1  mrg    of the form:
    733  1.1  mrg    <retval> = tmp_var;
    734  1.1  mrg    return <retval>
    735  1.1  mrg    but return_bb cannot be more complex than this (except for
    736  1.1  mrg    -fsanitize=thread we allow TSAN_FUNC_EXIT () internal call in there).
    737  1.1  mrg    If nothing is found, return the exit block.
    738  1.1  mrg 
   When there are multiple RETURN statements, choose one with a return value,
   since that one is more likely shared by multiple code paths.
    741  1.1  mrg 
    742  1.1  mrg    Return BB is special, because for function splitting it is the only
    743  1.1  mrg    basic block that is duplicated in between header and split part of the
    744  1.1  mrg    function.
    745  1.1  mrg 
    746  1.1  mrg    TODO: We might support multiple return blocks.  */
    747  1.1  mrg 
static basic_block
find_return_bb (void)
{
  edge e;
  basic_block return_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  gimple_stmt_iterator bsi;
  bool found_return = false;
  tree retval = NULL_TREE;

  /* A usable return_bb can only exist when the exit block has exactly
     one predecessor; otherwise fall back to the exit block itself.  */
  if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun)))
    return return_bb;

  /* Walk the candidate block backwards from its last statement, verifying
     it contains nothing beyond the simple pattern documented above.  */
  e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
  for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
    {
      gimple *stmt = gsi_stmt (bsi);
      /* Labels, debug statements and clobbers are harmless; skip them.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || is_gimple_debug (stmt)
	  || gimple_clobber_p (stmt))
	;
      /* Allow a single copy feeding the returned value (the
	 "<retval> = tmp_var" form).  Because we walk backwards, the
	 return statement must already have been seen, and the assignment's
	 LHS must be the value that return uses; the RHS must be an auto
	 variable of this function or an invariant.  */
      else if (gimple_code (stmt) == GIMPLE_ASSIGN
	       && found_return
	       && gimple_assign_single_p (stmt)
	       && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt),
				     current_function_decl)
		   || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
	       && retval == gimple_assign_lhs (stmt))
	;
      /* Record the return statement and the value it returns.  */
      else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
	{
	  found_return = true;
	  retval = gimple_return_retval (return_stmt);
	}
      /* For -fsanitize=thread, allow also TSAN_FUNC_EXIT () in the return
	 bb.  */
      else if ((flag_sanitize & SANITIZE_THREAD)
	       && gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
	;
      else
	break;
    }
  /* Accept the block only if the backward walk consumed every statement
     (i.e. nothing unexpected caused an early break) and we actually saw
     a return.  */
  if (gsi_end_p (bsi) && found_return)
    return_bb = e->src;

  return return_bb;
}
    794  1.1  mrg 
    795  1.1  mrg /* Given return basic block RETURN_BB, see where return value is really
    796  1.1  mrg    stored.  */
    797  1.1  mrg static tree
    798  1.1  mrg find_retval (basic_block return_bb)
    799  1.1  mrg {
    800  1.1  mrg   gimple_stmt_iterator bsi;
    801  1.1  mrg   for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
    802  1.1  mrg     if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
    803  1.1  mrg       return gimple_return_retval (return_stmt);
    804  1.1  mrg     else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
    805  1.1  mrg 	     && !gimple_clobber_p (gsi_stmt (bsi)))
    806  1.1  mrg       return gimple_assign_rhs1 (gsi_stmt (bsi));
    807  1.1  mrg   return NULL;
    808  1.1  mrg }
    809  1.1  mrg 
    810  1.1  mrg /* Callback for walk_stmt_load_store_addr_ops.  If T is non-SSA automatic
    811  1.1  mrg    variable, mark it as used in bitmap passed via DATA.
    812  1.1  mrg    Return true when access to T prevents splitting the function.  */
    813  1.1  mrg 
static bool
mark_nonssa_use (gimple *, tree t, tree, void *data)
{
  t = get_base_address (t);

  /* SSA registers are tracked separately via the SSA name bitmaps;
     nothing to record here.  */
  if (!t || is_gimple_reg (t))
    return false;

  /* At present we can't pass non-SSA arguments to split function.
     FIXME: this can be relaxed by passing references to arguments.  */
  if (TREE_CODE (t) == PARM_DECL)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Cannot split: use of non-ssa function parameter.\n");
      return true;
    }

  /* Record automatic variables of this function, the result decl and
     forced labels in the bitmap (keyed by DECL_UID) passed via DATA.  */
  if ((VAR_P (t) && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
      || (TREE_CODE (t) == LABEL_DECL && FORCED_LABEL (t)))
    bitmap_set_bit ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
		    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}
    850  1.1  mrg 
    851  1.1  mrg /* Compute local properties of basic block BB we collect when looking for
    852  1.1  mrg    split points.  We look for ssa defs and store them in SET_SSA_NAMES,
    853  1.1  mrg    for ssa uses and store them in USED_SSA_NAMES and for any non-SSA automatic
    854  1.1  mrg    vars stored in NON_SSA_VARS.
    855  1.1  mrg 
    856  1.1  mrg    When BB has edge to RETURN_BB, collect uses in RETURN_BB too.
    857  1.1  mrg 
    858  1.1  mrg    Return false when BB contains something that prevents it from being put into
    859  1.1  mrg    split function.  */
    860  1.1  mrg 
static bool
visit_bb (basic_block bb, basic_block return_bb,
	  bitmap set_ssa_names, bitmap used_ssa_names,
	  bitmap non_ssa_vars)
{
  edge e;
  edge_iterator ei;
  bool can_split = true;

  /* Pass 1: ordinary statements of BB.  */
  for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
       gsi_next (&bsi))
    {
      gimple *stmt = gsi_stmt (bsi);
      tree op;
      ssa_op_iter iter;

      /* Debug statements and clobbers neither block splitting nor carry
	 uses we need to track.  */
      if (is_gimple_debug (stmt))
	continue;

      if (gimple_clobber_p (stmt))
	continue;

      /* FIXME: We can split regions containing EH.  We cannot however
	 split RESX, EH_DISPATCH and EH_POINTER referring to same region
	 into different partitions.  This would require tracking of
	 EH regions and checking in consider_split_point if they
	 are not used elsewhere.  */
      if (gimple_code (stmt) == GIMPLE_RESX)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Cannot split: resx.\n");
	  can_split = false;
	}
      if (gimple_code (stmt) == GIMPLE_EH_DISPATCH)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Cannot split: eh dispatch.\n");
	  can_split = false;
	}

      /* Check calls that would prevent splitting.  */
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  if (tree decl = gimple_call_fndecl (stmt))
	    {
	      /* Check builtins that would prevent splitting.  */
	      if (fndecl_built_in_p (decl, BUILT_IN_NORMAL))
		switch (DECL_FUNCTION_CODE (decl))
		  {
		  /* FIXME: once we will allow passing non-parm values to
		     split part, we need to be sure to handle correct
		     builtin_stack_save and builtin_stack_restore.  At the
		     moment we are safe; there is no way to store
		     builtin_stack_save result in non-SSA variable since all
		     calls to those are compiler generated.  */
		  case BUILT_IN_APPLY:
		  case BUILT_IN_APPLY_ARGS:
		  case BUILT_IN_VA_START:
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      fprintf (dump_file,
			       "Cannot split: builtin_apply and va_start.\n");
		    can_split = false;
		    break;
		  case BUILT_IN_EH_POINTER:
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      fprintf (dump_file,
			       "Cannot split: builtin_eh_pointer.\n");
		    can_split = false;
		    break;
		  default:
		    break;
		  }

	      /* Calls to functions (which have the warning or error
		 attribute on them) should not be split off into another
		 function.  */
	      if (lookup_attribute ("warning", DECL_ATTRIBUTES (decl))
		   || lookup_attribute ("error", DECL_ATTRIBUTES (decl)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "Cannot split: warning or error attribute.\n");
		  can_split = false;
		}
	    }
	}

      /* Accumulate SSA defs/uses of this statement and walk its memory
	 operands for non-SSA variable references; any forbidden reference
	 flips CAN_SPLIT to false.  */
      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	bitmap_set_bit (set_ssa_names, SSA_NAME_VERSION (op));
      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
      can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
						   mark_nonssa_use,
						   mark_nonssa_use,
						   mark_nonssa_use);
    }
  /* Pass 2: PHI nodes of BB; the PHI result is a def, its arguments
     are uses.  Virtual PHIs are ignored.  */
  for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
       gsi_next (&bsi))
    {
      gphi *stmt = bsi.phi ();
      unsigned int i;

      if (virtual_operand_p (gimple_phi_result (stmt)))
	continue;
      bitmap_set_bit (set_ssa_names,
		      SSA_NAME_VERSION (gimple_phi_result (stmt)));
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
	{
	  tree op = gimple_phi_arg_def (stmt, i);
	  if (TREE_CODE (op) == SSA_NAME)
	    bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
	}
      can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
						   mark_nonssa_use,
						   mark_nonssa_use,
						   mark_nonssa_use);
    }
  /* Record also uses coming from PHI operand in return BB.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->dest == return_bb)
      {
	for (gphi_iterator bsi = gsi_start_phis (return_bb);
	     !gsi_end_p (bsi);
	     gsi_next (&bsi))
	  {
	    gphi *stmt = bsi.phi ();
	    /* Only the argument flowing in along this edge counts as a
	       use attributable to BB.  */
	    tree op = gimple_phi_arg_def (stmt, e->dest_idx);

	    if (virtual_operand_p (gimple_phi_result (stmt)))
	      continue;
	    if (TREE_CODE (op) == SSA_NAME)
	      bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
	    else
	      can_split &= !mark_nonssa_use (stmt, op, op, non_ssa_vars);
	  }
      }
  return can_split;
}
    999  1.1  mrg 
   1000  1.1  mrg /* Stack entry for recursive DFS walk in find_split_point.  */
   1001  1.1  mrg 
class stack_entry
{
public:
  /* Basic block we are examining.  */
  basic_block bb;

  /* SSA names set and used by the BB and all BBs reachable
     from it via DFS walk.  */
  bitmap set_ssa_names, used_ssa_names;
  /* Non-SSA automatic variables referenced by the component, keyed by
     DECL_UID (filled in by mark_nonssa_use via visit_bb).  */
  bitmap non_ssa_vars;

  /* All BBS visited from this BB via DFS walk.  */
  bitmap bbs_visited;

  /* Last examined edge in DFS walk.  Since we walk unoriented graph,
     the value is up to sum of incoming and outgoing edges of BB.  */
  unsigned int edge_num;

  /* Stack entry index of earliest BB reachable from current BB
     or any BB visited later in DFS walk.  */
  int earliest;

  /* Overall time and size of all BBs reached from this BB in DFS walk.  */
  sreal overall_time;
  int overall_size;

  /* When false we cannot split on this BB.  */
  bool can_split;
};
   1031  1.1  mrg 
   1032  1.1  mrg 
   1033  1.1  mrg /* Find all articulations and call consider_split on them.
   1034  1.1  mrg    OVERALL_TIME and OVERALL_SIZE is time and size of the function.
   1035  1.1  mrg 
   1036  1.1  mrg    We perform basic algorithm for finding an articulation in a graph
   1037  1.1  mrg    created from CFG by considering it to be an unoriented graph.
   1038  1.1  mrg 
   1039  1.1  mrg    The articulation is discovered via DFS walk. We collect earliest
   1040  1.1  mrg    basic block on stack that is reachable via backward edge.  Articulation
   1041  1.1  mrg    is any basic block such that there is no backward edge bypassing it.
   1042  1.1  mrg    To reduce stack usage we maintain heap allocated stack in STACK vector.
   1043  1.1  mrg    AUX pointer of BB is set to index it appears in the stack or -1 once
   1044  1.1  mrg    it is visited and popped off the stack.
   1045  1.1  mrg 
   1046  1.1  mrg    The algorithm finds articulation after visiting the whole component
   1047  1.1  mrg    reachable by it.  This makes it convenient to collect information about
   1048  1.1  mrg    the component used by consider_split.  */
   1049  1.1  mrg 
   1050  1.1  mrg static void
   1051  1.1  mrg find_split_points (basic_block return_bb, sreal overall_time, int overall_size)
   1052  1.1  mrg {
   1053  1.1  mrg   stack_entry first;
   1054  1.1  mrg   vec<stack_entry> stack = vNULL;
   1055  1.1  mrg   basic_block bb;
   1056  1.1  mrg   class split_point current;
   1057  1.1  mrg 
   1058  1.1  mrg   current.header_time = overall_time;
   1059  1.1  mrg   current.header_size = overall_size;
   1060  1.1  mrg   current.split_time = 0;
   1061  1.1  mrg   current.split_size = 0;
   1062  1.1  mrg   current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
   1063  1.1  mrg 
   1064  1.1  mrg   first.bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
   1065  1.1  mrg   first.edge_num = 0;
   1066  1.1  mrg   first.overall_time = 0;
   1067  1.1  mrg   first.overall_size = 0;
   1068  1.1  mrg   first.earliest = INT_MAX;
   1069  1.1  mrg   first.set_ssa_names = 0;
   1070  1.1  mrg   first.used_ssa_names = 0;
   1071  1.1  mrg   first.non_ssa_vars = 0;
   1072  1.1  mrg   first.bbs_visited = 0;
   1073  1.1  mrg   first.can_split = false;
   1074  1.1  mrg   stack.safe_push (first);
   1075  1.1  mrg   ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(intptr_t)-1;
   1076  1.1  mrg 
   1077  1.1  mrg   while (!stack.is_empty ())
   1078  1.1  mrg     {
   1079  1.1  mrg       stack_entry *entry = &stack.last ();
   1080  1.1  mrg 
   1081  1.1  mrg       /* We are walking an acyclic graph, so edge_num counts
   1082  1.1  mrg 	 succ and pred edges together.  However when considering
   1083  1.1  mrg          articulation, we want to have processed everything reachable
   1084  1.1  mrg 	 from articulation but nothing that reaches into it.  */
   1085  1.1  mrg       if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
   1086  1.1  mrg 	  && entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
   1087  1.1  mrg 	{
   1088  1.1  mrg 	  int pos = stack.length ();
   1089  1.1  mrg 	  entry->can_split &= visit_bb (entry->bb, return_bb,
   1090  1.1  mrg 					entry->set_ssa_names,
   1091  1.1  mrg 					entry->used_ssa_names,
   1092  1.1  mrg 					entry->non_ssa_vars);
   1093  1.1  mrg 	  if (pos <= entry->earliest && !entry->can_split
   1094  1.1  mrg 	      && dump_file && (dump_flags & TDF_DETAILS))
   1095  1.1  mrg 	    fprintf (dump_file,
   1096  1.1  mrg 		     "found articulation at bb %i but cannot split\n",
   1097  1.1  mrg 		     entry->bb->index);
   1098  1.1  mrg 	  if (pos <= entry->earliest && entry->can_split)
   1099  1.1  mrg 	     {
   1100  1.1  mrg 	       if (dump_file && (dump_flags & TDF_DETAILS))
   1101  1.1  mrg 		 fprintf (dump_file, "found articulation at bb %i\n",
   1102  1.1  mrg 			  entry->bb->index);
   1103  1.1  mrg 	       current.entry_bb = entry->bb;
   1104  1.1  mrg 	       current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
   1105  1.1  mrg 	       bitmap_and_compl (current.ssa_names_to_pass,
   1106  1.1  mrg 				 entry->used_ssa_names, entry->set_ssa_names);
   1107  1.1  mrg 	       current.header_time = overall_time - entry->overall_time;
   1108  1.1  mrg 	       current.header_size = overall_size - entry->overall_size;
   1109  1.1  mrg 	       current.split_time = entry->overall_time;
   1110  1.1  mrg 	       current.split_size = entry->overall_size;
   1111  1.1  mrg 	       current.split_bbs = entry->bbs_visited;
   1112  1.1  mrg 	       consider_split (&current, entry->non_ssa_vars, return_bb);
   1113  1.1  mrg 	       BITMAP_FREE (current.ssa_names_to_pass);
   1114  1.1  mrg 	     }
   1115  1.1  mrg 	}
   1116  1.1  mrg       /* Do actual DFS walk.  */
   1117  1.1  mrg       if (entry->edge_num
   1118  1.1  mrg 	  < (EDGE_COUNT (entry->bb->succs)
   1119  1.1  mrg 	     + EDGE_COUNT (entry->bb->preds)))
   1120  1.1  mrg 	{
   1121  1.1  mrg           edge e;
   1122  1.1  mrg 	  basic_block dest;
   1123  1.1  mrg 	  if (entry->edge_num < EDGE_COUNT (entry->bb->succs))
   1124  1.1  mrg 	    {
   1125  1.1  mrg 	      e = EDGE_SUCC (entry->bb, entry->edge_num);
   1126  1.1  mrg 	      dest = e->dest;
   1127  1.1  mrg 	    }
   1128  1.1  mrg 	  else
   1129  1.1  mrg 	    {
   1130  1.1  mrg 	      e = EDGE_PRED (entry->bb, entry->edge_num
   1131  1.1  mrg 			     - EDGE_COUNT (entry->bb->succs));
   1132  1.1  mrg 	      dest = e->src;
   1133  1.1  mrg 	    }
   1134  1.1  mrg 
   1135  1.1  mrg 	  entry->edge_num++;
   1136  1.1  mrg 
   1137  1.1  mrg 	  /* New BB to visit, push it to the stack.  */
   1138  1.1  mrg 	  if (dest != return_bb && dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
   1139  1.1  mrg 	      && !dest->aux)
   1140  1.1  mrg 	    {
   1141  1.1  mrg 	      stack_entry new_entry;
   1142  1.1  mrg 
   1143  1.1  mrg 	      new_entry.bb = dest;
   1144  1.1  mrg 	      new_entry.edge_num = 0;
   1145  1.1  mrg 	      new_entry.overall_time
   1146  1.1  mrg 		 = bb_info_vec[dest->index].time;
   1147  1.1  mrg 	      new_entry.overall_size
   1148  1.1  mrg 		 = bb_info_vec[dest->index].size;
   1149  1.1  mrg 	      new_entry.earliest = INT_MAX;
   1150  1.1  mrg 	      new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
   1151  1.1  mrg 	      new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
   1152  1.1  mrg 	      new_entry.bbs_visited = BITMAP_ALLOC (NULL);
   1153  1.1  mrg 	      new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
   1154  1.1  mrg 	      new_entry.can_split = true;
   1155  1.1  mrg 	      bitmap_set_bit (new_entry.bbs_visited, dest->index);
   1156  1.1  mrg 	      stack.safe_push (new_entry);
   1157  1.1  mrg 	      dest->aux = (void *)(intptr_t)stack.length ();
   1158  1.1  mrg 	    }
   1159  1.1  mrg 	  /* Back edge found, record the earliest point.  */
   1160  1.1  mrg 	  else if ((intptr_t)dest->aux > 0
   1161  1.1  mrg 		   && (intptr_t)dest->aux < entry->earliest)
   1162  1.1  mrg 	    entry->earliest = (intptr_t)dest->aux;
   1163  1.1  mrg 	}
   1164  1.1  mrg       /* We are done with examining the edges.  Pop off the value from stack
   1165  1.1  mrg 	 and merge stuff we accumulate during the walk.  */
   1166  1.1  mrg       else if (entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
   1167  1.1  mrg 	{
   1168  1.1  mrg 	  stack_entry *prev = &stack[stack.length () - 2];
   1169  1.1  mrg 
   1170  1.1  mrg 	  entry->bb->aux = (void *)(intptr_t)-1;
   1171  1.1  mrg 	  prev->can_split &= entry->can_split;
   1172  1.1  mrg 	  if (prev->set_ssa_names)
   1173  1.1  mrg 	    {
   1174  1.1  mrg 	      bitmap_ior_into (prev->set_ssa_names, entry->set_ssa_names);
   1175  1.1  mrg 	      bitmap_ior_into (prev->used_ssa_names, entry->used_ssa_names);
   1176  1.1  mrg 	      bitmap_ior_into (prev->bbs_visited, entry->bbs_visited);
   1177  1.1  mrg 	      bitmap_ior_into (prev->non_ssa_vars, entry->non_ssa_vars);
   1178  1.1  mrg 	    }
   1179  1.1  mrg 	  if (prev->earliest > entry->earliest)
   1180  1.1  mrg 	    prev->earliest = entry->earliest;
   1181  1.1  mrg 	  prev->overall_time += entry->overall_time;
   1182  1.1  mrg 	  prev->overall_size += entry->overall_size;
   1183  1.1  mrg 	  BITMAP_FREE (entry->set_ssa_names);
   1184  1.1  mrg 	  BITMAP_FREE (entry->used_ssa_names);
   1185  1.1  mrg 	  BITMAP_FREE (entry->bbs_visited);
   1186  1.1  mrg 	  BITMAP_FREE (entry->non_ssa_vars);
   1187  1.1  mrg 	  stack.pop ();
   1188  1.1  mrg 	}
   1189  1.1  mrg       else
   1190  1.1  mrg         stack.pop ();
   1191  1.1  mrg     }
   1192  1.1  mrg   ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = NULL;
   1193  1.1  mrg   FOR_EACH_BB_FN (bb, cfun)
   1194  1.1  mrg     bb->aux = NULL;
   1195  1.1  mrg   stack.release ();
   1196  1.1  mrg   BITMAP_FREE (current.ssa_names_to_pass);
   1197  1.1  mrg }
   1198  1.1  mrg 
   1199  1.1  mrg /* Split function at SPLIT_POINT.  */
   1200  1.1  mrg 
   1201  1.1  mrg static void
   1202  1.1  mrg split_function (basic_block return_bb, class split_point *split_point,
   1203  1.1  mrg 		bool add_tsan_func_exit)
   1204  1.1  mrg {
   1205  1.1  mrg   vec<tree> args_to_pass = vNULL;
   1206  1.1  mrg   bitmap args_to_skip;
   1207  1.1  mrg   tree parm;
   1208  1.1  mrg   int num = 0;
   1209  1.1  mrg   cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
   1210  1.1  mrg   basic_block call_bb;
   1211  1.1  mrg   gcall *call, *tsan_func_exit_call = NULL;
   1212  1.1  mrg   edge e;
   1213  1.1  mrg   edge_iterator ei;
   1214  1.1  mrg   tree retval = NULL, real_retval = NULL;
   1215  1.1  mrg   gimple *last_stmt = NULL;
   1216  1.1  mrg   unsigned int i;
   1217  1.1  mrg   tree arg, ddef;
   1218  1.1  mrg 
   1219  1.1  mrg   if (dump_file)
   1220  1.1  mrg     {
   1221  1.1  mrg       fprintf (dump_file, "\n\nSplitting function at:\n");
   1222  1.1  mrg       dump_split_point (dump_file, split_point);
   1223  1.1  mrg     }
   1224  1.1  mrg 
   1225  1.1  mrg   if (cur_node->can_change_signature)
   1226  1.1  mrg     args_to_skip = BITMAP_ALLOC (NULL);
   1227  1.1  mrg   else
   1228  1.1  mrg     args_to_skip = NULL;
   1229  1.1  mrg 
   1230  1.1  mrg   /* Collect the parameters of new function and args_to_skip bitmap.  */
   1231  1.1  mrg   for (parm = DECL_ARGUMENTS (current_function_decl);
   1232  1.1  mrg        parm; parm = DECL_CHAIN (parm), num++)
   1233  1.1  mrg     if (args_to_skip
   1234  1.1  mrg 	&& (!is_gimple_reg (parm)
   1235  1.1  mrg 	    || (ddef = ssa_default_def (cfun, parm)) == NULL_TREE
   1236  1.1  mrg 	    || !bitmap_bit_p (split_point->ssa_names_to_pass,
   1237  1.1  mrg 			      SSA_NAME_VERSION (ddef))))
   1238  1.1  mrg       bitmap_set_bit (args_to_skip, num);
   1239  1.1  mrg     else
   1240  1.1  mrg       {
   1241  1.1  mrg 	/* This parm might not have been used up to now, but is going to be
   1242  1.1  mrg 	   used, hence register it.  */
   1243  1.1  mrg 	if (is_gimple_reg (parm))
   1244  1.1  mrg 	  arg = get_or_create_ssa_default_def (cfun, parm);
   1245  1.1  mrg 	else
   1246  1.1  mrg 	  arg = parm;
   1247  1.1  mrg 
   1248  1.1  mrg 	if (!useless_type_conversion_p (DECL_ARG_TYPE (parm), TREE_TYPE (arg)))
   1249  1.1  mrg 	  arg = fold_convert (DECL_ARG_TYPE (parm), arg);
   1250  1.1  mrg 	args_to_pass.safe_push (arg);
   1251  1.1  mrg       }
   1252  1.1  mrg 
   1253  1.1  mrg   /* See if the split function will return.  */
   1254  1.1  mrg   bool split_part_return_p = false;
   1255  1.1  mrg   FOR_EACH_EDGE (e, ei, return_bb->preds)
   1256  1.1  mrg     {
   1257  1.1  mrg       if (bitmap_bit_p (split_point->split_bbs, e->src->index))
   1258  1.1  mrg 	split_part_return_p = true;
   1259  1.1  mrg     }
   1260  1.1  mrg 
   1261  1.1  mrg   /* Add return block to what will become the split function.
   1262  1.1  mrg      We do not return; no return block is needed.  */
   1263  1.1  mrg   if (!split_part_return_p)
   1264  1.1  mrg     ;
   1265  1.1  mrg   /* We have no return block, so nothing is needed.  */
   1266  1.1  mrg   else if (return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
   1267  1.1  mrg     ;
   1268  1.1  mrg   /* When we do not want to return value, we need to construct
   1269  1.1  mrg      new return block with empty return statement.
   1270  1.1  mrg      FIXME: Once we are able to change return type, we should change function
   1271  1.1  mrg      to return void instead of just outputting function with undefined return
   1272  1.1  mrg      value.  For structures this affects quality of codegen.  */
   1273  1.1  mrg   else if ((retval = find_retval (return_bb))
   1274  1.1  mrg 	   && !split_point->split_part_set_retval)
   1275  1.1  mrg     {
   1276  1.1  mrg       bool redirected = true;
   1277  1.1  mrg       basic_block new_return_bb = create_basic_block (NULL, 0, return_bb);
   1278  1.1  mrg       gimple_stmt_iterator gsi = gsi_start_bb (new_return_bb);
   1279  1.1  mrg       gsi_insert_after (&gsi, gimple_build_return (NULL), GSI_NEW_STMT);
   1280  1.1  mrg       new_return_bb->count = profile_count::zero ();
   1281  1.1  mrg       while (redirected)
   1282  1.1  mrg 	{
   1283  1.1  mrg 	  redirected = false;
   1284  1.1  mrg 	  FOR_EACH_EDGE (e, ei, return_bb->preds)
   1285  1.1  mrg 	    if (bitmap_bit_p (split_point->split_bbs, e->src->index))
   1286  1.1  mrg 	      {
   1287  1.1  mrg 		new_return_bb->count += e->count ();
   1288  1.1  mrg 		redirect_edge_and_branch (e, new_return_bb);
   1289  1.1  mrg 		redirected = true;
   1290  1.1  mrg 		break;
   1291  1.1  mrg 	      }
   1292  1.1  mrg 	}
   1293  1.1  mrg       e = make_single_succ_edge (new_return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
   1294  1.1  mrg       add_bb_to_loop (new_return_bb, current_loops->tree_root);
   1295  1.1  mrg       bitmap_set_bit (split_point->split_bbs, new_return_bb->index);
   1296  1.1  mrg     }
   1297  1.1  mrg   /* When we pass around the value, use existing return block.  */
   1298  1.1  mrg   else
   1299  1.1  mrg     bitmap_set_bit (split_point->split_bbs, return_bb->index);
   1300  1.1  mrg 
   1301  1.1  mrg   /* If RETURN_BB has virtual operand PHIs, they must be removed and the
   1302  1.1  mrg      virtual operand marked for renaming as we change the CFG in a way that
   1303  1.1  mrg      tree-inline is not able to compensate for.
   1304  1.1  mrg 
   1305  1.1  mrg      Note this can happen whether or not we have a return value.  If we have
   1306  1.1  mrg      a return value, then RETURN_BB may have PHIs for real operands too.  */
   1307  1.1  mrg   if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
   1308  1.1  mrg     {
   1309  1.1  mrg       bool phi_p = false;
   1310  1.1  mrg       for (gphi_iterator gsi = gsi_start_phis (return_bb);
   1311  1.1  mrg 	   !gsi_end_p (gsi);)
   1312  1.1  mrg 	{
   1313  1.1  mrg 	  gphi *stmt = gsi.phi ();
   1314  1.1  mrg 	  if (!virtual_operand_p (gimple_phi_result (stmt)))
   1315  1.1  mrg 	    {
   1316  1.1  mrg 	      gsi_next (&gsi);
   1317  1.1  mrg 	      continue;
   1318  1.1  mrg 	    }
   1319  1.1  mrg 	  mark_virtual_phi_result_for_renaming (stmt);
   1320  1.1  mrg 	  remove_phi_node (&gsi, true);
   1321  1.1  mrg 	  phi_p = true;
   1322  1.1  mrg 	}
   1323  1.1  mrg       /* In reality we have to rename the reaching definition of the
   1324  1.1  mrg 	 virtual operand at return_bb as we will eventually release it
   1325  1.1  mrg 	 when we remove the code region we outlined.
   1326  1.1  mrg 	 So we have to rename all immediate virtual uses of that region
   1327  1.1  mrg 	 if we didn't see a PHI definition yet.  */
   1328  1.1  mrg       /* ???  In real reality we want to set the reaching vdef of the
   1329  1.1  mrg          entry of the SESE region as the vuse of the call and the reaching
   1330  1.1  mrg 	 vdef of the exit of the SESE region as the vdef of the call.  */
   1331  1.1  mrg       if (!phi_p)
   1332  1.1  mrg 	for (gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
   1333  1.1  mrg 	     !gsi_end_p (gsi);
   1334  1.1  mrg 	     gsi_next (&gsi))
   1335  1.1  mrg 	  {
   1336  1.1  mrg 	    gimple *stmt = gsi_stmt (gsi);
   1337  1.1  mrg 	    if (gimple_vuse (stmt))
   1338  1.1  mrg 	      {
   1339  1.1  mrg 		gimple_set_vuse (stmt, NULL_TREE);
   1340  1.1  mrg 		update_stmt (stmt);
   1341  1.1  mrg 	      }
   1342  1.1  mrg 	    if (gimple_vdef (stmt))
   1343  1.1  mrg 	      break;
   1344  1.1  mrg 	  }
   1345  1.1  mrg     }
   1346  1.1  mrg 
   1347  1.1  mrg   ipa_param_adjustments *adjustments;
   1348  1.1  mrg   bool skip_return = (!split_part_return_p
   1349  1.1  mrg 		      || !split_point->split_part_set_retval);
   1350  1.1  mrg   /* TODO: Perhaps get rid of args_to_skip entirely, after we make sure the
   1351  1.1  mrg      debug info generation and discrepancy avoiding works well too.  */
   1352  1.1  mrg   if ((args_to_skip && !bitmap_empty_p (args_to_skip))
   1353  1.1  mrg       || skip_return)
   1354  1.1  mrg     {
   1355  1.1  mrg       vec<ipa_adjusted_param, va_gc> *new_params = NULL;
   1356  1.1  mrg       unsigned j;
   1357  1.1  mrg       for (parm = DECL_ARGUMENTS (current_function_decl), j = 0;
   1358  1.1  mrg 	   parm; parm = DECL_CHAIN (parm), j++)
   1359  1.1  mrg 	if (!args_to_skip || !bitmap_bit_p (args_to_skip, j))
   1360  1.1  mrg 	  {
   1361  1.1  mrg 	    ipa_adjusted_param adj;
   1362  1.1  mrg 	    memset (&adj, 0, sizeof (adj));
   1363  1.1  mrg 	    adj.op = IPA_PARAM_OP_COPY;
   1364  1.1  mrg 	    adj.base_index = j;
   1365  1.1  mrg 	    adj.prev_clone_index = j;
   1366  1.1  mrg 	    vec_safe_push (new_params, adj);
   1367  1.1  mrg 	  }
   1368  1.1  mrg       adjustments = new ipa_param_adjustments (new_params, j, skip_return);
   1369  1.1  mrg     }
   1370  1.1  mrg   else
   1371  1.1  mrg     adjustments = NULL;
   1372  1.1  mrg 
   1373  1.1  mrg   /* Now create the actual clone.  */
   1374  1.1  mrg   cgraph_edge::rebuild_edges ();
   1375  1.1  mrg   node = cur_node->create_version_clone_with_body
   1376  1.1  mrg     (vNULL, NULL, adjustments,
   1377  1.1  mrg      split_point->split_bbs, split_point->entry_bb, "part");
   1378  1.1  mrg   delete adjustments;
   1379  1.1  mrg   node->split_part = true;
   1380  1.1  mrg 
   1381  1.1  mrg   if (cur_node->same_comdat_group)
   1382  1.1  mrg     {
   1383  1.1  mrg       /* TODO: call is versionable if we make sure that all
   1384  1.1  mrg 	 callers are inside of a comdat group.  */
   1385  1.1  mrg       cur_node->calls_comdat_local = true;
   1386  1.1  mrg       node->add_to_same_comdat_group (cur_node);
   1387  1.1  mrg     }
   1388  1.1  mrg 
   1389  1.1  mrg 
   1390  1.1  mrg   /* Let's take a time profile for splitted function.  */
   1391  1.1  mrg   if (cur_node->tp_first_run)
   1392  1.1  mrg     node->tp_first_run = cur_node->tp_first_run + 1;
   1393  1.1  mrg 
   1394  1.1  mrg   /* For usual cloning it is enough to clear builtin only when signature
   1395  1.1  mrg      changes.  For partial inlining we however cannot expect the part
   1396  1.1  mrg      of builtin implementation to have same semantic as the whole.  */
   1397  1.1  mrg   if (fndecl_built_in_p (node->decl))
   1398  1.1  mrg     set_decl_built_in_function (node->decl, NOT_BUILT_IN, 0);
   1399  1.1  mrg 
   1400  1.1  mrg   /* If return_bb contains any clobbers that refer to SSA_NAMEs
   1401  1.1  mrg      set in the split part, remove them.  Also reset debug stmts that
   1402  1.1  mrg      refer to SSA_NAMEs set in the split part.  */
   1403  1.1  mrg   if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
   1404  1.1  mrg     {
   1405  1.1  mrg       gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
   1406  1.1  mrg       while (!gsi_end_p (gsi))
   1407  1.1  mrg 	{
   1408  1.1  mrg 	  tree op;
   1409  1.1  mrg 	  ssa_op_iter iter;
   1410  1.1  mrg 	  gimple *stmt = gsi_stmt (gsi);
   1411  1.1  mrg 	  bool remove = false;
   1412  1.1  mrg 	  if (gimple_clobber_p (stmt) || is_gimple_debug (stmt))
   1413  1.1  mrg 	    FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
   1414  1.1  mrg 	      {
   1415  1.1  mrg 		basic_block bb = gimple_bb (SSA_NAME_DEF_STMT (op));
   1416  1.1  mrg 		if (op != retval
   1417  1.1  mrg 		    && bb
   1418  1.1  mrg 		    && bb != return_bb
   1419  1.1  mrg 		    && bitmap_bit_p (split_point->split_bbs, bb->index))
   1420  1.1  mrg 		  {
   1421  1.1  mrg 		    if (is_gimple_debug (stmt))
   1422  1.1  mrg 		      {
   1423  1.1  mrg 			gimple_debug_bind_reset_value (stmt);
   1424  1.1  mrg 			update_stmt (stmt);
   1425  1.1  mrg 		      }
   1426  1.1  mrg 		    else
   1427  1.1  mrg 		      remove = true;
   1428  1.1  mrg 		    break;
   1429  1.1  mrg 		  }
   1430  1.1  mrg 	      }
   1431  1.1  mrg 	  if (remove)
   1432  1.1  mrg 	    gsi_remove (&gsi, true);
   1433  1.1  mrg 	  else
   1434  1.1  mrg 	    gsi_next (&gsi);
   1435  1.1  mrg 	}
   1436  1.1  mrg     }
   1437  1.1  mrg 
   1438  1.1  mrg   /* If the original function is declared inline, there is no point in issuing
   1439  1.1  mrg      a warning for the non-inlinable part.  */
   1440  1.1  mrg   DECL_NO_INLINE_WARNING_P (node->decl) = 1;
   1441  1.1  mrg   cur_node->remove_callees ();
   1442  1.1  mrg   cur_node->remove_all_references ();
   1443  1.1  mrg   if (!split_part_return_p)
   1444  1.1  mrg     TREE_THIS_VOLATILE (node->decl) = 1;
   1445  1.1  mrg   if (dump_file)
   1446  1.1  mrg     dump_function_to_file (node->decl, dump_file, dump_flags);
   1447  1.1  mrg 
   1448  1.1  mrg   /* Create the basic block we place call into.  It is the entry basic block
   1449  1.1  mrg      split after last label.  */
   1450  1.1  mrg   call_bb = split_point->entry_bb;
   1451  1.1  mrg   for (gimple_stmt_iterator gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
   1452  1.1  mrg     if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
   1453  1.1  mrg       {
   1454  1.1  mrg 	last_stmt = gsi_stmt (gsi);
   1455  1.1  mrg 	gsi_next (&gsi);
   1456  1.1  mrg       }
   1457  1.1  mrg     else
   1458  1.1  mrg       break;
   1459  1.1  mrg   call_bb->count = split_point->count;
   1460  1.1  mrg   e = split_block (split_point->entry_bb, last_stmt);
   1461  1.1  mrg   remove_edge (e);
   1462  1.1  mrg 
   1463  1.1  mrg   /* Produce the call statement.  */
   1464  1.1  mrg   gimple_stmt_iterator gsi = gsi_last_bb (call_bb);
   1465  1.1  mrg   FOR_EACH_VEC_ELT (args_to_pass, i, arg)
   1466  1.1  mrg     if (!is_gimple_val (arg))
   1467  1.1  mrg       {
   1468  1.1  mrg 	arg = force_gimple_operand_gsi (&gsi, arg, true, NULL_TREE,
   1469  1.1  mrg 					false, GSI_CONTINUE_LINKING);
   1470  1.1  mrg 	args_to_pass[i] = arg;
   1471  1.1  mrg       }
   1472  1.1  mrg   call = gimple_build_call_vec (node->decl, args_to_pass);
   1473  1.1  mrg   gimple_set_block (call, DECL_INITIAL (current_function_decl));
   1474  1.1  mrg   args_to_pass.release ();
   1475  1.1  mrg 
   1476  1.1  mrg   /* For optimized away parameters, add on the caller side
   1477  1.1  mrg      before the call
   1478  1.1  mrg      DEBUG D#X => parm_Y(D)
   1479  1.1  mrg      stmts and associate D#X with parm in decl_debug_args_lookup
   1480  1.1  mrg      vector to say for debug info that if parameter parm had been passed,
   1481  1.1  mrg      it would have value parm_Y(D).  */
   1482  1.1  mrg   if (args_to_skip)
   1483  1.1  mrg     {
   1484  1.1  mrg       vec<tree, va_gc> **debug_args = NULL;
   1485  1.1  mrg       unsigned i = 0, len = 0;
   1486  1.1  mrg       if (MAY_HAVE_DEBUG_BIND_STMTS)
   1487  1.1  mrg 	{
   1488  1.1  mrg 	  debug_args = decl_debug_args_lookup (node->decl);
   1489  1.1  mrg 	  if (debug_args)
   1490  1.1  mrg 	    len = vec_safe_length (*debug_args);
   1491  1.1  mrg 	}
   1492  1.1  mrg       for (parm = DECL_ARGUMENTS (current_function_decl), num = 0;
   1493  1.1  mrg 	   parm; parm = DECL_CHAIN (parm), num++)
   1494  1.1  mrg 	if (bitmap_bit_p (args_to_skip, num) && is_gimple_reg (parm))
   1495  1.1  mrg 	  {
   1496  1.1  mrg 	    tree ddecl;
   1497  1.1  mrg 	    gimple *def_temp;
   1498  1.1  mrg 
   1499  1.1  mrg 	    /* This needs to be done even without
   1500  1.1  mrg 	       MAY_HAVE_DEBUG_BIND_STMTS, otherwise if it didn't exist
   1501  1.1  mrg 	       before, we'd end up with different SSA_NAME_VERSIONs
   1502  1.1  mrg 	       between -g and -g0.  */
   1503  1.1  mrg 	    arg = get_or_create_ssa_default_def (cfun, parm);
   1504  1.1  mrg 	    if (!MAY_HAVE_DEBUG_BIND_STMTS || debug_args == NULL)
   1505  1.1  mrg 	      continue;
   1506  1.1  mrg 
   1507  1.1  mrg 	    while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
   1508  1.1  mrg 	      i += 2;
   1509  1.1  mrg 	    if (i >= len)
   1510  1.1  mrg 	      continue;
   1511  1.1  mrg 	    ddecl = (**debug_args)[i + 1];
   1512  1.1  mrg 	    def_temp
   1513  1.1  mrg 	      = gimple_build_debug_bind (ddecl, unshare_expr (arg), call);
   1514  1.1  mrg 	    gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
   1515  1.1  mrg 	  }
   1516  1.1  mrg       BITMAP_FREE (args_to_skip);
   1517  1.1  mrg     }
   1518  1.1  mrg 
   1519  1.1  mrg   /* We avoid address being taken on any variable used by split part,
   1520  1.1  mrg      so return slot optimization is always possible.  Moreover this is
   1521  1.1  mrg      required to make DECL_BY_REFERENCE work.  */
   1522  1.1  mrg   if (aggregate_value_p (DECL_RESULT (current_function_decl),
   1523  1.1  mrg 			 TREE_TYPE (current_function_decl))
   1524  1.1  mrg       && (!is_gimple_reg_type (TREE_TYPE (DECL_RESULT (current_function_decl)))
   1525  1.1  mrg 	  || DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))))
   1526  1.1  mrg     gimple_call_set_return_slot_opt (call, true);
   1527  1.1  mrg 
   1528  1.1  mrg   if (add_tsan_func_exit)
   1529  1.1  mrg     tsan_func_exit_call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
   1530  1.1  mrg 
   1531  1.1  mrg   /* Update return value.  This is bit tricky.  When we do not return,
   1532  1.1  mrg      do nothing.  When we return we might need to update return_bb
   1533  1.1  mrg      or produce a new return statement.  */
   1534  1.1  mrg   if (!split_part_return_p)
   1535  1.1  mrg     {
   1536  1.1  mrg       gsi_insert_after (&gsi, call, GSI_NEW_STMT);
   1537  1.1  mrg       if (tsan_func_exit_call)
   1538  1.1  mrg 	gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
   1539  1.1  mrg     }
   1540  1.1  mrg   else
   1541  1.1  mrg     {
   1542  1.1  mrg       e = make_single_succ_edge (call_bb, return_bb,
   1543  1.1  mrg 				 return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
   1544  1.1  mrg 				 ? 0 : EDGE_FALLTHRU);
   1545  1.1  mrg 
   1546  1.1  mrg       /* If there is return basic block, see what value we need to store
   1547  1.1  mrg          return value into and put call just before it.  */
   1548  1.1  mrg       if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
   1549  1.1  mrg 	{
   1550  1.1  mrg 	  real_retval = retval;
   1551  1.1  mrg 	  if (real_retval && split_point->split_part_set_retval)
   1552  1.1  mrg 	    {
   1553  1.1  mrg 	      gphi_iterator psi;
   1554  1.1  mrg 
   1555  1.1  mrg 	      /* See if we need new SSA_NAME for the result.
   1556  1.1  mrg 		 When DECL_BY_REFERENCE is true, retval is actually pointer to
   1557  1.1  mrg 		 return value and it is constant in whole function.  */
   1558  1.1  mrg 	      if (TREE_CODE (retval) == SSA_NAME
   1559  1.1  mrg 		  && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
   1560  1.1  mrg 		{
   1561  1.1  mrg 		  retval = copy_ssa_name (retval, call);
   1562  1.1  mrg 
   1563  1.1  mrg 		  /* See if there is PHI defining return value.  */
   1564  1.1  mrg 		  for (psi = gsi_start_phis (return_bb);
   1565  1.1  mrg 		       !gsi_end_p (psi); gsi_next (&psi))
   1566  1.1  mrg 		    if (!virtual_operand_p (gimple_phi_result (psi.phi ())))
   1567  1.1  mrg 		      break;
   1568  1.1  mrg 
   1569  1.1  mrg 		  /* When there is PHI, just update its value.  */
   1570  1.1  mrg 		  if (TREE_CODE (retval) == SSA_NAME
   1571  1.1  mrg 		      && !gsi_end_p (psi))
   1572  1.1  mrg 		    add_phi_arg (psi.phi (), retval, e, UNKNOWN_LOCATION);
   1573  1.1  mrg 		  /* Otherwise update the return BB itself.
   1574  1.1  mrg 		     find_return_bb allows at most one assignment to return value,
   1575  1.1  mrg 		     so update first statement.  */
   1576  1.1  mrg 		  else
   1577  1.1  mrg 		    {
   1578  1.1  mrg 		      gimple_stmt_iterator bsi;
   1579  1.1  mrg 		      for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
   1580  1.1  mrg 			   gsi_next (&bsi))
   1581  1.1  mrg 			if (greturn *return_stmt
   1582  1.1  mrg 			      = dyn_cast <greturn *> (gsi_stmt (bsi)))
   1583  1.1  mrg 			  {
   1584  1.1  mrg 			    gimple_return_set_retval (return_stmt, retval);
   1585  1.1  mrg 			    break;
   1586  1.1  mrg 			  }
   1587  1.1  mrg 			else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
   1588  1.1  mrg 				 && !gimple_clobber_p (gsi_stmt (bsi)))
   1589  1.1  mrg 			  {
   1590  1.1  mrg 			    gimple_assign_set_rhs1 (gsi_stmt (bsi), retval);
   1591  1.1  mrg 			    break;
   1592  1.1  mrg 			  }
   1593  1.1  mrg 		      update_stmt (gsi_stmt (bsi));
   1594  1.1  mrg 		      /* Also adjust clobbers and debug stmts in return_bb.  */
   1595  1.1  mrg 		      for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
   1596  1.1  mrg 			   gsi_next (&bsi))
   1597  1.1  mrg 			{
   1598  1.1  mrg 			  gimple *stmt = gsi_stmt (bsi);
   1599  1.1  mrg 			  if (gimple_clobber_p (stmt)
   1600  1.1  mrg 			      || is_gimple_debug (stmt))
   1601  1.1  mrg 			    {
   1602  1.1  mrg 			      ssa_op_iter iter;
   1603  1.1  mrg 			      use_operand_p use_p;
   1604  1.1  mrg 			      bool update = false;
   1605  1.1  mrg 			      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
   1606  1.1  mrg 							SSA_OP_USE)
   1607  1.1  mrg 				if (USE_FROM_PTR (use_p) == real_retval)
   1608  1.1  mrg 				  {
   1609  1.1  mrg 				    SET_USE (use_p, retval);
   1610  1.1  mrg 				    update = true;
   1611  1.1  mrg 				  }
   1612  1.1  mrg 			      if (update)
   1613  1.1  mrg 				update_stmt (stmt);
   1614  1.1  mrg 			    }
   1615  1.1  mrg 			}
   1616  1.1  mrg 		    }
   1617  1.1  mrg 		}
   1618  1.1  mrg 	      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
   1619  1.1  mrg 		{
   1620  1.1  mrg 		  gimple_call_set_lhs (call, build_simple_mem_ref (retval));
   1621  1.1  mrg 		  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
   1622  1.1  mrg 		}
   1623  1.1  mrg 	      else
   1624  1.1  mrg 		{
   1625  1.1  mrg 		  tree restype;
   1626  1.1  mrg 		  restype = TREE_TYPE (DECL_RESULT (current_function_decl));
   1627  1.1  mrg 		  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
   1628  1.1  mrg 		  if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
   1629  1.1  mrg 		    {
   1630  1.1  mrg 		      gimple *cpy;
   1631  1.1  mrg 		      tree tem = create_tmp_reg (restype);
   1632  1.1  mrg 		      tem = make_ssa_name (tem, call);
   1633  1.1  mrg 		      cpy = gimple_build_assign (retval, NOP_EXPR, tem);
   1634  1.1  mrg 		      gsi_insert_after (&gsi, cpy, GSI_NEW_STMT);
   1635  1.1  mrg 		      retval = tem;
   1636  1.1  mrg 		    }
   1637  1.1  mrg 		  gimple_call_set_lhs (call, retval);
   1638  1.1  mrg 		  update_stmt (call);
   1639  1.1  mrg 		}
   1640  1.1  mrg 	    }
   1641  1.1  mrg 	  else
   1642  1.1  mrg 	    gsi_insert_after (&gsi, call, GSI_NEW_STMT);
   1643  1.1  mrg 	  if (tsan_func_exit_call)
   1644  1.1  mrg 	    gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
   1645  1.1  mrg 	}
   1646  1.1  mrg       /* We don't use return block (there is either no return in function or
   1647  1.1  mrg 	 multiple of them).  So create new basic block with return statement.
   1648  1.1  mrg 	 */
   1649  1.1  mrg       else
   1650  1.1  mrg 	{
   1651  1.1  mrg 	  greturn *ret;
   1652  1.1  mrg 	  if (split_point->split_part_set_retval
   1653  1.1  mrg 	      && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
   1654  1.1  mrg 	    {
   1655  1.1  mrg 	      retval = DECL_RESULT (current_function_decl);
   1656  1.1  mrg 
   1657  1.1  mrg 	      /* We use temporary register to hold value when aggregate_value_p
   1658  1.1  mrg 		 is false.  Similarly for DECL_BY_REFERENCE we must avoid extra
   1659  1.1  mrg 		 copy.  */
   1660  1.1  mrg 	      if (!aggregate_value_p (retval, TREE_TYPE (current_function_decl))
   1661  1.1  mrg 		  && !DECL_BY_REFERENCE (retval))
   1662  1.1  mrg 		retval = create_tmp_reg (TREE_TYPE (retval));
   1663  1.1  mrg 	      if (is_gimple_reg (retval))
   1664  1.1  mrg 		{
   1665  1.1  mrg 		  /* When returning by reference, there is only one SSA name
   1666  1.1  mrg 		     assigned to RESULT_DECL (that is pointer to return value).
   1667  1.1  mrg 		     Look it up or create new one if it is missing.  */
   1668  1.1  mrg 		  if (DECL_BY_REFERENCE (retval))
   1669  1.1  mrg 		    retval = get_or_create_ssa_default_def (cfun, retval);
   1670  1.1  mrg 		  /* Otherwise produce new SSA name for return value.  */
   1671  1.1  mrg 		  else
   1672  1.1  mrg 		    retval = make_ssa_name (retval, call);
   1673  1.1  mrg 		}
   1674  1.1  mrg 	      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
   1675  1.1  mrg 	        gimple_call_set_lhs (call, build_simple_mem_ref (retval));
   1676  1.1  mrg 	      else
   1677  1.1  mrg 	        gimple_call_set_lhs (call, retval);
   1678  1.1  mrg 	      gsi_insert_after (&gsi, call, GSI_NEW_STMT);
   1679  1.1  mrg 	    }
   1680  1.1  mrg 	  else
   1681  1.1  mrg 	    {
   1682  1.1  mrg 	      gsi_insert_after (&gsi, call, GSI_NEW_STMT);
   1683  1.1  mrg 	      if (retval
   1684  1.1  mrg 		  && is_gimple_reg_type (TREE_TYPE (retval))
   1685  1.1  mrg 		  && !is_gimple_val (retval))
   1686  1.1  mrg 		{
   1687  1.1  mrg 		  gassign *g
   1688  1.1  mrg 		    = gimple_build_assign (make_ssa_name (TREE_TYPE (retval)),
   1689  1.1  mrg 					   retval);
   1690  1.1  mrg 		  retval = gimple_assign_lhs (g);
   1691  1.1  mrg 		  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
   1692  1.1  mrg 		}
   1693  1.1  mrg 	    }
   1694  1.1  mrg 	  if (tsan_func_exit_call)
   1695  1.1  mrg 	    gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
   1696  1.1  mrg 	  ret = gimple_build_return (retval);
   1697  1.1  mrg 	  gsi_insert_after (&gsi, ret, GSI_NEW_STMT);
   1698  1.1  mrg 	}
   1699  1.1  mrg     }
   1700  1.1  mrg   free_dominance_info (CDI_DOMINATORS);
   1701  1.1  mrg   free_dominance_info (CDI_POST_DOMINATORS);
   1702  1.1  mrg   compute_fn_summary (node, true);
   1703  1.1  mrg }
   1704  1.1  mrg 
    1705  1.1  mrg /* Execute function splitting pass.  Estimate per-basic-block size and
                      time, search for the most profitable split point and, when one is
                      found, outline the cold part into a separate "*.part" clone.
                      Returns TODO_update_ssa | TODO_cleanup_cfg when a split was
                      performed and 0 otherwise.  */
    1706  1.1  mrg 
    1707  1.1  mrg static unsigned int
    1708  1.1  mrg execute_split_functions (void)
    1709  1.1  mrg {
    1710  1.1  mrg   gimple_stmt_iterator bsi;
    1711  1.1  mrg   basic_block bb;
    1712  1.1  mrg   sreal overall_time = 0;
    1713  1.1  mrg   int overall_size = 0;
    1714  1.1  mrg   int todo = 0;
    1715  1.1  mrg   struct cgraph_node *node = cgraph_node::get (current_function_decl);
    1716  1.1  mrg 
                    /* Early-exit gates: splitting is pointless or unsupported for the
                       cases below, so bail out (with an explanation in the dump file).  */
    1717  1.1  mrg   if (flags_from_decl_or_type (current_function_decl)
    1718  1.1  mrg       & (ECF_NORETURN|ECF_MALLOC))
    1719  1.1  mrg     {
    1720  1.1  mrg       if (dump_file)
    1721  1.1  mrg 	fprintf (dump_file, "Not splitting: noreturn/malloc function.\n");
    1722  1.1  mrg       return 0;
    1723  1.1  mrg     }
    1724  1.1  mrg   if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
    1725  1.1  mrg     {
    1726  1.1  mrg       if (dump_file)
    1727  1.1  mrg 	fprintf (dump_file, "Not splitting: main function.\n");
    1728  1.1  mrg       return 0;
    1729  1.1  mrg     }
    1730  1.1  mrg   if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
    1731  1.1  mrg     {
    1732  1.1  mrg       if (dump_file)
    1733  1.1  mrg 	fprintf (dump_file, "Not splitting: function is unlikely executed.\n");
    1734  1.1  mrg       return 0;
    1735  1.1  mrg     }
    1736  1.1  mrg   /* This can be relaxed; function might become inlinable after splitting
    1737  1.1  mrg      away the uninlinable part.  */
    1738  1.1  mrg   if (ipa_fn_summaries
    1739  1.1  mrg       && ipa_fn_summaries->get (node)
    1740  1.1  mrg       && !ipa_fn_summaries->get (node)->inlinable)
    1741  1.1  mrg     {
    1742  1.1  mrg       if (dump_file)
    1743  1.1  mrg 	fprintf (dump_file, "Not splitting: not inlinable.\n");
    1744  1.1  mrg       return 0;
    1745  1.1  mrg     }
    1746  1.1  mrg   if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
    1747  1.1  mrg     {
    1748  1.1  mrg       if (dump_file)
    1749  1.1  mrg 	fprintf (dump_file, "Not splitting: disregarding inline limits.\n");
    1750  1.1  mrg       return 0;
    1751  1.1  mrg     }
    1752  1.1  mrg   /* This can be relaxed; most of versioning tests actually prevents
    1753  1.1  mrg      a duplication.  */
    1754  1.1  mrg   if (!tree_versionable_function_p (current_function_decl))
    1755  1.1  mrg     {
    1756  1.1  mrg       if (dump_file)
    1757  1.1  mrg 	fprintf (dump_file, "Not splitting: not versionable.\n");
    1758  1.1  mrg       return 0;
    1759  1.1  mrg     }
    1760  1.1  mrg   /* FIXME: we could support this.  */
    1761  1.1  mrg   if (DECL_STRUCT_FUNCTION (current_function_decl)->static_chain_decl)
    1762  1.1  mrg     {
    1763  1.1  mrg       if (dump_file)
    1764  1.1  mrg 	fprintf (dump_file, "Not splitting: nested function.\n");
    1765  1.1  mrg       return 0;
    1766  1.1  mrg     }
    1767  1.1  mrg 
    1768  1.1  mrg   /* See if it makes sense to try to split.
    1769  1.1  mrg      It makes sense to split if we inline, that is if we have direct calls to
    1770  1.1  mrg      handle or direct calls are possibly going to appear as result of indirect
    1771  1.1  mrg      inlining or LTO.  Also handle -fprofile-generate as LTO to allow non-LTO
    1772  1.1  mrg      training for LTO -fprofile-use build.
    1773  1.1  mrg 
    1774  1.1  mrg      Note that we are not completely conservative about disqualifying functions
    1775  1.1  mrg      called once.  It is possible that the caller is called more then once and
    1776  1.1  mrg      then inlining would still benefit.  */
    1777  1.1  mrg   if ((!node->callers
    1778  1.1  mrg        /* Local functions called once will be completely inlined most of time.  */
    1779  1.1  mrg        || (!node->callers->next_caller && node->local))
    1780  1.1  mrg       && !node->address_taken
    1781  1.1  mrg       && !node->has_aliases_p ()
    1782  1.1  mrg       && (!flag_lto || !node->externally_visible))
    1783  1.1  mrg     {
    1784  1.1  mrg       if (dump_file)
    1785  1.1  mrg 	fprintf (dump_file, "Not splitting: not called directly "
    1786  1.1  mrg 		 "or called once.\n");
    1787  1.1  mrg       return 0;
    1788  1.1  mrg     }
    1789  1.1  mrg 
    1790  1.1  mrg   /* FIXME: We can actually split if splitting reduces call overhead.  */
    1791  1.1  mrg   if (!flag_inline_small_functions
    1792  1.1  mrg       && !DECL_DECLARED_INLINE_P (current_function_decl))
    1793  1.1  mrg     {
    1794  1.1  mrg       if (dump_file)
    1795  1.1  mrg 	fprintf (dump_file, "Not splitting: not autoinlining and function"
    1796  1.1  mrg 		 " is not inline.\n");
    1797  1.1  mrg       return 0;
    1798  1.1  mrg     }
    1799  1.1  mrg 
    1800  1.1  mrg   if (lookup_attribute ("noinline", DECL_ATTRIBUTES (current_function_decl)))
    1801  1.1  mrg     {
    1802  1.1  mrg       if (dump_file)
    1803  1.1  mrg 	fprintf (dump_file, "Not splitting: function is noinline.\n");
    1804  1.1  mrg       return 0;
    1805  1.1  mrg     }
    1806  1.1  mrg   if (lookup_attribute ("section", DECL_ATTRIBUTES (current_function_decl)))
    1807  1.1  mrg     {
    1808  1.1  mrg       if (dump_file)
    1809  1.1  mrg 	fprintf (dump_file, "Not splitting: function is in user defined "
    1810  1.1  mrg 		 "section.\n");
    1811  1.1  mrg       return 0;
    1812  1.1  mrg     }
    1813  1.1  mrg 
    1814  1.1  mrg   /* We enforce splitting after loop headers when profile info is not
    1815  1.1  mrg      available.  */
    1816  1.1  mrg   if (profile_status_for_fn (cfun) != PROFILE_READ)
    1817  1.1  mrg     mark_dfs_back_edges ();
    1818  1.1  mrg 
    1819  1.1  mrg   /* Initialize bitmap to track forbidden calls.  */
    1820  1.1  mrg   forbidden_dominators = BITMAP_ALLOC (NULL);
    1821  1.1  mrg   calculate_dominance_info (CDI_DOMINATORS);
    1822  1.1  mrg 
    1823  1.1  mrg   /* Compute local info about basic blocks and determine function size/time.  */
    1824  1.1  mrg   bb_info_vec.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1, true);
    1825  1.1  mrg   best_split_point.split_bbs = NULL;
    1826  1.1  mrg   basic_block return_bb = find_return_bb ();
                    /* Where TSAN_FUNC_EXIT calls have been seen so far:
                       -1 = none yet, 0 = in return_bb itself, 1 = in a predecessor of
                       return_bb (see the assignment inside the loop below).  */
    1827  1.1  mrg   int tsan_exit_found = -1;
    1828  1.1  mrg   FOR_EACH_BB_FN (bb, cfun)
    1829  1.1  mrg     {
    1830  1.1  mrg       sreal time = 0;
    1831  1.1  mrg       int size = 0;
    1832  1.1  mrg       sreal freq = bb->count.to_sreal_scale
    1833  1.1  mrg 			 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
    1834  1.1  mrg 
    1835  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    1836  1.1  mrg 	fprintf (dump_file, "Basic block %i\n", bb->index);
    1837  1.1  mrg 
    1838  1.1  mrg       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    1839  1.1  mrg 	{
    1840  1.1  mrg 	  sreal this_time;
    1841  1.1  mrg 	  int this_size;
    1842  1.1  mrg 	  gimple *stmt = gsi_stmt (bsi);
    1843  1.1  mrg 
    1844  1.1  mrg 	  this_size = estimate_num_insns (stmt, &eni_size_weights);
    1845  1.1  mrg 	  this_time = (sreal)estimate_num_insns (stmt, &eni_time_weights)
    1846  1.1  mrg 			 * freq;
    1847  1.1  mrg 	  size += this_size;
    1848  1.1  mrg 	  time += this_time;
    1849  1.1  mrg 	  check_forbidden_calls (stmt);
    1850  1.1  mrg 
    1851  1.1  mrg 	  if (dump_file && (dump_flags & TDF_DETAILS))
    1852  1.1  mrg 	    {
    1853  1.1  mrg 	      fprintf (dump_file, "  freq:%4.2f size:%3i time:%4.2f ",
    1854  1.1  mrg 		       freq.to_double (), this_size, this_time.to_double ());
    1855  1.1  mrg 	      print_gimple_stmt (dump_file, stmt, 0);
    1856  1.1  mrg 	    }
    1857  1.1  mrg 
    1858  1.1  mrg 	  if ((flag_sanitize & SANITIZE_THREAD)
    1859  1.1  mrg 	      && gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
    1860  1.1  mrg 	    {
    1861  1.1  mrg 	      /* We handle TSAN_FUNC_EXIT for splitting either in the
    1862  1.1  mrg 		 return_bb, or in its immediate predecessors.  */
    1863  1.1  mrg 	      if ((bb != return_bb && !find_edge (bb, return_bb))
    1864  1.1  mrg 		  || (tsan_exit_found != -1
    1865  1.1  mrg 		      && tsan_exit_found != (bb != return_bb)))
    1866  1.1  mrg 		{
    1867  1.1  mrg 		  if (dump_file)
    1868  1.1  mrg 		    fprintf (dump_file, "Not splitting: TSAN_FUNC_EXIT"
    1869  1.1  mrg 			     " in unexpected basic block.\n");
    1870  1.1  mrg 		  BITMAP_FREE (forbidden_dominators);
    1871  1.1  mrg 		  bb_info_vec.release ();
    1872  1.1  mrg 		  return 0;
    1873  1.1  mrg 		}
    1874  1.1  mrg 	      tsan_exit_found = bb != return_bb;
    1875  1.1  mrg 	    }
    1876  1.1  mrg 	}
    1877  1.1  mrg       overall_time += time;
    1878  1.1  mrg       overall_size += size;
    1879  1.1  mrg       bb_info_vec[bb->index].time = time;
    1880  1.1  mrg       bb_info_vec[bb->index].size = size;
    1881  1.1  mrg     }
    1882  1.1  mrg   find_split_points (return_bb, overall_time, overall_size);
    1883  1.1  mrg   if (best_split_point.split_bbs)
    1884  1.1  mrg     {
    1885  1.1  mrg       split_function (return_bb, &best_split_point, tsan_exit_found == 1);
    1886  1.1  mrg       BITMAP_FREE (best_split_point.ssa_names_to_pass);
    1887  1.1  mrg       BITMAP_FREE (best_split_point.split_bbs);
    1888  1.1  mrg       todo = TODO_update_ssa | TODO_cleanup_cfg;
    1889  1.1  mrg     }
    1890  1.1  mrg   BITMAP_FREE (forbidden_dominators);
    1891  1.1  mrg   bb_info_vec.release ();
    1892  1.1  mrg   return todo;
    1893  1.1  mrg }
   1894  1.1  mrg 
   1895  1.1  mrg namespace {
   1896  1.1  mrg 
                    /* Metadata for the "fnsplit" GIMPLE pass; it requires a CFG
                       (PROP_cfg) and sets no TODO flags of its own.  */
    1897  1.1  mrg const pass_data pass_data_split_functions =
    1898  1.1  mrg {
    1899  1.1  mrg   GIMPLE_PASS, /* type */
    1900  1.1  mrg   "fnsplit", /* name */
    1901  1.1  mrg   OPTGROUP_NONE, /* optinfo_flags */
    1902  1.1  mrg   TV_IPA_FNSPLIT, /* tv_id */
    1903  1.1  mrg   PROP_cfg, /* properties_required */
    1904  1.1  mrg   0, /* properties_provided */
    1905  1.1  mrg   0, /* properties_destroyed */
    1906  1.1  mrg   0, /* todo_flags_start */
    1907  1.1  mrg   0, /* todo_flags_finish */
    1908  1.1  mrg };
   1909  1.1  mrg 
                    /* Pass-manager wrapper for function splitting: execute () simply
                       forwards to execute_split_functions; gate () is defined below.  */
    1910  1.1  mrg class pass_split_functions : public gimple_opt_pass
    1911  1.1  mrg {
    1912  1.1  mrg public:
    1913  1.1  mrg   pass_split_functions (gcc::context *ctxt)
    1914  1.1  mrg     : gimple_opt_pass (pass_data_split_functions, ctxt)
    1915  1.1  mrg   {}
    1916  1.1  mrg 
    1917  1.1  mrg   /* opt_pass methods: */
    1918  1.1  mrg   virtual bool gate (function *);
    1919  1.1  mrg   virtual unsigned int execute (function *)
    1920  1.1  mrg     {
    1921  1.1  mrg       return execute_split_functions ();
    1922  1.1  mrg     }
    1923  1.1  mrg 
    1924  1.1  mrg }; // class pass_split_functions
   1925  1.1  mrg 
   1926  1.1  mrg bool
   1927  1.1  mrg pass_split_functions::gate (function *)
   1928  1.1  mrg {
   1929  1.1  mrg   /* When doing profile feedback, we want to execute the pass after profiling
   1930  1.1  mrg      is read.  So disable one in early optimization.  */
   1931  1.1  mrg   return (flag_partial_inlining
   1932  1.1  mrg 	  && !profile_arc_flag && !flag_branch_probabilities);
   1933  1.1  mrg }
   1934  1.1  mrg 
   1935  1.1  mrg } // anon namespace
   1936  1.1  mrg 
   1937  1.1  mrg gimple_opt_pass *
   1938  1.1  mrg make_pass_split_functions (gcc::context *ctxt)
   1939  1.1  mrg {
   1940  1.1  mrg   return new pass_split_functions (ctxt);
   1941  1.1  mrg }
   1942  1.1  mrg 
   1943  1.1  mrg /* Execute function splitting pass.  */
   1944  1.1  mrg 
   1945  1.1  mrg static unsigned int
   1946  1.1  mrg execute_feedback_split_functions (void)
   1947  1.1  mrg {
   1948  1.1  mrg   unsigned int retval = execute_split_functions ();
   1949  1.1  mrg   if (retval)
   1950  1.1  mrg     retval |= TODO_rebuild_cgraph_edges;
   1951  1.1  mrg   return retval;
   1952  1.1  mrg }
   1953  1.1  mrg 
   1954  1.1  mrg namespace {
   1955  1.1  mrg 
   1956  1.1  mrg const pass_data pass_data_feedback_split_functions =
   1957  1.1  mrg {
   1958  1.1  mrg   GIMPLE_PASS, /* type */
   1959  1.1  mrg   "feedback_fnsplit", /* name */
   1960  1.1  mrg   OPTGROUP_NONE, /* optinfo_flags */
   1961  1.1  mrg   TV_IPA_FNSPLIT, /* tv_id */
   1962  1.1  mrg   PROP_cfg, /* properties_required */
   1963  1.1  mrg   0, /* properties_provided */
   1964  1.1  mrg   0, /* properties_destroyed */
   1965  1.1  mrg   0, /* todo_flags_start */
   1966  1.1  mrg   0, /* todo_flags_finish */
   1967  1.1  mrg };
   1968  1.1  mrg 
   1969  1.1  mrg class pass_feedback_split_functions : public gimple_opt_pass
   1970  1.1  mrg {
   1971  1.1  mrg public:
   1972  1.1  mrg   pass_feedback_split_functions (gcc::context *ctxt)
   1973  1.1  mrg     : gimple_opt_pass (pass_data_feedback_split_functions, ctxt)
   1974  1.1  mrg   {}
   1975  1.1  mrg 
   1976  1.1  mrg   /* opt_pass methods: */
   1977  1.1  mrg   virtual bool gate (function *);
   1978  1.1  mrg   virtual unsigned int execute (function *)
   1979  1.1  mrg     {
   1980  1.1  mrg       return execute_feedback_split_functions ();
   1981  1.1  mrg     }
   1982  1.1  mrg 
   1983  1.1  mrg }; // class pass_feedback_split_functions
   1984  1.1  mrg 
   1985  1.1  mrg bool
   1986  1.1  mrg pass_feedback_split_functions::gate (function *)
   1987  1.1  mrg {
   1988  1.1  mrg   /* We don't need to split when profiling at all, we are producing
   1989  1.1  mrg      lousy code anyway.  */
   1990  1.1  mrg   return (flag_partial_inlining
   1991  1.1  mrg 	  && flag_branch_probabilities);
   1992  1.1  mrg }
   1993  1.1  mrg 
   1994  1.1  mrg } // anon namespace
   1995  1.1  mrg 
   1996  1.1  mrg gimple_opt_pass *
   1997  1.1  mrg make_pass_feedback_split_functions (gcc::context *ctxt)
   1998  1.1  mrg {
   1999  1.1  mrg   return new pass_feedback_split_functions (ctxt);
   2000  1.1  mrg }
   2001