Home | History | Annotate | Line # | Download | only in gcc
ipa-pure-const.cc revision 1.1.1.1
      1 /* Callgraph based analysis of static variables.
      2    Copyright (C) 2004-2022 Free Software Foundation, Inc.
      3    Contributed by Kenneth Zadeck <zadeck (at) naturalbridge.com>
      4 
      5 This file is part of GCC.
      6 
      7 GCC is free software; you can redistribute it and/or modify it under
      8 the terms of the GNU General Public License as published by the Free
      9 Software Foundation; either version 3, or (at your option) any later
     10 version.
     11 
     12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
     14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     15 for more details.
     16 
     17 You should have received a copy of the GNU General Public License
     18 along with GCC; see the file COPYING3.  If not see
     19 <http://www.gnu.org/licenses/>.  */
     20 
     21 /* This file marks functions as being either const (TREE_READONLY) or
     22    pure (DECL_PURE_P).  It can also set a variant of these that
     23    are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
     24 
     25    This must be run after inlining decisions have been made since
     26    otherwise, the local sets will not contain information that is
     27    consistent with post inlined state.  The global sets are not prone
     28    to this problem since they are by definition transitive.  */
     29 
     30 /* The code in this module is called by the ipa pass manager. It
     31    should be one of the later passes since it's information is used by
     32    the rest of the compilation. */
     33 
     34 #include "config.h"
     35 #include "system.h"
     36 #include "coretypes.h"
     37 #include "backend.h"
     38 #include "target.h"
     39 #include "tree.h"
     40 #include "gimple.h"
     41 #include "tree-pass.h"
     42 #include "tree-streamer.h"
     43 #include "cgraph.h"
     44 #include "diagnostic.h"
     45 #include "calls.h"
     46 #include "cfganal.h"
     47 #include "tree-eh.h"
     48 #include "gimple-iterator.h"
     49 #include "gimple-walk.h"
     50 #include "tree-cfg.h"
     51 #include "tree-ssa-loop-niter.h"
     52 #include "langhooks.h"
     53 #include "ipa-utils.h"
     54 #include "gimple-pretty-print.h"
     55 #include "cfgloop.h"
     56 #include "tree-scalar-evolution.h"
     57 #include "intl.h"
     58 #include "opts.h"
     59 #include "ssa.h"
     60 #include "alloc-pool.h"
     61 #include "symbol-summary.h"
     62 #include "ipa-prop.h"
     63 #include "ipa-fnsummary.h"
     64 #include "symtab-thunks.h"
     65 #include "dbgcnt.h"
     66 
     67 /* Lattice values for const and pure functions.  Everything starts out
     68    being const, then may drop to pure and then neither depending on
     69    what is found.  */
enum pure_const_state_e
{
  IPA_CONST,	/* Function is const: reads no global memory.  */
  IPA_PURE,	/* Function is pure: may read but not write global memory.  */
  IPA_NEITHER	/* Function is neither const nor pure.  */
};

/* Printable names for pure_const_state_e, indexed by the enum value.  */
static const char *pure_const_names[3] = {"const", "pure", "neither"};

/* Lattice for the malloc-attribute analysis; TOP is the optimistic
   starting point, BOTTOM the pessimistic fixed point.  */
enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

/* Printable names for malloc_state_e, indexed by the enum value.  */
static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
     87 
     88 /* Holder for the const_state.  There is one of these per function
     89    decl.  */
class funct_state_d
{
public:
  /* Default-construct to the most pessimistic state: neither const nor
     pure, possibly looping, throwing and freeing.  Analysis only ever
     improves on this.  */
  funct_state_d (): pure_const_state (IPA_NEITHER),
    state_previously_known (IPA_NEITHER), looping_previously_known (true),
    looping (true), can_throw (true), can_free (true),
    malloc_state (STATE_MALLOC_BOTTOM) {}

  /* Member-wise copy; equivalent to the implicitly-defined copy
     constructor.  */
  funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
    state_previously_known (s.state_previously_known),
    looping_previously_known (s.looping_previously_known),
    looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    malloc_state (s.malloc_state) {}

  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  /* Whether the user-declared state also permits infinite looping.  */
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  /* True if the function may throw an exception that escapes to its
     callers (set from stmt_can_throw_external during scanning).  */
  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  /* Current lattice value for the malloc-attribute analysis.  */
  enum malloc_state_e malloc_state;
};

/* Handle used throughout this file to refer to a function's state.  */
typedef class funct_state_d * funct_state;
    128 
    129 /* The storage of the funct_state is abstracted because there is the
    130    possibility that it may be desirable to move this to the cgraph
    131    local info.  */
    132 
class funct_state_summary_t:
  public fast_function_summary <funct_state_d *, va_heap>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    fast_function_summary <funct_state_d *, va_heap> (symtab) {}

  /* Summary hooks invoked by the symbol table when a node is created
     or cloned; bodies are defined elsewhere in this file.  */
  virtual void insert (cgraph_node *, funct_state_d *state);
  virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
			  funct_state_d *src_data,
			  funct_state_d *dst_data);
};
    145 
/* Global registry of per-function state; allocated lazily elsewhere in
   this file.  */
static funct_state_summary_t *funct_state_summaries = NULL;

/* Forward declaration; used by the pass gates below.  */
static bool gate_pure_const (void);
    149 
namespace {

/* Pass descriptor for the IPA pure/const pass.  */
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  /* NOTE(review): presumably set once register_hooks has run, to guard
     against double registration — confirm against its definition.  */
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace
    181 
    182 /* Try to guess if function body will always be visible to compiler
    183    when compiling the call and whether compiler will be able
    184    to propagate the information by itself.  */
    185 
    186 static bool
    187 function_always_visible_to_compiler_p (tree decl)
    188 {
    189   return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
    190 	  || DECL_COMDAT (decl));
    191 }
    192 
    193 /* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
    194    is true if the function is known to be finite.  The diagnostic is
    195    controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
    196    OPTION, this function may initialize it and it is always returned
    197    by the function.  */
    198 
    199 static hash_set<tree> *
    200 suggest_attribute (int option, tree decl, bool known_finite,
    201 		   hash_set<tree> *warned_about,
    202 		   const char * attrib_name)
    203 {
    204   if (!option_enabled (option, lang_hooks.option_lang_mask (), &global_options))
    205     return warned_about;
    206   if (TREE_THIS_VOLATILE (decl)
    207       || (known_finite && function_always_visible_to_compiler_p (decl)))
    208     return warned_about;
    209 
    210   if (!warned_about)
    211     warned_about = new hash_set<tree>;
    212   if (warned_about->contains (decl))
    213     return warned_about;
    214   warned_about->add (decl);
    215   warning_at (DECL_SOURCE_LOCATION (decl),
    216 	      option,
    217 	      known_finite
    218 	      ? G_("function might be candidate for attribute %qs")
    219 	      : G_("function might be candidate for attribute %qs"
    220 		   " if it is known to return normally"), attrib_name);
    221   return warned_about;
    222 }
    223 
    224 /* Emit suggestion about __attribute_((pure)) for DECL.  KNOWN_FINITE
    225    is true if the function is known to be finite.  */
    226 
    227 static void
    228 warn_function_pure (tree decl, bool known_finite)
    229 {
    230   /* Declaring a void function pure makes no sense and is diagnosed
    231      by -Wattributes because calling it would have no effect.  */
    232   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    233     return;
    234 
    235   static hash_set<tree> *warned_about;
    236   warned_about
    237     = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
    238 			 known_finite, warned_about, "pure");
    239 }
    240 
    241 /* Emit suggestion about __attribute_((const)) for DECL.  KNOWN_FINITE
    242    is true if the function is known to be finite.  */
    243 
    244 static void
    245 warn_function_const (tree decl, bool known_finite)
    246 {
    247   /* Declaring a void function const makes no sense is diagnosed
    248      by -Wattributes because calling it would have no effect.  */
    249   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    250     return;
    251 
    252   static hash_set<tree> *warned_about;
    253   warned_about
    254     = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
    255 			 known_finite, warned_about, "const");
    256 }
    257 
    258 /* Emit suggestion about __attribute__((malloc)) for DECL.  */
    259 
    260 static void
    261 warn_function_malloc (tree decl)
    262 {
    263   static hash_set<tree> *warned_about;
    264   warned_about
    265     = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
    266 			 true, warned_about, "malloc");
    267 }
    268 
    269 /* Emit suggestion about __attribute__((noreturn)) for DECL.  */
    270 
    271 static void
    272 warn_function_noreturn (tree decl)
    273 {
    274   tree original_decl = decl;
    275 
    276   static hash_set<tree> *warned_about;
    277   if (!lang_hooks.missing_noreturn_ok_p (decl)
    278       && targetm.warn_func_return (decl))
    279     warned_about
    280       = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
    281 			   true, warned_about, "noreturn");
    282 }
    283 
    284 void
    285 warn_function_cold (tree decl)
    286 {
    287   tree original_decl = decl;
    288 
    289   static hash_set<tree> *warned_about;
    290   warned_about
    291     = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
    292 			 true, warned_about, "cold");
    293 }
    294 
    295 /* Check to see if the use (or definition when CHECKING_WRITE is true)
    296    variable T is legal in a function that is either pure or const.  */
    297 
static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  /* From here on we know this is a read of static or global memory.  */
  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
          if (dump_file)
            fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
    370 
    371 
    372 /* Check to see if the use (or definition when CHECKING_WRITE is true)
    373    variable T is legal in a function that is either pure or const.  */
    374 
    375 static inline void
    376 check_op (funct_state local, tree t, bool checking_write)
    377 {
    378   t = get_base_address (t);
    379   if (t && TREE_THIS_VOLATILE (t))
    380     {
    381       local->pure_const_state = IPA_NEITHER;
    382       if (dump_file)
    383 	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
    384       return;
    385     }
    386   else if (refs_local_or_readonly_memory_p (t))
    387     {
    388       if (dump_file)
    389 	fprintf (dump_file, "    Indirect ref to local or readonly "
    390 		 "memory is OK\n");
    391       return;
    392     }
    393   else if (checking_write)
    394     {
    395       local->pure_const_state = IPA_NEITHER;
    396       if (dump_file)
    397 	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
    398       return;
    399     }
    400   else
    401     {
    402       if (dump_file)
    403 	fprintf (dump_file, "    Indirect ref read is not const\n");
    404       if (local->pure_const_state == IPA_CONST)
    405 	local->pure_const_state = IPA_PURE;
    406     }
    407 }
    408 
    409 /* compute state based on ECF FLAGS and store to STATE and LOOPING.  */
    410 
    411 static void
    412 state_from_flags (enum pure_const_state_e *state, bool *looping,
    413 	          int flags, bool cannot_lead_to_return)
    414 {
    415   *looping = false;
    416   if (flags & ECF_LOOPING_CONST_OR_PURE)
    417     {
    418       *looping = true;
    419       if (dump_file && (dump_flags & TDF_DETAILS))
    420 	fprintf (dump_file, " looping\n");
    421     }
    422   if (flags & ECF_CONST)
    423     {
    424       *state = IPA_CONST;
    425       if (dump_file && (dump_flags & TDF_DETAILS))
    426 	fprintf (dump_file, " const\n");
    427     }
    428   else if (flags & ECF_PURE)
    429     {
    430       *state = IPA_PURE;
    431       if (dump_file && (dump_flags & TDF_DETAILS))
    432 	fprintf (dump_file, " pure\n");
    433     }
    434   else if (cannot_lead_to_return)
    435     {
    436       *state = IPA_PURE;
    437       *looping = true;
    438       if (dump_file && (dump_flags & TDF_DETAILS))
    439 	fprintf (dump_file, " ignoring side effects->pure looping\n");
    440     }
    441   else
    442     {
    443       if (dump_file && (dump_flags & TDF_DETAILS))
    444 	fprintf (dump_file, " neither\n");
    445       *state = IPA_NEITHER;
    446       *looping = true;
    447     }
    448 }
    449 
    450 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
    451    into STATE and LOOPING better of the two variants.
    452    Be sure to merge looping correctly.  IPA_NEITHER functions
    453    have looping 0 even if they don't have to return.  */
    454 
    455 static inline void
    456 better_state (enum pure_const_state_e *state, bool *looping,
    457 	      enum pure_const_state_e state2, bool looping2)
    458 {
    459   if (state2 < *state)
    460     {
    461       if (*state == IPA_NEITHER)
    462 	*looping = looping2;
    463       else
    464 	*looping = MIN (*looping, looping2);
    465       *state = state2;
    466     }
    467   else if (state2 != IPA_NEITHER)
    468     *looping = MIN (*looping, looping2);
    469 }
    470 
    471 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
    472    into STATE and LOOPING worse of the two variants.
    473    N is the actual node called.  */
    474 
static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now if this function will be detected as CONST however when interposed it
     may end up being just pure.  We always must assume the worst scenario here.
   */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->dump_name ());
      state2 = IPA_PURE;
    }
  /* Take the worse (numerically larger) lattice value of each component.  */
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
    509 
    510 /* Recognize special cases of builtins that are by themselves not const
    511    but function using them is.  */
bool
builtin_safe_for_const_function_p (bool *looping, tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      /* Control-flow, stack and EH bookkeeping builtins: safe inside a
	 const function and known not to loop.  */
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
      case BUILT_IN_DWARF_CFA:
      case BUILT_IN_RETURN_ADDRESS:
	*looping = false;
	return true;
      /* Prefetch is safe but conservatively treated as possibly
	 looping.  */
      case BUILT_IN_PREFETCH:
	*looping = true;
	return true;
      default:
	break;
      }
  return false;
}
    544 
    545 /* Check the parameters of a function call to CALL_EXPR to see if
    546    there are any references in the parameters that are not allowed for
    547    pure or const functions.  Also check to see if this is either an
    548    indirect call, a call outside the compilation unit, or has special
    549    attributes that may also effect the purity.  The CALL_EXPR node for
    550    the entire call expression.  */
    551 
static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
  				     && stmt_can_throw_external (cfun, call));

  /* If the call may throw, check whether any of its operands could
     themselves throw; that marks the function looping and possibly
     externally throwing.  */
  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      bool call_looping;

      /* A normal builtin that is not known to be non-freeing may free
	 memory.  */
      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      /* Builtins that are harmless inside a const function short-circuit
	 the analysis.  */
      if (builtin_safe_for_const_function_p (&call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       IPA_CONST, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
          local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      /* Non-local control transfers also disqualify const/pure.  */
      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file,
		       "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
	}
      if (possibly_throws_externally)
        {
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
	      	       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
          local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      /* A noreturn+nothrow call (or noreturn with exceptions disabled)
	 cannot lead back to a return of this function.  */
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
    683 
    684 /* Wrapper around check_decl for loads in local more.  */
    685 
    686 static bool
    687 check_load (gimple *, tree op, tree, void *data)
    688 {
    689   if (DECL_P (op))
    690     check_decl ((funct_state)data, op, false, false);
    691   else
    692     check_op ((funct_state)data, op, false);
    693   return false;
    694 }
    695 
    696 /* Wrapper around check_decl for stores in local more.  */
    697 
    698 static bool
    699 check_store (gimple *, tree op, tree, void *data)
    700 {
    701   if (DECL_P (op))
    702     check_decl ((funct_state)data, op, true, false);
    703   else
    704     check_op ((funct_state)data, op, true);
    705   return false;
    706 }
    707 
    708 /* Wrapper around check_decl for loads in ipa mode.  */
    709 
    710 static bool
    711 check_ipa_load (gimple *, tree op, tree, void *data)
    712 {
    713   if (DECL_P (op))
    714     check_decl ((funct_state)data, op, false, true);
    715   else
    716     check_op ((funct_state)data, op, false);
    717   return false;
    718 }
    719 
    720 /* Wrapper around check_decl for stores in ipa mode.  */
    721 
    722 static bool
    723 check_ipa_store (gimple *, tree op, tree, void *data)
    724 {
    725   if (DECL_P (op))
    726     check_decl ((funct_state)data, op, true, true);
    727   else
    728     check_op ((funct_state)data, op, true);
    729   return false;
    730 }
    731 
    732 /* Look into pointer pointed to by GSIP and figure out what interesting side
    733    effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  /* Debug statements never affect the observable state.  */
  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     Similar logic is in ipa-modref.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers to be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  /* Any volatile operand (other than a clobber) disqualifies
     const/pure.  */
  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store :  check_store);

  /* Non-call statements that may throw; calls handle throwing inside
     check_call.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (cfun, stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (cfun, stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  /* Statement-kind-specific effects.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump. */
	{
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
    829 
    830 /* Check that RETVAL is used only in STMT and in comparisons against 0.
    831    RETVAL is return value of the function and STMT is return stmt.  */
    832 
static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    /* A conditional use is fine only when comparing against 0.  */
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  return false;
      }
    /* An assignment use must be a comparison, again against 0.  */
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  return false;
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  return false;
      }
    /* Debug uses never matter.  */
    else if (is_gimple_debug (use_stmt))
      ;
    /* Any other use outside STMT itself disqualifies RETVAL.  */
    else if (use_stmt != stmt)
      return false;

  return true;
}
    861 
    862 /* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
    863    attribute. Currently this function does a very conservative analysis.
    864    FUN is considered to be a candidate if
    865    1) It returns a value of pointer type.
    866    2) SSA_NAME_DEF_STMT (return_value) is either a function call or
    867       a phi, and element of phi is either NULL or
    868       SSA_NAME_DEF_STMT(element) is function call.
    869    3) The return-value has immediate uses only within comparisons (gcond or gassign)
    870       and return_stmt (and likewise a phi arg has immediate use only within comparison
    871       or the phi stmt).  */
    872 
/* Dump REASON (when detailed dumping is enabled) explaining why the
   current function is not a malloc candidate, then return false from
   the enclosing function.  NOTE: relies on a local variable NODE
   (the cgraph node of the function being analyzed) being in scope at
   every use site, and on the macro's braces making it safe after an
   unbraced `if'.  */
#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->dump_name ()), (reason));  \
  return false;  \
}
    880 
/* Helper for malloc_candidate_p.  Check that RETVAL, an SSA name flowing
   into return statement RET_STMT of FUN, is produced only by calls
   (or, recursively, by phis whose non-zero arguments are calls or
   nested phis).  IPA selects inter-procedural mode, where callees need
   not already carry the malloc attribute.  VISITED records SSA name
   versions already checked so cyclic phi chains terminate.  Returns
   false (via DUMP_AND_RETURN or directly) when any criterion fails.  */

static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
		      bitmap visited)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);
  /* An SSA name seen before was already validated; cut the recursion.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
    return true;

  /* The returned pointer must not escape except through comparisons
     against 0 and the return itself.  */
  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
		    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      if (!callee_decl)
	return false;

      /* In non-IPA mode we can only trust an explicit malloc attribute
	 on the callee; in IPA mode propagation will decide later.  */
      if (!ipa && !DECL_IS_MALLOC (callee_decl))
	DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			" non-ipa mode.")

      /* Record on the call edge that the callee's return value is not
	 captured, for use by later IPA propagation.  */
      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
	{
	  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	  es->is_return_callee_uncaptured = true;
	}
    }

    else if (gphi *phi = dyn_cast<gphi *> (def))
      {
	bool all_args_zero = true;
	for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	  {
	    tree arg = gimple_phi_arg_def (phi, i);
	    /* Zero arguments are allowed; the attribute only promises
	       non-aliasing for non-NULL results.  */
	    if (integer_zerop (arg))
	      continue;

	    all_args_zero = false;
	    if (TREE_CODE (arg) != SSA_NAME)
	      DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
	    if (!check_retval_uses (arg, phi))
	      DUMP_AND_RETURN ("phi arg has uses outside phi"
				 " and comparisons against 0.")

	    gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	    /* Nested phis are handled by recursing; VISITED bounds the
	       recursion depth.  */
	    if (is_a<gphi *> (arg_def))
	      {
		if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
		    DUMP_AND_RETURN ("nested phi fail")
		continue;
	      }

	    gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	    if (!call_stmt)
	      DUMP_AND_RETURN ("phi arg is a not a call_stmt.")

	    tree callee_decl = gimple_call_fndecl (call_stmt);
	    if (!callee_decl)
	      return false;
	    if (!ipa && !DECL_IS_MALLOC (callee_decl))
	      DUMP_AND_RETURN("callee_decl does not have malloc attribute"
			      " for non-ipa mode.")

	    /* Same edge annotation as in the direct-call case above.  */
	    cgraph_edge *cs = node->get_edge (call_stmt);
	    if (cs)
	      {
		ipa_call_summary *es = ipa_call_summaries->get_create (cs);
		es->is_return_callee_uncaptured = true;
	      }
	  }

	/* A phi of all zeros means the function always returns NULL;
	   the malloc attribute would be meaningless.  */
	if (all_args_zero)
	  DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
      }

    else
      DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}
    965 
    966 static bool
    967 malloc_candidate_p (function *fun, bool ipa)
    968 {
    969   basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
    970   edge e;
    971   edge_iterator ei;
    972   cgraph_node *node = cgraph_node::get_create (fun->decl);
    973 
    974   if (EDGE_COUNT (exit_block->preds) == 0
    975       || !flag_delete_null_pointer_checks)
    976     return false;
    977 
    978   auto_bitmap visited;
    979   FOR_EACH_EDGE (e, ei, exit_block->preds)
    980     {
    981       gimple_stmt_iterator gsi = gsi_last_bb (e->src);
    982       greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
    983 
    984       if (!ret_stmt)
    985 	return false;
    986 
    987       tree retval = gimple_return_retval (ret_stmt);
    988       if (!retval)
    989 	DUMP_AND_RETURN("No return value.")
    990 
    991       if (TREE_CODE (retval) != SSA_NAME
    992 	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
    993 	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
    994 
    995       if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
    996 	return false;
    997     }
    998 
    999   if (dump_file && (dump_flags & TDF_DETAILS))
   1000     fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
   1001 	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
   1002   return true;
   1003 }
   1004 
   1005 #undef DUMP_AND_RETURN
   1006 
   1007 /* Return true if function is known to be finite.  */
   1008 
   1009 bool
   1010 finite_function_p ()
   1011 {
   1012   /* Const functions cannot have back edges (an
   1013      indication of possible infinite loop side
   1014      effect.  */
   1015   bool finite = true;
   1016   if (mark_dfs_back_edges ())
   1017     {
   1018       /* Preheaders are needed for SCEV to work.
   1019 	 Simple latches and recorded exits improve chances that loop will
   1020 	 proved to be finite in testcases such as in loop-15.c
   1021 	 and loop-24.c  */
   1022       loop_optimizer_init (LOOPS_HAVE_PREHEADERS
   1023 			   | LOOPS_HAVE_SIMPLE_LATCHES
   1024 			   | LOOPS_HAVE_RECORDED_EXITS);
   1025       if (dump_file && (dump_flags & TDF_DETAILS))
   1026 	flow_loops_dump (dump_file, NULL, 0);
   1027       if (mark_irreducible_loops ())
   1028 	{
   1029 	  if (dump_file)
   1030 	    fprintf (dump_file, "    has irreducible loops\n");
   1031 	  finite = false;
   1032 	}
   1033       else
   1034 	{
   1035 	  scev_initialize ();
   1036 	  for (auto loop : loops_list (cfun, 0))
   1037 	    if (!finite_loop_p (loop))
   1038 	      {
   1039 		if (dump_file)
   1040 		  fprintf (dump_file, "    cannot prove finiteness of "
   1041 			   "loop %i\n", loop->num);
   1042 		finite =false;
   1043 		break;
   1044 	      }
   1045 	  scev_finalize ();
   1046 	}
   1047       loop_optimizer_finalize ();
   1048     }
   1049   return finite;
   1050 }
   1051 
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  Returns a freshly XCNEWed
   funct_state describing the local (intra-procedural) pure/const,
   looping, throwing, freeing and malloc properties; the caller owns
   and must free the result.  IPA selects inter-procedural mode.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  /* Start from the optimistic assumption (const, non-looping) and let
     check_stmt degrade the state.  The previously-known fields start
     pessimistic and are refined from the declaration's flags.  */
  l = XCNEW (class funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->dump_name ());
    }

  /* Scan the body statement by statement; cfun must be pushed for the
     helpers (and malloc_candidate_p below) to see FN's body.  */
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  /* NULL memory accesses terminates BB.  These accesses are known
	     to trip undefined behaviour.  gimple-ssa-isolate-paths turns them
	     to volatile accesses and adds builtin_trap call which would
	     confuse us otherwise.  */
	  if (infer_nonnull_range_by_dereference (gsi_stmt (gsi),
						  null_pointer_node))
	    {
	      if (dump_file)
		fprintf (dump_file, "  NULL memory access; terminating BB%s\n",
			 flag_non_call_exceptions ? "; looping" : "");
	      if (flag_non_call_exceptions)
		{
		  l->looping = true;
		  if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
		    {
		      if (dump_file)
			fprintf (dump_file, "    can throw externally\n");
		      l->can_throw = true;
		    }
		}
	      break;
	    }
	  check_stmt (&gsi, l, ipa);
	  /* Once every property has reached bottom there is nothing
	     more to learn; stop scanning.  */
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  /* A const/pure function must also terminate to be useful; try to
     prove finiteness of all loops.  */
  if (l->pure_const_state != IPA_NEITHER
      && !l->looping
      && !finite_function_p ())
    l->looping = true;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  /* Flags already on the declaration may be stronger than what the
     scan proved; merge them in.  */
  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  /* Compute the malloc lattice value while cfun is still pushed.  */
  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}
   1172 
   1173 void
   1174 funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
   1175 {
   1176   /* There are some shared nodes, in particular the initializers on
   1177      static declarations.  We do not need to scan them more than once
   1178      since all we would be interested in are the addressof
   1179      operations.  */
   1180   if (opt_for_fn (node->decl, flag_ipa_pure_const))
   1181     {
   1182       funct_state_d *a = analyze_function (node, true);
   1183       new (state) funct_state_d (*a);
   1184       free (a);
   1185     }
   1186   else
   1187     /* Do not keep stale summaries.  */
   1188     funct_state_summaries->remove (node);
   1189 }
   1190 
   1191 /* Called when new clone is inserted to callgraph late.  */
   1192 
   1193 void
   1194 funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
   1195 				  funct_state_d *src_data,
   1196 				  funct_state_d *dst_data)
   1197 {
   1198   new (dst_data) funct_state_d (*src_data);
   1199   if (dst_data->malloc_state == STATE_MALLOC
   1200       && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
   1201     dst_data->malloc_state = STATE_MALLOC_BOTTOM;
   1202 }
   1203 
   1204 
   1205 void
   1207 pass_ipa_pure_const::
   1208 register_hooks (void)
   1209 {
   1210   if (init_p)
   1211     return;
   1212 
   1213   init_p = true;
   1214 
   1215   funct_state_summaries = new funct_state_summary_t (symtab);
   1216 }
   1217 
   1218 
   1219 /* Analyze each function in the cgraph to see if it is locally PURE or
   1220    CONST.  */
   1221 
   1222 static void
   1223 pure_const_generate_summary (void)
   1224 {
   1225   struct cgraph_node *node;
   1226 
   1227   pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
   1228   pass->register_hooks ();
   1229 
   1230   /* Process all of the functions.
   1231 
   1232      We process AVAIL_INTERPOSABLE functions.  We cannot use the results
   1233      by default, but the info can be used at LTO with -fwhole-program or
   1234      when function got cloned and the clone is AVAILABLE.  */
   1235 
   1236   FOR_EACH_DEFINED_FUNCTION (node)
   1237     if (opt_for_fn (node->decl, flag_ipa_pure_const))
   1238       {
   1239 	funct_state_d *a = analyze_function (node, true);
   1240 	new (funct_state_summaries->get_create (node)) funct_state_d (*a);
   1241 	free (a);
   1242       }
   1243 }
   1244 
   1245 
   1246 /* Serialize the ipa info for lto.  */
   1247 
   1248 static void
   1249 pure_const_write_summary (void)
   1250 {
   1251   struct cgraph_node *node;
   1252   struct lto_simple_output_block *ob
   1253     = lto_create_simple_output_block (LTO_section_ipa_pure_const);
   1254   unsigned int count = 0;
   1255   lto_symtab_encoder_iterator lsei;
   1256   lto_symtab_encoder_t encoder;
   1257 
   1258   encoder = lto_get_out_decl_state ()->symtab_node_encoder;
   1259 
   1260   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
   1261        lsei_next_function_in_partition (&lsei))
   1262     {
   1263       node = lsei_cgraph_node (lsei);
   1264       if (node->definition && funct_state_summaries->exists (node))
   1265 	count++;
   1266     }
   1267 
   1268   streamer_write_uhwi_stream (ob->main_stream, count);
   1269 
   1270   /* Process all of the functions.  */
   1271   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
   1272        lsei_next_function_in_partition (&lsei))
   1273     {
   1274       node = lsei_cgraph_node (lsei);
   1275       funct_state_d *fs = funct_state_summaries->get (node);
   1276       if (node->definition && fs != NULL)
   1277 	{
   1278 	  struct bitpack_d bp;
   1279 	  int node_ref;
   1280 	  lto_symtab_encoder_t encoder;
   1281 
   1282 	  encoder = ob->decl_state->symtab_node_encoder;
   1283 	  node_ref = lto_symtab_encoder_encode (encoder, node);
   1284 	  streamer_write_uhwi_stream (ob->main_stream, node_ref);
   1285 
   1286 	  /* Note that flags will need to be read in the opposite
   1287 	     order as we are pushing the bitflags into FLAGS.  */
   1288 	  bp = bitpack_create (ob->main_stream);
   1289 	  bp_pack_value (&bp, fs->pure_const_state, 2);
   1290 	  bp_pack_value (&bp, fs->state_previously_known, 2);
   1291 	  bp_pack_value (&bp, fs->looping_previously_known, 1);
   1292 	  bp_pack_value (&bp, fs->looping, 1);
   1293 	  bp_pack_value (&bp, fs->can_throw, 1);
   1294 	  bp_pack_value (&bp, fs->can_free, 1);
   1295 	  bp_pack_value (&bp, fs->malloc_state, 2);
   1296 	  streamer_write_bitpack (&bp);
   1297 	}
   1298     }
   1299 
   1300   lto_destroy_simple_output_block (ob);
   1301 }
   1302 
   1303 
/* Deserialize the ipa info for lto.  Reads back, for every LTO input
   file with a pure-const section, the record count and then per-record
   the node reference plus a bitpack of summary fields, in exactly the
   order pure_const_write_summary packed them.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Walk every LTO input file; those without a pure-const section
     yield a NULL input block and are skipped.  */
  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  /* The writer streamed the record count first.  */
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));

	      fs = funct_state_summaries->get_create (node);
	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file,"  function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file,"  function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file,"  function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file,"  function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
   1391 
   1392 /* We only propagate across edges that can throw externally and their callee
   1393    is not interposable.  */
   1394 
   1395 static bool
   1396 ignore_edge_for_nothrow (struct cgraph_edge *e)
   1397 {
   1398   if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
   1399     return true;
   1400 
   1401   enum availability avail;
   1402   cgraph_node *ultimate_target
   1403     = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
   1404   if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
   1405     return true;
   1406   return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
   1407 	   && !e->callee->binds_to_current_def_p (e->caller))
   1408 	  || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
   1409 	  || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
   1410 }
   1411 
   1412 /* Return true if NODE is self recursive function.
   1413    Indirectly recursive functions appears as non-trivial strongly
   1414    connected components, so we need to care about self recursion
   1415    only.  */
   1416 
   1417 static bool
   1418 self_recursive_p (struct cgraph_node *node)
   1419 {
   1420   struct cgraph_edge *e;
   1421   for (e = node->callees; e; e = e->next_callee)
   1422     if (e->callee->function_symbol () == node)
   1423       return true;
   1424   return false;
   1425 }
   1426 
   1427 /* Return true if N is cdtor that is not const or pure.  In this case we may
   1428    need to remove unreachable function if it is marked const/pure.  */
   1429 
   1430 static bool
   1431 cdtor_p (cgraph_node *n, void *)
   1432 {
   1433   if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
   1434     return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
   1435 	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
   1436   return false;
   1437 }
   1438 
   1439 /* Skip edges from and to nodes without ipa_pure_const enabled.
   1440    Ignore not available symbols.  */
   1441 
   1442 static bool
   1443 ignore_edge_for_pure_const (struct cgraph_edge *e)
   1444 {
   1445   enum availability avail;
   1446   cgraph_node *ultimate_target
   1447     = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
   1448 
   1449   return (avail <= AVAIL_INTERPOSABLE
   1450 	  || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
   1451 	  || !opt_for_fn (ultimate_target->decl,
   1452 			  flag_ipa_pure_const));
   1453 }
   1454 
   1455 /* Return true if function should be skipped for local pure const analysis.  */
   1456 
   1457 static bool
   1458 skip_function_for_local_pure_const (struct cgraph_node *node)
   1459 {
   1460   /* Because we do not schedule pass_fixup_cfg over whole program after early
   1461      optimizations we must not promote functions that are called by already
   1462      processed functions.  */
   1463 
   1464   if (function_called_by_processed_nodes_p ())
   1465     {
   1466       if (dump_file)
   1467 	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
   1468       return true;
   1469     }
   1470   /* Save some work and do not analyze functions which are interposable and
   1471      do not have any non-interposable aliases.  */
   1472   if (node->get_availability () <= AVAIL_INTERPOSABLE
   1473       && !flag_lto
   1474       && !node->has_aliases_p ())
   1475     {
   1476       if (dump_file)
   1477 	fprintf (dump_file,
   1478 		 "Function is interposable; not analyzing.\n");
   1479       return true;
   1480     }
   1481   return false;
   1482 }
   1483 
/* Make function const and output warning.  If LOCAL is true,
   return true if anything changed.  Otherwise return true if
   we may have introduced removable ctors.  */

bool
ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
{
  bool cdtor = false;

  /* Nothing to do when the declaration is already at least as strongly
     const as requested.  */
  if (TREE_READONLY (node->decl)
      && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
    return false;
  warn_function_const (node->decl, !looping);
  if (local && skip_function_for_local_pure_const (node))
    return false;
  if (dump_file)
    fprintf (dump_file, "Function found to be %sconst: %s\n",
	     looping ? "looping " : "",
	     node->dump_name ());
  /* Check for cdtors that may become removable BEFORE flipping the
     flag, since cdtor_p inspects the current decl flags.  */
  if (!local && !looping)
    cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
  /* Debug counter: allow bisecting which attribute change misbehaves.  */
  if (!dbg_cnt (ipa_attr))
    return false;
  if (node->set_const_flag (true, looping))
    {
      if (dump_file)
	fprintf (dump_file,
		 "Declaration updated to be %sconst: %s\n",
		 looping ? "looping " : "",
		 node->dump_name ());
      if (local)
	return true;
      return cdtor;
    }
  return false;
}
   1520 
/* Make function pure and output warning.  If LOCAL is true,
   return true if anything changed.  Otherwise return true if
   we may have introduced removable ctors.  */

bool
ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
{
  bool cdtor = false;

  /* Nothing to do when the declaration is already const, or already at
     least as strongly pure as requested.  */
  if (TREE_READONLY (node->decl)
      || (DECL_PURE_P (node->decl)
	  && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl))))
    return false;
  warn_function_pure (node->decl, !looping);
  if (local && skip_function_for_local_pure_const (node))
    return false;
  if (dump_file)
    fprintf (dump_file, "Function found to be %spure: %s\n",
	     looping ? "looping " : "",
	     node->dump_name ());
  /* Check for cdtors that may become removable BEFORE flipping the
     flag, since cdtor_p inspects the current decl flags.  */
  if (!local && !looping)
    cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
  /* Debug counter: allow bisecting which attribute change misbehaves.  */
  if (!dbg_cnt (ipa_attr))
    return false;
  if (node->set_pure_flag (true, looping))
    {
      if (dump_file)
	fprintf (dump_file,
		 "Declaration updated to be %spure: %s\n",
		 looping ? "looping " : "",
		 node->dump_name ());
      if (local)
	return true;
      return cdtor;
    }
  return false;
}
   1558 
   1559 /* Produce transitive closure over the callgraph and compute pure/const
   1560    attributes.  */
   1561 
   1562 static bool
   1563 propagate_pure_const (void)
   1564 {
   1565   struct cgraph_node *node;
   1566   struct cgraph_node *w;
   1567   struct cgraph_node **order =
   1568     XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
   1569   int order_pos;
   1570   int i;
   1571   struct ipa_dfs_info * w_info;
   1572   bool remove_p = false;
   1573 
   1574   order_pos = ipa_reduced_postorder (order, true,
   1575 				     ignore_edge_for_pure_const);
   1576   if (dump_file)
   1577     {
   1578       cgraph_node::dump_cgraph (dump_file);
   1579       ipa_print_order (dump_file, "reduced", order, order_pos);
   1580     }
   1581 
   1582   /* Propagate the local information through the call graph to produce
   1583      the global information.  All the nodes within a cycle will have
   1584      the same info so we collapse cycles first.  Then we can do the
   1585      propagation in one pass from the leaves to the roots.  */
   1586   for (i = 0; i < order_pos; i++ )
   1587     {
   1588       enum pure_const_state_e pure_const_state = IPA_CONST;
   1589       bool looping = false;
   1590       int count = 0;
   1591       node = order[i];
   1592 
   1593       if (node->alias)
   1594 	continue;
   1595 
   1596       if (dump_file && (dump_flags & TDF_DETAILS))
   1597 	fprintf (dump_file, "Starting cycle\n");
   1598 
   1599       /* Find the worst state for any node in the cycle.  */
   1600       w = node;
   1601       while (w && pure_const_state != IPA_NEITHER)
   1602 	{
   1603 	  struct cgraph_edge *e;
   1604 	  struct cgraph_edge *ie;
   1605 	  int i;
   1606 	  struct ipa_ref *ref = NULL;
   1607 
   1608 	  funct_state w_l = funct_state_summaries->get_create (w);
   1609 	  if (dump_file && (dump_flags & TDF_DETAILS))
   1610 	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
   1611 		     w->dump_name (),
   1612 		     pure_const_names[w_l->pure_const_state],
   1613 		     w_l->looping);
   1614 
   1615 	  /* First merge in function body properties.
   1616 	     We are safe to pass NULL as FROM and TO because we will take care
   1617 	     of possible interposition when walking callees.  */
   1618 	  worse_state (&pure_const_state, &looping,
   1619 		       w_l->pure_const_state, w_l->looping,
   1620 		       NULL, NULL);
   1621 	  if (pure_const_state == IPA_NEITHER)
   1622 	    break;
   1623 
   1624 	  count++;
   1625 
   1626 	  /* We consider recursive cycles as possibly infinite.
   1627 	     This might be relaxed since infinite recursion leads to stack
   1628 	     overflow.  */
   1629 	  if (count > 1)
   1630 	    looping = true;
   1631 
   1632 	  /* Now walk the edges and merge in callee properties.  */
   1633 	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
   1634 	       e = e->next_callee)
   1635 	    {
   1636 	      enum availability avail;
   1637 	      struct cgraph_node *y = e->callee->
   1638 				function_or_virtual_thunk_symbol (&avail,
   1639 								  e->caller);
   1640 	      enum pure_const_state_e edge_state = IPA_CONST;
   1641 	      bool edge_looping = false;
   1642 
   1643 	      if (e->recursive_p ())
   1644 		looping = true;
   1645 
   1646 	      if (dump_file && (dump_flags & TDF_DETAILS))
   1647 		{
   1648 		  fprintf (dump_file, "    Call to %s",
   1649 			   e->callee->dump_name ());
   1650 		}
   1651 	      if (avail > AVAIL_INTERPOSABLE)
   1652 		{
   1653 		  funct_state y_l = funct_state_summaries->get_create (y);
   1654 
   1655 		  if (dump_file && (dump_flags & TDF_DETAILS))
   1656 		    {
   1657 		      fprintf (dump_file,
   1658 			       " state:%s looping:%i\n",
   1659 			       pure_const_names[y_l->pure_const_state],
   1660 			       y_l->looping);
   1661 		    }
   1662 		  if (y_l->pure_const_state > IPA_PURE
   1663 		      && e->cannot_lead_to_return_p ())
   1664 		    {
   1665 		      if (dump_file && (dump_flags & TDF_DETAILS))
   1666 			fprintf (dump_file,
   1667 				 "        Ignoring side effects"
   1668 				 " -> pure, looping\n");
   1669 		      edge_state = IPA_PURE;
   1670 		      edge_looping = true;
   1671 		    }
   1672 		  else
   1673 		    {
   1674 		      edge_state = y_l->pure_const_state;
   1675 		      edge_looping = y_l->looping;
   1676 		    }
   1677 		}
   1678 	      else if (builtin_safe_for_const_function_p (&edge_looping,
   1679 							   y->decl))
   1680 		edge_state = IPA_CONST;
   1681 	      else
   1682 		state_from_flags (&edge_state, &edge_looping,
   1683 				  flags_from_decl_or_type (y->decl),
   1684 				  e->cannot_lead_to_return_p ());
   1685 
   1686 	      /* Merge the results with what we already know.  */
   1687 	      better_state (&edge_state, &edge_looping,
   1688 			    w_l->state_previously_known,
   1689 			    w_l->looping_previously_known);
   1690 	      worse_state (&pure_const_state, &looping,
   1691 			   edge_state, edge_looping, e->caller, e->callee);
   1692 	      if (pure_const_state == IPA_NEITHER)
   1693 	        break;
   1694 	    }
   1695 
   1696 	  /* Now process the indirect call.  */
   1697           for (ie = w->indirect_calls;
   1698 	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
   1699 	    {
   1700 	      enum pure_const_state_e edge_state = IPA_CONST;
   1701 	      bool edge_looping = false;
   1702 
   1703 	      if (dump_file && (dump_flags & TDF_DETAILS))
   1704 		fprintf (dump_file, "    Indirect call");
   1705 	      state_from_flags (&edge_state, &edge_looping,
   1706 			        ie->indirect_info->ecf_flags,
   1707 				ie->cannot_lead_to_return_p ());
   1708 	      /* Merge the results with what we already know.  */
   1709 	      better_state (&edge_state, &edge_looping,
   1710 			    w_l->state_previously_known,
   1711 			    w_l->looping_previously_known);
   1712 	      worse_state (&pure_const_state, &looping,
   1713 			   edge_state, edge_looping, NULL, NULL);
   1714 	      if (pure_const_state == IPA_NEITHER)
   1715 	        break;
   1716 	    }
   1717 
   1718 	  /* And finally all loads and stores.  */
   1719 	  for (i = 0; w->iterate_reference (i, ref)
   1720 	       && pure_const_state != IPA_NEITHER; i++)
   1721 	    {
   1722 	      enum pure_const_state_e ref_state = IPA_CONST;
   1723 	      bool ref_looping = false;
   1724 	      switch (ref->use)
   1725 		{
   1726 		case IPA_REF_LOAD:
   1727 		  /* readonly reads are safe.  */
   1728 		  if (TREE_READONLY (ref->referred->decl))
   1729 		    break;
   1730 		  if (dump_file && (dump_flags & TDF_DETAILS))
   1731 		    fprintf (dump_file, "    nonreadonly global var read\n");
   1732 		  ref_state = IPA_PURE;
   1733 		  break;
   1734 		case IPA_REF_STORE:
   1735 		  if (ref->cannot_lead_to_return ())
   1736 		    break;
   1737 		  ref_state = IPA_NEITHER;
   1738 		  if (dump_file && (dump_flags & TDF_DETAILS))
   1739 		    fprintf (dump_file, "    global var write\n");
   1740 		  break;
   1741 		case IPA_REF_ADDR:
   1742 		  break;
   1743 		default:
   1744 		  gcc_unreachable ();
   1745 		}
   1746 	      better_state (&ref_state, &ref_looping,
   1747 			    w_l->state_previously_known,
   1748 			    w_l->looping_previously_known);
   1749 	      worse_state (&pure_const_state, &looping,
   1750 			   ref_state, ref_looping, NULL, NULL);
   1751 	      if (pure_const_state == IPA_NEITHER)
   1752 		break;
   1753 	    }
   1754 	  w_info = (struct ipa_dfs_info *) w->aux;
   1755 	  w = w_info->next_cycle;
   1756 	}
   1757       if (dump_file && (dump_flags & TDF_DETAILS))
   1758 	fprintf (dump_file, "Result %s looping %i\n",
   1759 		 pure_const_names [pure_const_state],
   1760 		 looping);
   1761 
   1762       /* Find the worst state of can_free for any node in the cycle.  */
   1763       bool can_free = false;
   1764       w = node;
   1765       while (w && !can_free)
   1766 	{
   1767 	  struct cgraph_edge *e;
   1768 	  funct_state w_l = funct_state_summaries->get (w);
   1769 
   1770 	  if (w_l->can_free
   1771 	      || w->get_availability () == AVAIL_INTERPOSABLE
   1772 	      || w->indirect_calls)
   1773 	    can_free = true;
   1774 
   1775 	  for (e = w->callees; e && !can_free; e = e->next_callee)
   1776 	    {
   1777 	      enum availability avail;
   1778 	      struct cgraph_node *y = e->callee->
   1779 				function_or_virtual_thunk_symbol (&avail,
   1780 								  e->caller);
   1781 
   1782 	      if (avail > AVAIL_INTERPOSABLE)
   1783 		can_free = funct_state_summaries->get (y)->can_free;
   1784 	      else
   1785 		can_free = true;
   1786 	    }
   1787 	  w_info = (struct ipa_dfs_info *) w->aux;
   1788 	  w = w_info->next_cycle;
   1789 	}
   1790 
   1791       /* Copy back the region's pure_const_state which is shared by
   1792 	 all nodes in the region.  */
   1793       w = node;
   1794       while (w)
   1795 	{
   1796 	  funct_state w_l = funct_state_summaries->get (w);
   1797 	  enum pure_const_state_e this_state = pure_const_state;
   1798 	  bool this_looping = looping;
   1799 
   1800 	  w_l->can_free = can_free;
   1801 	  w->nonfreeing_fn = !can_free;
   1802 	  if (!can_free && dump_file)
   1803 	    fprintf (dump_file, "Function found not to call free: %s\n",
   1804 		     w->dump_name ());
   1805 
   1806 	  if (w_l->state_previously_known != IPA_NEITHER
   1807 	      && this_state > w_l->state_previously_known)
   1808 	    {
   1809 	      if (this_state == IPA_NEITHER)
   1810 		this_looping = w_l->looping_previously_known;
   1811 	      this_state = w_l->state_previously_known;
   1812 	    }
   1813 	  if (!this_looping && self_recursive_p (w))
   1814 	    this_looping = true;
   1815 	  if (!w_l->looping_previously_known)
   1816 	    this_looping = false;
   1817 
   1818 	  /* All nodes within a cycle share the same info.  */
   1819 	  w_l->pure_const_state = this_state;
   1820 	  w_l->looping = this_looping;
   1821 
   1822 	  /* Inline clones share declaration with their offline copies;
   1823 	     do not modify their declarations since the offline copy may
   1824 	     be different.  */
   1825 	  if (!w->inlined_to)
   1826 	    switch (this_state)
   1827 	      {
   1828 	      case IPA_CONST:
   1829 		remove_p |= ipa_make_function_const (w, this_looping, false);
   1830 		break;
   1831 
   1832 	      case IPA_PURE:
   1833 		remove_p |= ipa_make_function_pure (w, this_looping, false);
   1834 		break;
   1835 
   1836 	      default:
   1837 		break;
   1838 	      }
   1839 	  w_info = (struct ipa_dfs_info *) w->aux;
   1840 	  w = w_info->next_cycle;
   1841 	}
   1842     }
   1843 
   1844   ipa_free_postorder_info ();
   1845   free (order);
   1846   return remove_p;
   1847 }
   1848 
   1849 /* Produce transitive closure over the callgraph and compute nothrow
   1850    attributes.  */
   1851 
static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  /* Build a reduced postorder in which strongly connected components
     are collapsed; edges that cannot propagate throwing are ignored.  */
  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = funct_state_summaries->get_create (w);

	      /* An interposable body may be replaced by a semantically
		 equivalent one that still throws.  */
	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  /* Calls that cannot throw externally, or whose callee is
		     already known nothrow, are irrelevant.  */
		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it
		     cannot be interposed.
		     When callee is compiled with non-call exceptions we also
		     must check that the declaration is bound to current
		     body as other semantically equivalent body may still
		     throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (funct_state_summaries->get_create (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      /* Indirect calls may throw unless the call site is known
		 to be ECF_NOTHROW.  */
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's nothrow state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->dump_name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
   1960 
   1961 /* Debugging function to dump state of malloc lattice.  */
   1962 
   1963 DEBUG_FUNCTION
   1964 static void
   1965 dump_malloc_lattice (FILE *dump_file, const char *s)
   1966 {
   1967   if (!dump_file)
   1968     return;
   1969 
   1970   fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
   1971   cgraph_node *node;
   1972   FOR_EACH_FUNCTION (node)
   1973     {
   1974       funct_state fs = funct_state_summaries->get (node);
   1975       if (fs)
   1976 	fprintf (dump_file, "%s: %s\n", node->dump_name (),
   1977 		 malloc_state_names[fs->malloc_state]);
   1978     }
   1979 }
   1980 
   1981 /* Propagate malloc attribute across the callgraph.  */
   1982 
static void
propagate_malloc (void)
{
  cgraph_node *node;
  /* Seed the lattice: functions declared malloc for which local
     analysis produced no summary start out as STATE_MALLOC.  */
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl))
	if (!funct_state_summaries->exists (node))
	  {
	    funct_state fs = funct_state_summaries->get_create (node);
	    fs->malloc_state = STATE_MALLOC;
	  }
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

  /* Iterate to a fixed point; states only move towards
     STATE_MALLOC_BOTTOM, so the loop terminates.  */
  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
	{
	  cgraph_node *node = order[i];
	  if (node->alias
	      || !node->definition
	      || !funct_state_summaries->exists (node))
	    continue;

	  funct_state l = funct_state_summaries->get (node);

	  /* FIXME: add support for indirect-calls.  */
	  if (node->indirect_calls)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  /* An interposable body may be replaced; give up.  */
	  if (node->get_availability () <= AVAIL_INTERPOSABLE)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (l->malloc_state == STATE_MALLOC_BOTTOM)
	    continue;

	  /* Collect callees whose return value flows to our return value
	     without being captured anywhere else.  */
	  auto_vec<cgraph_node *, 16> callees;
	  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      if (es && es->is_return_callee_uncaptured)
		callees.safe_push (cs->callee);
	    }

	  malloc_state_e new_state = l->malloc_state;
	  for (unsigned j = 0; j < callees.length (); j++)
	    {
	      cgraph_node *callee = callees[j];
	      /* NOTE(review): this tests NODE, which was already checked to
		 have a summary above, so the branch can never fire; it looks
		 like CALLEE was intended — confirm against upstream before
		 changing, since get_create below creates a summary for a
		 missing callee anyway.  */
	      if (!funct_state_summaries->exists (node))
		{
		  new_state = STATE_MALLOC_BOTTOM;
		  break;
		}
	      malloc_state_e callee_state
		= funct_state_summaries->get_create (callee)->malloc_state;
	      if (new_state < callee_state)
		new_state = callee_state;
	    }
	  if (new_state != l->malloc_state)
	    {
	      changed = true;
	      l->malloc_state = new_state;
	    }
	}
    }

  /* Commit the results: set the malloc flag on every non-alias,
     non-inline-clone function that stayed STATE_MALLOC and returns a
     non-void value.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (funct_state_summaries->exists (node))
      {
	funct_state l = funct_state_summaries->get (node);
	if (!node->alias
	    && l->malloc_state == STATE_MALLOC
	    && !node->inlined_to
	    && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Function %s found to be malloc\n",
		       node->dump_name ());

	    /* Only warn when the attribute was not already present.  */
	    bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
	    node->set_malloc_flag (true);
	    if (!malloc_decl_p && warn_suggest_attribute_malloc)
		warn_function_malloc (node->decl);
	  }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}
   2087 
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */
   2090 
   2091 unsigned int
   2092 pass_ipa_pure_const::
   2093 execute (function *)
   2094 {
   2095   bool remove_p;
   2096 
   2097   /* Nothrow makes more function to not lead to return and improve
   2098      later analysis.  */
   2099   propagate_nothrow ();
   2100   propagate_malloc ();
   2101   remove_p = propagate_pure_const ();
   2102 
   2103   delete funct_state_summaries;
   2104   return remove_p ? TODO_remove_functions : 0;
   2105 }
   2106 
   2107 static bool
   2108 gate_pure_const (void)
   2109 {
   2110   return flag_ipa_pure_const || in_lto_p;
   2111 }
   2112 
/* Construct the IPA pass.  Summary generate/write/read hooks are
   provided; transform hooks are unused because flags are applied to
   declarations directly during propagation.  */
pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p (false) {}
   2125 
   2126 ipa_opt_pass_d *
   2127 make_pass_ipa_pure_const (gcc::context *ctxt)
   2128 {
   2129   return new pass_ipa_pure_const (ctxt);
   2130 }
   2131 
   2132 /* Simple local pass for pure const discovery reusing the analysis from
   2133    ipa_pure_const.   This pass is effective when executed together with
   2134    other optimization passes in early optimization pass queue.  */
   2135 
   2136 namespace {
   2137 
/* Pass descriptor for the local (per-function) pure/const pass.  */
const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
   2150 
/* GIMPLE pass wrapper running the pure/const analysis on a single
   function early in compilation.  */
class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloned so the pass can be scheduled more than once in the queue.  */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
   2164 
unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  /* Nothing to do unless the flags may actually be changed or
     -Wsuggest-attribute diagnostics were requested.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  /* Run the local analysis; the returned state is freed below.  */
  l = analyze_function (node, false);

  /* Do NORETURN discovery.  A function whose exit block is unreachable
     (no predecessors) cannot return normally.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
	changed = true;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
    }

  /* Apply the const/pure discovery result to the declaration.  */
  switch (l->pure_const_state)
    {
    case IPA_CONST:
      changed |= ipa_make_function_const
		   (cgraph_node::get (current_function_decl), l->looping, true);
      break;

    case IPA_PURE:
      changed |= ipa_make_function_pure
		   (cgraph_node::get (current_function_decl), l->looping, true);
      break;

    default:
      break;
    }
  /* Record nothrow discovery too.  */
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  /* And finally malloc discovery.  */
  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->dump_name ());
    }

  free (l);
  /* Changing flags may have made EH edges or regions dead.  */
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
   2241 
   2242 } // anon namespace
   2243 
   2244 gimple_opt_pass *
   2245 make_pass_local_pure_const (gcc::context *ctxt)
   2246 {
   2247   return new pass_local_pure_const (ctxt);
   2248 }
   2249 
   2250 /* Emit noreturn warnings.  */
   2251 
   2252 namespace {
   2253 
/* Pass descriptor for the noreturn-warning pass; requires a CFG so the
   exit block's predecessors can be inspected.  */
const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
   2266 
/* Pass suggesting the noreturn attribute for functions that are not
   already marked volatile (noreturn) and whose exit block has no
   predecessors, i.e. that can never return.  */
class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn
   2285 
   2286 } // anon namespace
   2287 
   2288 gimple_opt_pass *
   2289 make_pass_warn_function_noreturn (gcc::context *ctxt)
   2290 {
   2291   return new pass_warn_function_noreturn (ctxt);
   2292 }
   2293 
   2294 /* Simple local pass for pure const discovery reusing the analysis from
   2295    ipa_pure_const.   This pass is effective when executed together with
   2296    other optimization passes in early optimization pass queue.  */
   2297 
   2298 namespace {
   2299 
/* Pass descriptor for the local nothrow-discovery pass.  */
const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
   2312 
/* Early GIMPLE pass discovering nothrow functions by scanning all
   statements of a single function.  */
class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloned so the pass can appear multiple times in the pipeline.  */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
   2326 
   2327 unsigned int
   2328 pass_nothrow::execute (function *)
   2329 {
   2330   struct cgraph_node *node;
   2331   basic_block this_block;
   2332 
   2333   if (TREE_NOTHROW (current_function_decl))
   2334     return 0;
   2335 
   2336   node = cgraph_node::get (current_function_decl);
   2337 
   2338   /* We run during lowering, we cannot really use availability yet.  */
   2339   if (cgraph_node::get (current_function_decl)->get_availability ()
   2340       <= AVAIL_INTERPOSABLE)
   2341     {
   2342       if (dump_file)
   2343         fprintf (dump_file, "Function is interposable;"
   2344 	         " not analyzing.\n");
   2345       return true;
   2346     }
   2347 
   2348   FOR_EACH_BB_FN (this_block, cfun)
   2349     {
   2350       for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
   2351 	   !gsi_end_p (gsi);
   2352 	   gsi_next (&gsi))
   2353         if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
   2354 	  {
   2355 	    if (is_gimple_call (gsi_stmt (gsi)))
   2356 	      {
   2357 		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
   2358 		if (callee_t && recursive_call_p (current_function_decl,
   2359 						  callee_t))
   2360 		  continue;
   2361 	      }
   2362 
   2363 	    if (dump_file)
   2364 	      {
   2365 		fprintf (dump_file, "Statement can throw: ");
   2366 		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
   2367 	      }
   2368 	    return 0;
   2369 	  }
   2370     }
   2371 
   2372   node->set_nothrow_flag (true);
   2373 
   2374   bool cfg_changed = false;
   2375   if (self_recursive_p (node))
   2376     FOR_EACH_BB_FN (this_block, cfun)
   2377       if (gimple *g = last_stmt (this_block))
   2378 	if (is_gimple_call (g))
   2379 	  {
   2380 	    tree callee_t = gimple_call_fndecl (g);
   2381 	    if (callee_t
   2382 		&& recursive_call_p (current_function_decl, callee_t)
   2383 		&& maybe_clean_eh_stmt (g)
   2384 		&& gimple_purge_dead_eh_edges (this_block))
   2385 	      cfg_changed = true;
   2386 	  }
   2387 
   2388   if (dump_file)
   2389     fprintf (dump_file, "Function found to be nothrow: %s\n",
   2390 	     current_function_name ());
   2391   return cfg_changed ? TODO_cleanup_cfg : 0;
   2392 }
   2393 
   2394 } // anon namespace
   2395 
   2396 gimple_opt_pass *
   2397 make_pass_nothrow (gcc::context *ctxt)
   2398 {
   2399   return new pass_nothrow (ctxt);
   2400 }
   2401