Home | History | Annotate | Line # | Download | only in gcc
ipa-pure-const.cc revision 1.1
      1  1.1  mrg /* Callgraph based analysis of static variables.
      2  1.1  mrg    Copyright (C) 2004-2022 Free Software Foundation, Inc.
      3  1.1  mrg    Contributed by Kenneth Zadeck <zadeck (at) naturalbridge.com>
      4  1.1  mrg 
      5  1.1  mrg This file is part of GCC.
      6  1.1  mrg 
      7  1.1  mrg GCC is free software; you can redistribute it and/or modify it under
      8  1.1  mrg the terms of the GNU General Public License as published by the Free
      9  1.1  mrg Software Foundation; either version 3, or (at your option) any later
     10  1.1  mrg version.
     11  1.1  mrg 
     12  1.1  mrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     13  1.1  mrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
     14  1.1  mrg FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     15  1.1  mrg for more details.
     16  1.1  mrg 
     17  1.1  mrg You should have received a copy of the GNU General Public License
     18  1.1  mrg along with GCC; see the file COPYING3.  If not see
     19  1.1  mrg <http://www.gnu.org/licenses/>.  */
     20  1.1  mrg 
     21  1.1  mrg /* This file marks functions as being either const (TREE_READONLY) or
     22  1.1  mrg    pure (DECL_PURE_P).  It can also set a variant of these that
     23  1.1  mrg    are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
     24  1.1  mrg 
     25  1.1  mrg    This must be run after inlining decisions have been made since
     26  1.1  mrg    otherwise, the local sets will not contain information that is
     27  1.1  mrg    consistent with post inlined state.  The global sets are not prone
     28  1.1  mrg    to this problem since they are by definition transitive.  */
     29  1.1  mrg 
     30  1.1  mrg /* The code in this module is called by the ipa pass manager. It
     31  1.1  mrg    should be one of the later passes since it's information is used by
     32  1.1  mrg    the rest of the compilation. */
     33  1.1  mrg 
     34  1.1  mrg #include "config.h"
     35  1.1  mrg #include "system.h"
     36  1.1  mrg #include "coretypes.h"
     37  1.1  mrg #include "backend.h"
     38  1.1  mrg #include "target.h"
     39  1.1  mrg #include "tree.h"
     40  1.1  mrg #include "gimple.h"
     41  1.1  mrg #include "tree-pass.h"
     42  1.1  mrg #include "tree-streamer.h"
     43  1.1  mrg #include "cgraph.h"
     44  1.1  mrg #include "diagnostic.h"
     45  1.1  mrg #include "calls.h"
     46  1.1  mrg #include "cfganal.h"
     47  1.1  mrg #include "tree-eh.h"
     48  1.1  mrg #include "gimple-iterator.h"
     49  1.1  mrg #include "gimple-walk.h"
     50  1.1  mrg #include "tree-cfg.h"
     51  1.1  mrg #include "tree-ssa-loop-niter.h"
     52  1.1  mrg #include "langhooks.h"
     53  1.1  mrg #include "ipa-utils.h"
     54  1.1  mrg #include "gimple-pretty-print.h"
     55  1.1  mrg #include "cfgloop.h"
     56  1.1  mrg #include "tree-scalar-evolution.h"
     57  1.1  mrg #include "intl.h"
     58  1.1  mrg #include "opts.h"
     59  1.1  mrg #include "ssa.h"
     60  1.1  mrg #include "alloc-pool.h"
     61  1.1  mrg #include "symbol-summary.h"
     62  1.1  mrg #include "ipa-prop.h"
     63  1.1  mrg #include "ipa-fnsummary.h"
     64  1.1  mrg #include "symtab-thunks.h"
     65  1.1  mrg #include "dbgcnt.h"
     66  1.1  mrg 
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  The numeric order of the enumerators matters: the
   propagation code below compares and merges states with "<", MIN and
   MAX, so IPA_CONST must remain the best (lowest) value and
   IPA_NEITHER the worst (highest).  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Human-readable names of the lattice values, indexed by
   pure_const_state_e.  */
static const char *pure_const_names[] = {"const", "pure", "neither"};

/* Lattice for malloc-ness of functions.  TOP is the optimistic initial
   value, BOTTOM means "definitely not malloc".  As above, the numeric
   order of the enumerators is significant.  */
enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

/* Human-readable names of the malloc lattice values, indexed by
   malloc_state_e.  */
static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
     87  1.1  mrg 
     88  1.1  mrg /* Holder for the const_state.  There is one of these per function
     89  1.1  mrg    decl.  */
     90  1.1  mrg class funct_state_d
     91  1.1  mrg {
     92  1.1  mrg public:
     93  1.1  mrg   funct_state_d (): pure_const_state (IPA_NEITHER),
     94  1.1  mrg     state_previously_known (IPA_NEITHER), looping_previously_known (true),
     95  1.1  mrg     looping (true), can_throw (true), can_free (true),
     96  1.1  mrg     malloc_state (STATE_MALLOC_BOTTOM) {}
     97  1.1  mrg 
     98  1.1  mrg   funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
     99  1.1  mrg     state_previously_known (s.state_previously_known),
    100  1.1  mrg     looping_previously_known (s.looping_previously_known),
    101  1.1  mrg     looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    102  1.1  mrg     malloc_state (s.malloc_state) {}
    103  1.1  mrg 
    104  1.1  mrg   /* See above.  */
    105  1.1  mrg   enum pure_const_state_e pure_const_state;
    106  1.1  mrg   /* What user set here; we can be always sure about this.  */
    107  1.1  mrg   enum pure_const_state_e state_previously_known;
    108  1.1  mrg   bool looping_previously_known;
    109  1.1  mrg 
    110  1.1  mrg   /* True if the function could possibly infinite loop.  There are a
    111  1.1  mrg      lot of ways that this could be determined.  We are pretty
    112  1.1  mrg      conservative here.  While it is possible to cse pure and const
    113  1.1  mrg      calls, it is not legal to have dce get rid of the call if there
    114  1.1  mrg      is a possibility that the call could infinite loop since this is
    115  1.1  mrg      a behavioral change.  */
    116  1.1  mrg   bool looping;
    117  1.1  mrg 
    118  1.1  mrg   bool can_throw;
    119  1.1  mrg 
    120  1.1  mrg   /* If function can call free, munmap or otherwise make previously
    121  1.1  mrg      non-trapping memory accesses trapping.  */
    122  1.1  mrg   bool can_free;
    123  1.1  mrg 
    124  1.1  mrg   enum malloc_state_e malloc_state;
    125  1.1  mrg };
    126  1.1  mrg 
    127  1.1  mrg typedef class funct_state_d * funct_state;
    128  1.1  mrg 
/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

class funct_state_summary_t:
  public fast_function_summary <funct_state_d *, va_heap>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    fast_function_summary <funct_state_d *, va_heap> (symtab) {}

  /* Summary hooks: INSERT is invoked when a new cgraph node appears,
     DUPLICATE when SRC_NODE is cloned into DST_NODE, copying SRC_DATA
     into DST_DATA.  Both are defined out of line elsewhere in this
     file.  */
  virtual void insert (cgraph_node *, funct_state_d *state);
  virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
			  funct_state_d *src_data,
			  funct_state_d *dst_data);
};
    145  1.1  mrg 
    146  1.1  mrg static funct_state_summary_t *funct_state_summaries = NULL;
    147  1.1  mrg 
    148  1.1  mrg static bool gate_pure_const (void);
    149  1.1  mrg 
namespace {

/* Static pass descriptor for the IPA pure/const pass; consumed by the
   pass manager.  */
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  /* NOTE(review): presumably guards register_hooks against running
     twice -- confirm against its definition later in the file.  */
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace
    181  1.1  mrg 
    182  1.1  mrg /* Try to guess if function body will always be visible to compiler
    183  1.1  mrg    when compiling the call and whether compiler will be able
    184  1.1  mrg    to propagate the information by itself.  */
    185  1.1  mrg 
    186  1.1  mrg static bool
    187  1.1  mrg function_always_visible_to_compiler_p (tree decl)
    188  1.1  mrg {
    189  1.1  mrg   return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
    190  1.1  mrg 	  || DECL_COMDAT (decl));
    191  1.1  mrg }
    192  1.1  mrg 
    193  1.1  mrg /* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
    194  1.1  mrg    is true if the function is known to be finite.  The diagnostic is
    195  1.1  mrg    controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
    196  1.1  mrg    OPTION, this function may initialize it and it is always returned
    197  1.1  mrg    by the function.  */
    198  1.1  mrg 
    199  1.1  mrg static hash_set<tree> *
    200  1.1  mrg suggest_attribute (int option, tree decl, bool known_finite,
    201  1.1  mrg 		   hash_set<tree> *warned_about,
    202  1.1  mrg 		   const char * attrib_name)
    203  1.1  mrg {
    204  1.1  mrg   if (!option_enabled (option, lang_hooks.option_lang_mask (), &global_options))
    205  1.1  mrg     return warned_about;
    206  1.1  mrg   if (TREE_THIS_VOLATILE (decl)
    207  1.1  mrg       || (known_finite && function_always_visible_to_compiler_p (decl)))
    208  1.1  mrg     return warned_about;
    209  1.1  mrg 
    210  1.1  mrg   if (!warned_about)
    211  1.1  mrg     warned_about = new hash_set<tree>;
    212  1.1  mrg   if (warned_about->contains (decl))
    213  1.1  mrg     return warned_about;
    214  1.1  mrg   warned_about->add (decl);
    215  1.1  mrg   warning_at (DECL_SOURCE_LOCATION (decl),
    216  1.1  mrg 	      option,
    217  1.1  mrg 	      known_finite
    218  1.1  mrg 	      ? G_("function might be candidate for attribute %qs")
    219  1.1  mrg 	      : G_("function might be candidate for attribute %qs"
    220  1.1  mrg 		   " if it is known to return normally"), attrib_name);
    221  1.1  mrg   return warned_about;
    222  1.1  mrg }
    223  1.1  mrg 
    224  1.1  mrg /* Emit suggestion about __attribute_((pure)) for DECL.  KNOWN_FINITE
    225  1.1  mrg    is true if the function is known to be finite.  */
    226  1.1  mrg 
    227  1.1  mrg static void
    228  1.1  mrg warn_function_pure (tree decl, bool known_finite)
    229  1.1  mrg {
    230  1.1  mrg   /* Declaring a void function pure makes no sense and is diagnosed
    231  1.1  mrg      by -Wattributes because calling it would have no effect.  */
    232  1.1  mrg   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    233  1.1  mrg     return;
    234  1.1  mrg 
    235  1.1  mrg   static hash_set<tree> *warned_about;
    236  1.1  mrg   warned_about
    237  1.1  mrg     = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
    238  1.1  mrg 			 known_finite, warned_about, "pure");
    239  1.1  mrg }
    240  1.1  mrg 
    241  1.1  mrg /* Emit suggestion about __attribute_((const)) for DECL.  KNOWN_FINITE
    242  1.1  mrg    is true if the function is known to be finite.  */
    243  1.1  mrg 
    244  1.1  mrg static void
    245  1.1  mrg warn_function_const (tree decl, bool known_finite)
    246  1.1  mrg {
    247  1.1  mrg   /* Declaring a void function const makes no sense is diagnosed
    248  1.1  mrg      by -Wattributes because calling it would have no effect.  */
    249  1.1  mrg   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    250  1.1  mrg     return;
    251  1.1  mrg 
    252  1.1  mrg   static hash_set<tree> *warned_about;
    253  1.1  mrg   warned_about
    254  1.1  mrg     = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
    255  1.1  mrg 			 known_finite, warned_about, "const");
    256  1.1  mrg }
    257  1.1  mrg 
    258  1.1  mrg /* Emit suggestion about __attribute__((malloc)) for DECL.  */
    259  1.1  mrg 
    260  1.1  mrg static void
    261  1.1  mrg warn_function_malloc (tree decl)
    262  1.1  mrg {
    263  1.1  mrg   static hash_set<tree> *warned_about;
    264  1.1  mrg   warned_about
    265  1.1  mrg     = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
    266  1.1  mrg 			 true, warned_about, "malloc");
    267  1.1  mrg }
    268  1.1  mrg 
    269  1.1  mrg /* Emit suggestion about __attribute__((noreturn)) for DECL.  */
    270  1.1  mrg 
    271  1.1  mrg static void
    272  1.1  mrg warn_function_noreturn (tree decl)
    273  1.1  mrg {
    274  1.1  mrg   tree original_decl = decl;
    275  1.1  mrg 
    276  1.1  mrg   static hash_set<tree> *warned_about;
    277  1.1  mrg   if (!lang_hooks.missing_noreturn_ok_p (decl)
    278  1.1  mrg       && targetm.warn_func_return (decl))
    279  1.1  mrg     warned_about
    280  1.1  mrg       = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
    281  1.1  mrg 			   true, warned_about, "noreturn");
    282  1.1  mrg }
    283  1.1  mrg 
    284  1.1  mrg void
    285  1.1  mrg warn_function_cold (tree decl)
    286  1.1  mrg {
    287  1.1  mrg   tree original_decl = decl;
    288  1.1  mrg 
    289  1.1  mrg   static hash_set<tree> *warned_about;
    290  1.1  mrg   warned_about
    291  1.1  mrg     = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
    292  1.1  mrg 			 true, warned_about, "cold");
    293  1.1  mrg }
    294  1.1  mrg 
    295  1.1  mrg /* Check to see if the use (or definition when CHECKING_WRITE is true)
    296  1.1  mrg    variable T is legal in a function that is either pure or const.  */
    297  1.1  mrg 
    298  1.1  mrg static inline void
    299  1.1  mrg check_decl (funct_state local,
    300  1.1  mrg 	    tree t, bool checking_write, bool ipa)
    301  1.1  mrg {
    302  1.1  mrg   /* Do not want to do anything with volatile except mark any
    303  1.1  mrg      function that uses one to be not const or pure.  */
    304  1.1  mrg   if (TREE_THIS_VOLATILE (t))
    305  1.1  mrg     {
    306  1.1  mrg       local->pure_const_state = IPA_NEITHER;
    307  1.1  mrg       if (dump_file)
    308  1.1  mrg         fprintf (dump_file, "    Volatile operand is not const/pure\n");
    309  1.1  mrg       return;
    310  1.1  mrg     }
    311  1.1  mrg 
    312  1.1  mrg   /* Do not care about a local automatic that is not static.  */
    313  1.1  mrg   if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    314  1.1  mrg     return;
    315  1.1  mrg 
    316  1.1  mrg   /* If the variable has the "used" attribute, treat it as if it had a
    317  1.1  mrg      been touched by the devil.  */
    318  1.1  mrg   if (DECL_PRESERVE_P (t))
    319  1.1  mrg     {
    320  1.1  mrg       local->pure_const_state = IPA_NEITHER;
    321  1.1  mrg       if (dump_file)
    322  1.1  mrg         fprintf (dump_file, "    Used static/global variable is not const/pure\n");
    323  1.1  mrg       return;
    324  1.1  mrg     }
    325  1.1  mrg 
    326  1.1  mrg   /* In IPA mode we are not interested in checking actual loads and stores;
    327  1.1  mrg      they will be processed at propagation time using ipa_ref.  */
    328  1.1  mrg   if (ipa)
    329  1.1  mrg     return;
    330  1.1  mrg 
    331  1.1  mrg   /* Since we have dealt with the locals and params cases above, if we
    332  1.1  mrg      are CHECKING_WRITE, this cannot be a pure or constant
    333  1.1  mrg      function.  */
    334  1.1  mrg   if (checking_write)
    335  1.1  mrg     {
    336  1.1  mrg       local->pure_const_state = IPA_NEITHER;
    337  1.1  mrg       if (dump_file)
    338  1.1  mrg         fprintf (dump_file, "    static/global memory write is not const/pure\n");
    339  1.1  mrg       return;
    340  1.1  mrg     }
    341  1.1  mrg 
    342  1.1  mrg   if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    343  1.1  mrg     {
    344  1.1  mrg       /* Readonly reads are safe.  */
    345  1.1  mrg       if (TREE_READONLY (t))
    346  1.1  mrg 	return; /* Read of a constant, do not change the function state.  */
    347  1.1  mrg       else
    348  1.1  mrg 	{
    349  1.1  mrg           if (dump_file)
    350  1.1  mrg             fprintf (dump_file, "    global memory read is not const\n");
    351  1.1  mrg 	  /* Just a regular read.  */
    352  1.1  mrg 	  if (local->pure_const_state == IPA_CONST)
    353  1.1  mrg 	    local->pure_const_state = IPA_PURE;
    354  1.1  mrg 	}
    355  1.1  mrg     }
    356  1.1  mrg   else
    357  1.1  mrg     {
    358  1.1  mrg       /* Compilation level statics can be read if they are readonly
    359  1.1  mrg 	 variables.  */
    360  1.1  mrg       if (TREE_READONLY (t))
    361  1.1  mrg 	return;
    362  1.1  mrg 
    363  1.1  mrg       if (dump_file)
    364  1.1  mrg 	fprintf (dump_file, "    static memory read is not const\n");
    365  1.1  mrg       /* Just a regular read.  */
    366  1.1  mrg       if (local->pure_const_state == IPA_CONST)
    367  1.1  mrg 	local->pure_const_state = IPA_PURE;
    368  1.1  mrg     }
    369  1.1  mrg }
    370  1.1  mrg 
    371  1.1  mrg 
    372  1.1  mrg /* Check to see if the use (or definition when CHECKING_WRITE is true)
    373  1.1  mrg    variable T is legal in a function that is either pure or const.  */
    374  1.1  mrg 
    375  1.1  mrg static inline void
    376  1.1  mrg check_op (funct_state local, tree t, bool checking_write)
    377  1.1  mrg {
    378  1.1  mrg   t = get_base_address (t);
    379  1.1  mrg   if (t && TREE_THIS_VOLATILE (t))
    380  1.1  mrg     {
    381  1.1  mrg       local->pure_const_state = IPA_NEITHER;
    382  1.1  mrg       if (dump_file)
    383  1.1  mrg 	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
    384  1.1  mrg       return;
    385  1.1  mrg     }
    386  1.1  mrg   else if (refs_local_or_readonly_memory_p (t))
    387  1.1  mrg     {
    388  1.1  mrg       if (dump_file)
    389  1.1  mrg 	fprintf (dump_file, "    Indirect ref to local or readonly "
    390  1.1  mrg 		 "memory is OK\n");
    391  1.1  mrg       return;
    392  1.1  mrg     }
    393  1.1  mrg   else if (checking_write)
    394  1.1  mrg     {
    395  1.1  mrg       local->pure_const_state = IPA_NEITHER;
    396  1.1  mrg       if (dump_file)
    397  1.1  mrg 	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
    398  1.1  mrg       return;
    399  1.1  mrg     }
    400  1.1  mrg   else
    401  1.1  mrg     {
    402  1.1  mrg       if (dump_file)
    403  1.1  mrg 	fprintf (dump_file, "    Indirect ref read is not const\n");
    404  1.1  mrg       if (local->pure_const_state == IPA_CONST)
    405  1.1  mrg 	local->pure_const_state = IPA_PURE;
    406  1.1  mrg     }
    407  1.1  mrg }
    408  1.1  mrg 
    409  1.1  mrg /* compute state based on ECF FLAGS and store to STATE and LOOPING.  */
    410  1.1  mrg 
    411  1.1  mrg static void
    412  1.1  mrg state_from_flags (enum pure_const_state_e *state, bool *looping,
    413  1.1  mrg 	          int flags, bool cannot_lead_to_return)
    414  1.1  mrg {
    415  1.1  mrg   *looping = false;
    416  1.1  mrg   if (flags & ECF_LOOPING_CONST_OR_PURE)
    417  1.1  mrg     {
    418  1.1  mrg       *looping = true;
    419  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    420  1.1  mrg 	fprintf (dump_file, " looping\n");
    421  1.1  mrg     }
    422  1.1  mrg   if (flags & ECF_CONST)
    423  1.1  mrg     {
    424  1.1  mrg       *state = IPA_CONST;
    425  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    426  1.1  mrg 	fprintf (dump_file, " const\n");
    427  1.1  mrg     }
    428  1.1  mrg   else if (flags & ECF_PURE)
    429  1.1  mrg     {
    430  1.1  mrg       *state = IPA_PURE;
    431  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    432  1.1  mrg 	fprintf (dump_file, " pure\n");
    433  1.1  mrg     }
    434  1.1  mrg   else if (cannot_lead_to_return)
    435  1.1  mrg     {
    436  1.1  mrg       *state = IPA_PURE;
    437  1.1  mrg       *looping = true;
    438  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    439  1.1  mrg 	fprintf (dump_file, " ignoring side effects->pure looping\n");
    440  1.1  mrg     }
    441  1.1  mrg   else
    442  1.1  mrg     {
    443  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
    444  1.1  mrg 	fprintf (dump_file, " neither\n");
    445  1.1  mrg       *state = IPA_NEITHER;
    446  1.1  mrg       *looping = true;
    447  1.1  mrg     }
    448  1.1  mrg }
    449  1.1  mrg 
    450  1.1  mrg /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
    451  1.1  mrg    into STATE and LOOPING better of the two variants.
    452  1.1  mrg    Be sure to merge looping correctly.  IPA_NEITHER functions
    453  1.1  mrg    have looping 0 even if they don't have to return.  */
    454  1.1  mrg 
    455  1.1  mrg static inline void
    456  1.1  mrg better_state (enum pure_const_state_e *state, bool *looping,
    457  1.1  mrg 	      enum pure_const_state_e state2, bool looping2)
    458  1.1  mrg {
    459  1.1  mrg   if (state2 < *state)
    460  1.1  mrg     {
    461  1.1  mrg       if (*state == IPA_NEITHER)
    462  1.1  mrg 	*looping = looping2;
    463  1.1  mrg       else
    464  1.1  mrg 	*looping = MIN (*looping, looping2);
    465  1.1  mrg       *state = state2;
    466  1.1  mrg     }
    467  1.1  mrg   else if (state2 != IPA_NEITHER)
    468  1.1  mrg     *looping = MIN (*looping, looping2);
    469  1.1  mrg }
    470  1.1  mrg 
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING worse of the two variants.
   FROM is the caller's symtab node and TO the callee's (TO may be NULL
   when the callee is unknown); they are used to decide whether the
   callee can be interposed at link time.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now if this function will be detected as CONST however when interposed it
     may end up being just pure.  We always must assume the worst scenario here.
   */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->dump_name ());
      state2 = IPA_PURE;
    }
  /* The lattice enumerators are ordered so that a larger value is
     worse; MAX therefore merges towards the pessimistic side.  */
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
    509  1.1  mrg 
/* Recognize special cases of builtins that are by themselves not const
   but function using them is.  Return true if CALLEE is such a builtin
   and set *LOOPING to whether the caller must still be treated as
   possibly looping (which keeps the call from being removed by dce;
   see the comment on funct_state_d::looping).  */
bool
builtin_safe_for_const_function_p (bool *looping, tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      /* These builtins only inspect or manipulate the state of the
	 current frame or the EH machinery, which does not break
	 const-ness of the caller.  */
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
      case BUILT_IN_DWARF_CFA:
      case BUILT_IN_RETURN_ADDRESS:
	*looping = false;
	return true;
      /* Prefetch is compatible with const but is reported as looping
	 so the call itself is preserved.  */
      case BUILT_IN_PREFETCH:
	*looping = true;
	return true;
      default:
	break;
      }
  return false;
}
    544  1.1  mrg 
/* Check the parameters of a function call to CALL_EXPR to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also effect the purity.  The CALL_EXPR node for
   the entire call expression.  Updates LOCAL in place; IPA is true
   when running the interprocedural (summary-building) analysis.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
  				     && stmt_can_throw_external (cfun, call));

  /* Record throwing behavior of the call's operands themselves (for
     -fnon-call-exceptions the operand evaluation may trap).  */
  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      bool call_looping;

      /* A builtin that is not known to be non-freeing may invalidate
	 previously non-trapping memory.  */
      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      /* Builtins that are harmless for const functions short-circuit
	 the rest of the analysis for this call.  */
      if (builtin_safe_for_const_function_p (&call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       IPA_CONST, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
          local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file,
		       "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
	}
      if (possibly_throws_externally)
        {
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
	      	       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
          local->can_throw = true;
	}
      /* Derive the call's state from its ECF flags alone and merge it
	 pessimistically into LOCAL.  A call that can never return
	 (noreturn+nothrow, or noreturn without exceptions) is treated
	 as if its side effects were ignorable.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
    683  1.1  mrg 
    684  1.1  mrg /* Wrapper around check_decl for loads in local more.  */
    685  1.1  mrg 
    686  1.1  mrg static bool
    687  1.1  mrg check_load (gimple *, tree op, tree, void *data)
    688  1.1  mrg {
    689  1.1  mrg   if (DECL_P (op))
    690  1.1  mrg     check_decl ((funct_state)data, op, false, false);
    691  1.1  mrg   else
    692  1.1  mrg     check_op ((funct_state)data, op, false);
    693  1.1  mrg   return false;
    694  1.1  mrg }
    695  1.1  mrg 
    696  1.1  mrg /* Wrapper around check_decl for stores in local more.  */
    697  1.1  mrg 
    698  1.1  mrg static bool
    699  1.1  mrg check_store (gimple *, tree op, tree, void *data)
    700  1.1  mrg {
    701  1.1  mrg   if (DECL_P (op))
    702  1.1  mrg     check_decl ((funct_state)data, op, true, false);
    703  1.1  mrg   else
    704  1.1  mrg     check_op ((funct_state)data, op, true);
    705  1.1  mrg   return false;
    706  1.1  mrg }
    707  1.1  mrg 
    708  1.1  mrg /* Wrapper around check_decl for loads in ipa mode.  */
    709  1.1  mrg 
    710  1.1  mrg static bool
    711  1.1  mrg check_ipa_load (gimple *, tree op, tree, void *data)
    712  1.1  mrg {
    713  1.1  mrg   if (DECL_P (op))
    714  1.1  mrg     check_decl ((funct_state)data, op, false, true);
    715  1.1  mrg   else
    716  1.1  mrg     check_op ((funct_state)data, op, false);
    717  1.1  mrg   return false;
    718  1.1  mrg }
    719  1.1  mrg 
    720  1.1  mrg /* Wrapper around check_decl for stores in ipa mode.  */
    721  1.1  mrg 
    722  1.1  mrg static bool
    723  1.1  mrg check_ipa_store (gimple *, tree op, tree, void *data)
    724  1.1  mrg {
    725  1.1  mrg   if (DECL_P (op))
    726  1.1  mrg     check_decl ((funct_state)data, op, true, true);
    727  1.1  mrg   else
    728  1.1  mrg     check_op ((funct_state)data, op, true);
    729  1.1  mrg   return false;
    730  1.1  mrg }
    731  1.1  mrg 
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has.  Results are accumulated into LOCAL (pure/const state,
   looping, can_throw, can_free); IPA selects the ipa-mode load/store
   callbacks and clobber handling.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  /* Debug statements have no effect on the analysis.  */
  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     Similar logic is in ipa-modref.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers to be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  /* Any volatile access other than a clobber immediately drops the
     function to IPA_NEITHER (neither const nor pure).  */
  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store :  check_store);

  /* Throwing behavior of calls is handled in check_call; cover the
     remaining statement kinds here.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (cfun, stmt))
    {
      /* A potentially trapping statement may also loop (trap handlers
	 can resume), so record looping when non-call EH is enabled.  */
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (cfun, stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump. */
	{
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      /* An asm that clobbers memory may store anywhere, including to
	 heap bookkeeping, hence can_free as well.  */
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      /* Volatile asm additionally may loop (e.g. busy-wait).  */
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
    829  1.1  mrg 
    830  1.1  mrg /* Check that RETVAL is used only in STMT and in comparisons against 0.
    831  1.1  mrg    RETVAL is return value of the function and STMT is return stmt.  */
    832  1.1  mrg 
    833  1.1  mrg static bool
    834  1.1  mrg check_retval_uses (tree retval, gimple *stmt)
    835  1.1  mrg {
    836  1.1  mrg   imm_use_iterator use_iter;
    837  1.1  mrg   gimple *use_stmt;
    838  1.1  mrg 
    839  1.1  mrg   FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    840  1.1  mrg     if (gcond *cond = dyn_cast<gcond *> (use_stmt))
    841  1.1  mrg       {
    842  1.1  mrg 	tree op2 = gimple_cond_rhs (cond);
    843  1.1  mrg 	if (!integer_zerop (op2))
    844  1.1  mrg 	  return false;
    845  1.1  mrg       }
    846  1.1  mrg     else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
    847  1.1  mrg       {
    848  1.1  mrg 	enum tree_code code = gimple_assign_rhs_code (ga);
    849  1.1  mrg 	if (TREE_CODE_CLASS (code) != tcc_comparison)
    850  1.1  mrg 	  return false;
    851  1.1  mrg 	if (!integer_zerop (gimple_assign_rhs2 (ga)))
    852  1.1  mrg 	  return false;
    853  1.1  mrg       }
    854  1.1  mrg     else if (is_gimple_debug (use_stmt))
    855  1.1  mrg       ;
    856  1.1  mrg     else if (use_stmt != stmt)
    857  1.1  mrg       return false;
    858  1.1  mrg 
    859  1.1  mrg   return true;
    860  1.1  mrg }
    861  1.1  mrg 
    862  1.1  mrg /* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
    863  1.1  mrg    attribute. Currently this function does a very conservative analysis.
    864  1.1  mrg    FUN is considered to be a candidate if
    865  1.1  mrg    1) It returns a value of pointer type.
    866  1.1  mrg    2) SSA_NAME_DEF_STMT (return_value) is either a function call or
    867  1.1  mrg       a phi, and element of phi is either NULL or
    868  1.1  mrg       SSA_NAME_DEF_STMT(element) is function call.
    869  1.1  mrg    3) The return-value has immediate uses only within comparisons (gcond or gassign)
    870  1.1  mrg       and return_stmt (and likewise a phi arg has immediate use only within comparison
    871  1.1  mrg       or the phi stmt).  */
    872  1.1  mrg 
/* Helper for the malloc-candidate analysis below: dump REASON (under
   -fdump-...-details) and return false from the enclosing function.
   NOTE: expands a reference to a local variable `node' (the cgraph node
   being analyzed), which must be in scope at every expansion site.  */
#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->dump_name ()), (reason));  \
  return false;  \
}
    880  1.1  mrg 
/* Worker for malloc_candidate_p: check one return value RETVAL of FUN
   flowing into RET_STMT (a return or a phi).  VISITED guards against
   revisiting the same SSA name when phis form cycles.  Returns true if
   RETVAL is (transitively) produced only by calls — in non-IPA mode only
   by calls to DECL_IS_MALLOC functions — and is never captured.  */
static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
		      bitmap visited)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);
  /* Already seen this SSA name; treat as OK to break phi cycles.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
    return true;

  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
		    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      /* Indirect call: cannot prove anything.  */
      if (!callee_decl)
	return false;

      if (!ipa && !DECL_IS_MALLOC (callee_decl))
	DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			" non-ipa mode.")

      /* Record on the call edge that the return value is not captured,
	 for use by later IPA propagation.  */
      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
	{
	  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	  es->is_return_callee_uncaptured = true;
	}
    }

    else if (gphi *phi = dyn_cast<gphi *> (def))
      {
	/* Each non-zero phi argument must itself come from a call (or a
	   nested phi, handled recursively).  */
	bool all_args_zero = true;
	for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	  {
	    tree arg = gimple_phi_arg_def (phi, i);
	    /* NULL results are allowed — malloc may return NULL.  */
	    if (integer_zerop (arg))
	      continue;

	    all_args_zero = false;
	    if (TREE_CODE (arg) != SSA_NAME)
	      DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
	    if (!check_retval_uses (arg, phi))
	      DUMP_AND_RETURN ("phi arg has uses outside phi"
				 " and comparisons against 0.")

	    gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	    if (is_a<gphi *> (arg_def))
	      {
		/* Recurse into nested phis; VISITED bounds the walk.  */
		if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
		    DUMP_AND_RETURN ("nested phi fail")
		continue;
	      }

	    gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	    if (!call_stmt)
	      DUMP_AND_RETURN ("phi arg is a not a call_stmt.")

	    tree callee_decl = gimple_call_fndecl (call_stmt);
	    if (!callee_decl)
	      return false;
	    if (!ipa && !DECL_IS_MALLOC (callee_decl))
	      DUMP_AND_RETURN("callee_decl does not have malloc attribute"
			      " for non-ipa mode.")

	    /* As above: mark the edge's return value as uncaptured.  */
	    cgraph_edge *cs = node->get_edge (call_stmt);
	    if (cs)
	      {
		ipa_call_summary *es = ipa_call_summaries->get_create (cs);
		es->is_return_callee_uncaptured = true;
	      }
	  }

	if (all_args_zero)
	  DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
      }

    else
      DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}
    965  1.1  mrg 
/* Entry point of the malloc-candidate analysis (documented above the
   DUMP_AND_RETURN macro).  Walks every return statement of FUN and checks
   its return value with malloc_candidate_p_1.  */
static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  /* A function that never returns cannot be malloc; and the analysis
     relies on comparisons against 0, which -fdelete-null-pointer-checks
     justifies treating as a capture-free use.  */
  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  /* VISITED is shared across all return edges so phi webs are walked
     only once.  */
  auto_bitmap visited;
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
	return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}
   1004  1.1  mrg 
   1005  1.1  mrg #undef DUMP_AND_RETURN
   1006  1.1  mrg 
   1007  1.1  mrg /* Return true if function is known to be finite.  */
   1008  1.1  mrg 
   1009  1.1  mrg bool
   1010  1.1  mrg finite_function_p ()
   1011  1.1  mrg {
   1012  1.1  mrg   /* Const functions cannot have back edges (an
   1013  1.1  mrg      indication of possible infinite loop side
   1014  1.1  mrg      effect.  */
   1015  1.1  mrg   bool finite = true;
   1016  1.1  mrg   if (mark_dfs_back_edges ())
   1017  1.1  mrg     {
   1018  1.1  mrg       /* Preheaders are needed for SCEV to work.
   1019  1.1  mrg 	 Simple latches and recorded exits improve chances that loop will
   1020  1.1  mrg 	 proved to be finite in testcases such as in loop-15.c
   1021  1.1  mrg 	 and loop-24.c  */
   1022  1.1  mrg       loop_optimizer_init (LOOPS_HAVE_PREHEADERS
   1023  1.1  mrg 			   | LOOPS_HAVE_SIMPLE_LATCHES
   1024  1.1  mrg 			   | LOOPS_HAVE_RECORDED_EXITS);
   1025  1.1  mrg       if (dump_file && (dump_flags & TDF_DETAILS))
   1026  1.1  mrg 	flow_loops_dump (dump_file, NULL, 0);
   1027  1.1  mrg       if (mark_irreducible_loops ())
   1028  1.1  mrg 	{
   1029  1.1  mrg 	  if (dump_file)
   1030  1.1  mrg 	    fprintf (dump_file, "    has irreducible loops\n");
   1031  1.1  mrg 	  finite = false;
   1032  1.1  mrg 	}
   1033  1.1  mrg       else
   1034  1.1  mrg 	{
   1035  1.1  mrg 	  scev_initialize ();
   1036  1.1  mrg 	  for (auto loop : loops_list (cfun, 0))
   1037  1.1  mrg 	    if (!finite_loop_p (loop))
   1038  1.1  mrg 	      {
   1039  1.1  mrg 		if (dump_file)
   1040  1.1  mrg 		  fprintf (dump_file, "    cannot prove finiteness of "
   1041  1.1  mrg 			   "loop %i\n", loop->num);
   1042  1.1  mrg 		finite =false;
   1043  1.1  mrg 		break;
   1044  1.1  mrg 	      }
   1045  1.1  mrg 	  scev_finalize ();
   1046  1.1  mrg 	}
   1047  1.1  mrg       loop_optimizer_finalize ();
   1048  1.1  mrg     }
   1049  1.1  mrg   return finite;
   1050  1.1  mrg }
   1051  1.1  mrg 
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  Allocates and returns a fresh
   funct_state summarizing FN's local pure/const/looping/throw/free
   properties; the caller owns the returned memory.  IPA selects
   ipa-mode statement checking.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  /* Start optimistic (IPA_CONST); check_stmt only ever worsens state.  */
  l = XCNEW (class funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  /* Seed the previously-known state from declaration flags (e.g. an
     existing const/pure attribute).  */
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->dump_name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      /* NOTE(review): WI is zeroed but otherwise appears unused in this
	 function — possibly left over from an older walker API; confirm
	 before removing.  */
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  /* NULL memory accesses terminates BB.  These accesses are known
	     to trip undefined behaviour.  gimple-ssa-isolate-paths turns them
	     to volatile accesses and adds builtin_trap call which would
	     confuse us otherwise.  */
	  if (infer_nonnull_range_by_dereference (gsi_stmt (gsi),
						  null_pointer_node))
	    {
	      if (dump_file)
		fprintf (dump_file, "  NULL memory access; terminating BB%s\n",
			 flag_non_call_exceptions ? "; looping" : "");
	      if (flag_non_call_exceptions)
		{
		  l->looping = true;
		  if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
		    {
		      if (dump_file)
			fprintf (dump_file, "    can throw externally\n");
		      l->can_throw = true;
		    }
		}
	      break;
	    }
	  check_stmt (&gsi, l, ipa);
	  /* Once every tracked property is at its worst value there is
	     nothing left to learn; stop scanning.  */
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  /* A const/pure function with unprovably-finite loops becomes the
     "looping" variant.  */
  if (l->pure_const_state != IPA_NEITHER
      && !l->looping
      && !finite_function_p ())
    l->looping = true;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  /* Declared attributes may be stronger than what we derived; take the
     better of the two.  */
  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  /* Compute the malloc lattice value: declared malloc wins; otherwise in
     IPA mode candidates start at TOP for later propagation, while local
     mode decides immediately.  */
  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}
   1172  1.1  mrg 
   1173  1.1  mrg void
   1174  1.1  mrg funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
   1175  1.1  mrg {
   1176  1.1  mrg   /* There are some shared nodes, in particular the initializers on
   1177  1.1  mrg      static declarations.  We do not need to scan them more than once
   1178  1.1  mrg      since all we would be interested in are the addressof
   1179  1.1  mrg      operations.  */
   1180  1.1  mrg   if (opt_for_fn (node->decl, flag_ipa_pure_const))
   1181  1.1  mrg     {
   1182  1.1  mrg       funct_state_d *a = analyze_function (node, true);
   1183  1.1  mrg       new (state) funct_state_d (*a);
   1184  1.1  mrg       free (a);
   1185  1.1  mrg     }
   1186  1.1  mrg   else
   1187  1.1  mrg     /* Do not keep stale summaries.  */
   1188  1.1  mrg     funct_state_summaries->remove (node);
   1189  1.1  mrg }
   1190  1.1  mrg 
   1191  1.1  mrg /* Called when new clone is inserted to callgraph late.  */
   1192  1.1  mrg 
   1193  1.1  mrg void
   1194  1.1  mrg funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
   1195  1.1  mrg 				  funct_state_d *src_data,
   1196  1.1  mrg 				  funct_state_d *dst_data)
   1197  1.1  mrg {
   1198  1.1  mrg   new (dst_data) funct_state_d (*src_data);
   1199  1.1  mrg   if (dst_data->malloc_state == STATE_MALLOC
   1200  1.1  mrg       && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
   1201  1.1  mrg     dst_data->malloc_state = STATE_MALLOC_BOTTOM;
   1202  1.1  mrg }
   1203  1.1  mrg 
   1204  1.1  mrg 
   1205  1.1  mrg void
   1207  1.1  mrg pass_ipa_pure_const::
   1208  1.1  mrg register_hooks (void)
   1209  1.1  mrg {
   1210  1.1  mrg   if (init_p)
   1211  1.1  mrg     return;
   1212  1.1  mrg 
   1213  1.1  mrg   init_p = true;
   1214  1.1  mrg 
   1215  1.1  mrg   funct_state_summaries = new funct_state_summary_t (symtab);
   1216  1.1  mrg }
   1217  1.1  mrg 
   1218  1.1  mrg 
   1219  1.1  mrg /* Analyze each function in the cgraph to see if it is locally PURE or
   1220  1.1  mrg    CONST.  */
   1221  1.1  mrg 
   1222  1.1  mrg static void
   1223  1.1  mrg pure_const_generate_summary (void)
   1224  1.1  mrg {
   1225  1.1  mrg   struct cgraph_node *node;
   1226  1.1  mrg 
   1227  1.1  mrg   pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
   1228  1.1  mrg   pass->register_hooks ();
   1229  1.1  mrg 
   1230  1.1  mrg   /* Process all of the functions.
   1231  1.1  mrg 
   1232  1.1  mrg      We process AVAIL_INTERPOSABLE functions.  We cannot use the results
   1233  1.1  mrg      by default, but the info can be used at LTO with -fwhole-program or
   1234  1.1  mrg      when function got cloned and the clone is AVAILABLE.  */
   1235  1.1  mrg 
   1236  1.1  mrg   FOR_EACH_DEFINED_FUNCTION (node)
   1237  1.1  mrg     if (opt_for_fn (node->decl, flag_ipa_pure_const))
   1238  1.1  mrg       {
   1239  1.1  mrg 	funct_state_d *a = analyze_function (node, true);
   1240  1.1  mrg 	new (funct_state_summaries->get_create (node)) funct_state_d (*a);
   1241  1.1  mrg 	free (a);
   1242  1.1  mrg       }
   1243  1.1  mrg }
   1244  1.1  mrg 
   1245  1.1  mrg 
/* Serialize the ipa info for lto.  Writes, for every defined function in
   the current partition that has a summary, its encoder reference followed
   by a bit-packed funct_state; the record count is written first.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* First pass: count the records so the reader knows how many follow.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
	{
	  struct bitpack_d bp;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.
	     The pack order here must match the unpack order in
	     pure_const_read_summary exactly.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
   1302  1.1  mrg 
   1303  1.1  mrg 
/* Deserialize the ipa info for lto.  Mirror image of
   pure_const_write_summary: for each input file, read the record count and
   then COUNT (node-reference, bitpack) pairs into fresh summaries.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      /* Files with no pure-const section yield a NULL block; skip them.  */
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));

	      fs = funct_state_summaries->get_create (node);
	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  This unpack order must match the
		 pack order in pure_const_write_summary exactly.  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file,"  function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file,"  function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file,"  function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file,"  function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
   1391  1.1  mrg 
   1392  1.1  mrg /* We only propagate across edges that can throw externally and their callee
   1393  1.1  mrg    is not interposable.  */
   1394  1.1  mrg 
   1395  1.1  mrg static bool
   1396  1.1  mrg ignore_edge_for_nothrow (struct cgraph_edge *e)
   1397  1.1  mrg {
   1398  1.1  mrg   if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
   1399  1.1  mrg     return true;
   1400  1.1  mrg 
   1401  1.1  mrg   enum availability avail;
   1402  1.1  mrg   cgraph_node *ultimate_target
   1403  1.1  mrg     = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
   1404  1.1  mrg   if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
   1405  1.1  mrg     return true;
   1406  1.1  mrg   return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
   1407  1.1  mrg 	   && !e->callee->binds_to_current_def_p (e->caller))
   1408  1.1  mrg 	  || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
   1409  1.1  mrg 	  || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
   1410  1.1  mrg }
   1411  1.1  mrg 
   1412  1.1  mrg /* Return true if NODE is self recursive function.
   1413  1.1  mrg    Indirectly recursive functions appears as non-trivial strongly
   1414  1.1  mrg    connected components, so we need to care about self recursion
   1415  1.1  mrg    only.  */
   1416  1.1  mrg 
   1417  1.1  mrg static bool
   1418  1.1  mrg self_recursive_p (struct cgraph_node *node)
   1419  1.1  mrg {
   1420  1.1  mrg   struct cgraph_edge *e;
   1421  1.1  mrg   for (e = node->callees; e; e = e->next_callee)
   1422  1.1  mrg     if (e->callee->function_symbol () == node)
   1423  1.1  mrg       return true;
   1424  1.1  mrg   return false;
   1425  1.1  mrg }
   1426  1.1  mrg 
   1427  1.1  mrg /* Return true if N is cdtor that is not const or pure.  In this case we may
   1428  1.1  mrg    need to remove unreachable function if it is marked const/pure.  */
   1429  1.1  mrg 
   1430  1.1  mrg static bool
   1431  1.1  mrg cdtor_p (cgraph_node *n, void *)
   1432  1.1  mrg {
   1433  1.1  mrg   if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
   1434  1.1  mrg     return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
   1435  1.1  mrg 	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
   1436  1.1  mrg   return false;
   1437  1.1  mrg }
   1438  1.1  mrg 
   1439  1.1  mrg /* Skip edges from and to nodes without ipa_pure_const enabled.
   1440  1.1  mrg    Ignore not available symbols.  */
   1441  1.1  mrg 
   1442  1.1  mrg static bool
   1443  1.1  mrg ignore_edge_for_pure_const (struct cgraph_edge *e)
   1444  1.1  mrg {
   1445  1.1  mrg   enum availability avail;
   1446  1.1  mrg   cgraph_node *ultimate_target
   1447  1.1  mrg     = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
   1448  1.1  mrg 
   1449  1.1  mrg   return (avail <= AVAIL_INTERPOSABLE
   1450  1.1  mrg 	  || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
   1451  1.1  mrg 	  || !opt_for_fn (ultimate_target->decl,
   1452  1.1  mrg 			  flag_ipa_pure_const));
   1453  1.1  mrg }
   1454  1.1  mrg 
   1455  1.1  mrg /* Return true if function should be skipped for local pure const analysis.  */
   1456  1.1  mrg 
   1457  1.1  mrg static bool
   1458  1.1  mrg skip_function_for_local_pure_const (struct cgraph_node *node)
   1459  1.1  mrg {
   1460  1.1  mrg   /* Because we do not schedule pass_fixup_cfg over whole program after early
   1461  1.1  mrg      optimizations we must not promote functions that are called by already
   1462  1.1  mrg      processed functions.  */
   1463  1.1  mrg 
   1464  1.1  mrg   if (function_called_by_processed_nodes_p ())
   1465  1.1  mrg     {
   1466  1.1  mrg       if (dump_file)
   1467  1.1  mrg 	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
   1468  1.1  mrg       return true;
   1469  1.1  mrg     }
   1470  1.1  mrg   /* Save some work and do not analyze functions which are interposable and
   1471  1.1  mrg      do not have any non-interposable aliases.  */
   1472  1.1  mrg   if (node->get_availability () <= AVAIL_INTERPOSABLE
   1473  1.1  mrg       && !flag_lto
   1474  1.1  mrg       && !node->has_aliases_p ())
   1475  1.1  mrg     {
   1476  1.1  mrg       if (dump_file)
   1477  1.1  mrg 	fprintf (dump_file,
   1478  1.1  mrg 		 "Function is interposable; not analyzing.\n");
   1479  1.1  mrg       return true;
   1480  1.1  mrg     }
   1481  1.1  mrg   return false;
   1482  1.1  mrg }
   1483  1.1  mrg 
/* Make function const and output warning.  If LOCAL is true,
   return true if anything changed.  Otherwise return true if
   we may have introduced removable cdtors.  */

bool
ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
{
  bool cdtor = false;

  /* Nothing to do if the declaration is already at least as strong as
     the state being set.  */
  if (TREE_READONLY (node->decl)
      && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
    return false;
  warn_function_const (node->decl, !looping);
  if (local && skip_function_for_local_pure_const (node))
    return false;
  if (dump_file)
    fprintf (dump_file, "Function found to be %sconst: %s\n",
	     looping ? "looping " : "",
	     node->dump_name ());
  /* Check for removable cdtors before flipping the flag, so the caller
     learns whether unreachable-node removal may be needed.  */
  if (!local && !looping)
    cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
  /* Debug counter gate: allows bisecting which attribute change causes
     a miscompile.  */
  if (!dbg_cnt (ipa_attr))
    return false;
  if (node->set_const_flag (true, looping))
    {
      if (dump_file)
	fprintf (dump_file,
		 "Declaration updated to be %sconst: %s\n",
		 looping ? "looping " : "",
		 node->dump_name ());
      if (local)
	return true;
      return cdtor;
    }
  return false;
}
   1520  1.1  mrg 
/* Make function pure and output warning.  If LOCAL is true,
   return true if anything changed.  Otherwise return true if
   we may have introduced removable cdtors.  */

bool
ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
{
  bool cdtor = false;

  /* Nothing to do if the declaration is already const, or already pure
     and at least as strong as the state being set.  */
  if (TREE_READONLY (node->decl)
      || (DECL_PURE_P (node->decl)
	  && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl))))
    return false;
  warn_function_pure (node->decl, !looping);
  if (local && skip_function_for_local_pure_const (node))
    return false;
  if (dump_file)
    fprintf (dump_file, "Function found to be %spure: %s\n",
	     looping ? "looping " : "",
	     node->dump_name ());
  /* Check for removable cdtors before flipping the flag, so the caller
     learns whether unreachable-node removal may be needed.  */
  if (!local && !looping)
    cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
  /* Debug counter gate: allows bisecting which attribute change causes
     a miscompile.  */
  if (!dbg_cnt (ipa_attr))
    return false;
  if (node->set_pure_flag (true, looping))
    {
      if (dump_file)
	fprintf (dump_file,
		 "Declaration updated to be %spure: %s\n",
		 looping ? "looping " : "",
		 node->dump_name ());
      if (local)
	return true;
      return cdtor;
    }
  return false;
}
   1558  1.1  mrg 
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  Return true if we may have turned a cdtor const/pure and
   thus made it removable.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;

  /* Reduce the graph: nodes of a strongly connected component are
     chained together via aux->next_cycle and processed as one region.  */
  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
		     w->dump_name (),
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.
	     We are safe to pass NULL as FROM and TO because we will take care
	     of possible interposition when walking callees.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping,
		       NULL, NULL);
	  /* IPA_NEITHER is the lattice bottom; no point looking further.  */
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (e->recursive_p ())
		looping = true;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "    Call to %s",
			   e->callee->dump_name ());
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = funct_state_summaries->get_create (y);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  /* A call that cannot lead to return makes its side
		     effects invisible; treat it as looping pure.  */
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (builtin_safe_for_const_function_p (&edge_looping,
							   y->decl))
		edge_state = IPA_CONST;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, e->caller, e->callee);
	      if (pure_const_state == IPA_NEITHER)
	        break;
	    }

	  /* Now process the indirect call.  */
          for (ie = w->indirect_calls;
	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      /* The target is unknown, so only the ECF flags recorded on
		 the edge can be trusted.  */
	      state_from_flags (&edge_state, &edge_looping,
			        ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
	        break;
	    }

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  /* Stores that cannot lead to return are invisible.  */
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		  /* Taking an address alone affects neither state.  */
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  /* Advance to the next node of this strongly connected region.  */
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = funct_state_summaries->get (w);

	  /* Interposable bodies and unknown indirect targets may call
	     free, so be conservative.  */
	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = funct_state_summaries->get (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->dump_name ());

	  /* Clip the propagated state by what was previously known about
	     this node.  */
	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	      this_state = w_l->state_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		remove_p |= ipa_make_function_const (w, this_looping, false);
		break;

	      case IPA_PURE:
		remove_p |= ipa_make_function_pure (w, this_looping, false);
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
   1848  1.1  mrg 
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  /* Reduce the graph: nodes of a strongly connected component are
     chained together via aux->next_cycle and processed as one region.  */
  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  /* A declaration already known nothrow cannot contribute
	     throwing behavior.  */
	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = funct_state_summaries->get_create (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it
		     cannot be interposed.
		     When callee is compiled with non-call exceptions we also
		     must check that the declaration is bound to current
		     body as other semantically equivalent body may still
		     throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (funct_state_summaries->get_create (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      /* Indirect calls throw unless the edge's ECF flags say
		 nothrow.  */
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  /* Advance to the next node of this strongly connected region.  */
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->dump_name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
   1960  1.1  mrg 
   1961  1.1  mrg /* Debugging function to dump state of malloc lattice.  */
   1962  1.1  mrg 
   1963  1.1  mrg DEBUG_FUNCTION
   1964  1.1  mrg static void
   1965  1.1  mrg dump_malloc_lattice (FILE *dump_file, const char *s)
   1966  1.1  mrg {
   1967  1.1  mrg   if (!dump_file)
   1968  1.1  mrg     return;
   1969  1.1  mrg 
   1970  1.1  mrg   fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
   1971  1.1  mrg   cgraph_node *node;
   1972  1.1  mrg   FOR_EACH_FUNCTION (node)
   1973  1.1  mrg     {
   1974  1.1  mrg       funct_state fs = funct_state_summaries->get (node);
   1975  1.1  mrg       if (fs)
   1976  1.1  mrg 	fprintf (dump_file, "%s: %s\n", node->dump_name (),
   1977  1.1  mrg 		 malloc_state_names[fs->malloc_state]);
   1978  1.1  mrg     }
   1979  1.1  mrg }
   1980  1.1  mrg 
   1981  1.1  mrg /* Propagate malloc attribute across the callgraph.  */
   1982  1.1  mrg 
   1983  1.1  mrg static void
   1984  1.1  mrg propagate_malloc (void)
   1985  1.1  mrg {
   1986  1.1  mrg   cgraph_node *node;
   1987  1.1  mrg   FOR_EACH_FUNCTION (node)
   1988  1.1  mrg     {
   1989  1.1  mrg       if (DECL_IS_MALLOC (node->decl))
   1990  1.1  mrg 	if (!funct_state_summaries->exists (node))
   1991  1.1  mrg 	  {
   1992  1.1  mrg 	    funct_state fs = funct_state_summaries->get_create (node);
   1993  1.1  mrg 	    fs->malloc_state = STATE_MALLOC;
   1994  1.1  mrg 	  }
   1995  1.1  mrg     }
   1996  1.1  mrg 
   1997  1.1  mrg   dump_malloc_lattice (dump_file, "Initial");
   1998  1.1  mrg   struct cgraph_node **order
   1999  1.1  mrg     = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
   2000  1.1  mrg   int order_pos = ipa_reverse_postorder (order);
   2001  1.1  mrg   bool changed = true;
   2002  1.1  mrg 
   2003  1.1  mrg   while (changed)
   2004  1.1  mrg     {
   2005  1.1  mrg       changed = false;
   2006  1.1  mrg       /* Walk in postorder.  */
   2007  1.1  mrg       for (int i = order_pos - 1; i >= 0; --i)
   2008  1.1  mrg 	{
   2009  1.1  mrg 	  cgraph_node *node = order[i];
   2010  1.1  mrg 	  if (node->alias
   2011  1.1  mrg 	      || !node->definition
   2012  1.1  mrg 	      || !funct_state_summaries->exists (node))
   2013  1.1  mrg 	    continue;
   2014  1.1  mrg 
   2015  1.1  mrg 	  funct_state l = funct_state_summaries->get (node);
   2016  1.1  mrg 
   2017  1.1  mrg 	  /* FIXME: add support for indirect-calls.  */
   2018  1.1  mrg 	  if (node->indirect_calls)
   2019  1.1  mrg 	    {
   2020  1.1  mrg 	      l->malloc_state = STATE_MALLOC_BOTTOM;
   2021  1.1  mrg 	      continue;
   2022  1.1  mrg 	    }
   2023  1.1  mrg 
   2024  1.1  mrg 	  if (node->get_availability () <= AVAIL_INTERPOSABLE)
   2025  1.1  mrg 	    {
   2026  1.1  mrg 	      l->malloc_state = STATE_MALLOC_BOTTOM;
   2027  1.1  mrg 	      continue;
   2028  1.1  mrg 	    }
   2029  1.1  mrg 
   2030  1.1  mrg 	  if (l->malloc_state == STATE_MALLOC_BOTTOM)
   2031  1.1  mrg 	    continue;
   2032  1.1  mrg 
   2033  1.1  mrg 	  auto_vec<cgraph_node *, 16> callees;
   2034  1.1  mrg 	  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
   2035  1.1  mrg 	    {
   2036  1.1  mrg 	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
   2037  1.1  mrg 	      if (es && es->is_return_callee_uncaptured)
   2038  1.1  mrg 		callees.safe_push (cs->callee);
   2039  1.1  mrg 	    }
   2040  1.1  mrg 
   2041  1.1  mrg 	  malloc_state_e new_state = l->malloc_state;
   2042  1.1  mrg 	  for (unsigned j = 0; j < callees.length (); j++)
   2043  1.1  mrg 	    {
   2044  1.1  mrg 	      cgraph_node *callee = callees[j];
   2045  1.1  mrg 	      if (!funct_state_summaries->exists (node))
   2046  1.1  mrg 		{
   2047  1.1  mrg 		  new_state = STATE_MALLOC_BOTTOM;
   2048  1.1  mrg 		  break;
   2049  1.1  mrg 		}
   2050  1.1  mrg 	      malloc_state_e callee_state
   2051  1.1  mrg 		= funct_state_summaries->get_create (callee)->malloc_state;
   2052  1.1  mrg 	      if (new_state < callee_state)
   2053  1.1  mrg 		new_state = callee_state;
   2054  1.1  mrg 	    }
   2055  1.1  mrg 	  if (new_state != l->malloc_state)
   2056  1.1  mrg 	    {
   2057  1.1  mrg 	      changed = true;
   2058  1.1  mrg 	      l->malloc_state = new_state;
   2059  1.1  mrg 	    }
   2060  1.1  mrg 	}
   2061  1.1  mrg     }
   2062  1.1  mrg 
   2063  1.1  mrg   FOR_EACH_DEFINED_FUNCTION (node)
   2064  1.1  mrg     if (funct_state_summaries->exists (node))
   2065  1.1  mrg       {
   2066  1.1  mrg 	funct_state l = funct_state_summaries->get (node);
   2067  1.1  mrg 	if (!node->alias
   2068  1.1  mrg 	    && l->malloc_state == STATE_MALLOC
   2069  1.1  mrg 	    && !node->inlined_to
   2070  1.1  mrg 	    && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
   2071  1.1  mrg 	  {
   2072  1.1  mrg 	    if (dump_file && (dump_flags & TDF_DETAILS))
   2073  1.1  mrg 	      fprintf (dump_file, "Function %s found to be malloc\n",
   2074  1.1  mrg 		       node->dump_name ());
   2075  1.1  mrg 
   2076  1.1  mrg 	    bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
   2077  1.1  mrg 	    node->set_malloc_flag (true);
   2078  1.1  mrg 	    if (!malloc_decl_p && warn_suggest_attribute_malloc)
   2079  1.1  mrg 		warn_function_malloc (node->decl);
   2080  1.1  mrg 	  }
   2081  1.1  mrg       }
   2082  1.1  mrg 
   2083  1.1  mrg   dump_malloc_lattice (dump_file, "after propagation");
   2084  1.1  mrg   ipa_free_postorder_info ();
   2085  1.1  mrg   free (order);
   2086  1.1  mrg }
   2087  1.1  mrg 
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */
   2090  1.1  mrg 
   2091  1.1  mrg unsigned int
   2092  1.1  mrg pass_ipa_pure_const::
   2093  1.1  mrg execute (function *)
   2094  1.1  mrg {
   2095  1.1  mrg   bool remove_p;
   2096  1.1  mrg 
   2097  1.1  mrg   /* Nothrow makes more function to not lead to return and improve
   2098  1.1  mrg      later analysis.  */
   2099  1.1  mrg   propagate_nothrow ();
   2100  1.1  mrg   propagate_malloc ();
   2101  1.1  mrg   remove_p = propagate_pure_const ();
   2102  1.1  mrg 
   2103  1.1  mrg   delete funct_state_summaries;
   2104  1.1  mrg   return remove_p ? TODO_remove_functions : 0;
   2105  1.1  mrg }
   2106  1.1  mrg 
   2107  1.1  mrg static bool
   2108  1.1  mrg gate_pure_const (void)
   2109  1.1  mrg {
   2110  1.1  mrg   return flag_ipa_pure_const || in_lto_p;
   2111  1.1  mrg }
   2112  1.1  mrg 
/* Construct the IPA pure/const pass, wiring the summary-generation and
   LTO streaming callbacks into the pass manager.  There is no
   per-function or per-variable transform stage; all work happens in
   execute.  init_p starts out false — presumably it is set once the
   summary machinery is initialized elsewhere in this file (confirm).  */
pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p (false) {}
   2125  1.1  mrg 
   2126  1.1  mrg ipa_opt_pass_d *
   2127  1.1  mrg make_pass_ipa_pure_const (gcc::context *ctxt)
   2128  1.1  mrg {
   2129  1.1  mrg   return new pass_ipa_pure_const (ctxt);
   2130  1.1  mrg }
   2131  1.1  mrg 
   2132  1.1  mrg /* Simple local pass for pure const discovery reusing the analysis from
   2133  1.1  mrg    ipa_pure_const.   This pass is effective when executed together with
   2134  1.1  mrg    other optimization passes in early optimization pass queue.  */
   2135  1.1  mrg 
   2136  1.1  mrg namespace {
   2137  1.1  mrg 
/* Pass descriptor for the GIMPLE-level local pure/const pass.  It
   shares the TV_IPA_PURE_CONST timevar with the IPA pass, since both
   reuse the same per-function analysis.  */
const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
   2150  1.1  mrg 
/* GIMPLE pass wrapping the single-function pure/const analysis so it
   can run inside the early optimization pass queue.  */
class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloning lets the pass manager schedule this pass more than once.  */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  /* Same gating as the IPA pass: -fipa-pure-const or LTO.  */
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
   2164  1.1  mrg 
/* Run the local pure/const analysis on FUN and directly apply any
   discovered const/pure, noreturn, nothrow and malloc flags to the
   current function.  Returns whatever TODOs execute_fixup_cfg deems
   necessary when something changed, 0 otherwise.  */
unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  /* Bail out early only when the optimization is skipped AND no
     -Wsuggest-attribute={const,pure} diagnostic could still fire;
     otherwise keep going for the warnings' sake.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  A function whose exit block has no
     predecessor edges never returns; TREE_THIS_VOLATILE means it is
     already declared noreturn.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
	changed = true;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
    }

  /* Apply the pure/const verdict of the analysis (IPA_NEITHER and
     friends fall through to the default and change nothing).  */
  switch (l->pure_const_state)
    {
    case IPA_CONST:
      changed |= ipa_make_function_const
		   (cgraph_node::get (current_function_decl), l->looping, true);
      break;

    case IPA_PURE:
      changed |= ipa_make_function_pure
		   (cgraph_node::get (current_function_decl), l->looping, true);
      break;

    default:
      break;
    }
  /* Nothrow discovery: set the flag when the analysis proved nothing
     can throw and the declaration does not already say so.  */
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  /* Malloc discovery; optionally suggest the attribute as well.  */
  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->dump_name ());
    }

  /* NOTE(review): l is freed with free(), so analyze_function
     presumably malloc/XNEWs it — confirm against its definition.  */
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
   2241  1.1  mrg 
   2242  1.1  mrg } // anon namespace
   2243  1.1  mrg 
   2244  1.1  mrg gimple_opt_pass *
   2245  1.1  mrg make_pass_local_pure_const (gcc::context *ctxt)
   2246  1.1  mrg {
   2247  1.1  mrg   return new pass_local_pure_const (ctxt);
   2248  1.1  mrg }
   2249  1.1  mrg 
   2250  1.1  mrg /* Emit noreturn warnings.  */
   2251  1.1  mrg 
   2252  1.1  mrg namespace {
   2253  1.1  mrg 
/* Pass descriptor for the noreturn-warning pass.  PROP_cfg is required
   because the pass inspects the exit block's predecessor edges.  */
const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
   2266  1.1  mrg 
   2267  1.1  mrg class pass_warn_function_noreturn : public gimple_opt_pass
   2268  1.1  mrg {
   2269  1.1  mrg public:
   2270  1.1  mrg   pass_warn_function_noreturn (gcc::context *ctxt)
   2271  1.1  mrg     : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
   2272  1.1  mrg   {}
   2273  1.1  mrg 
   2274  1.1  mrg   /* opt_pass methods: */
   2275  1.1  mrg   virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
   2276  1.1  mrg   virtual unsigned int execute (function *fun)
   2277  1.1  mrg     {
   2278  1.1  mrg       if (!TREE_THIS_VOLATILE (current_function_decl)
   2279  1.1  mrg 	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
   2280  1.1  mrg 	warn_function_noreturn (current_function_decl);
   2281  1.1  mrg       return 0;
   2282  1.1  mrg     }
   2283  1.1  mrg 
   2284  1.1  mrg }; // class pass_warn_function_noreturn
   2285  1.1  mrg 
   2286  1.1  mrg } // anon namespace
   2287  1.1  mrg 
   2288  1.1  mrg gimple_opt_pass *
   2289  1.1  mrg make_pass_warn_function_noreturn (gcc::context *ctxt)
   2290  1.1  mrg {
   2291  1.1  mrg   return new pass_warn_function_noreturn (ctxt);
   2292  1.1  mrg }
   2293  1.1  mrg 
/* Simple local pass for nothrow discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */
   2297  1.1  mrg 
   2298  1.1  mrg namespace {
   2299  1.1  mrg 
/* Pass descriptor for the early nothrow-discovery pass.  Shares the
   TV_IPA_PURE_CONST timevar with the related pure/const passes.  */
const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
   2312  1.1  mrg 
/* GIMPLE pass discovering nothrow functions early.  Gated only on
   'optimize' — per the comment in execute, it runs during lowering
   when availability information is not yet usable.  */
class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloning lets the pass manager schedule this pass more than once.  */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
   2326  1.1  mrg 
   2327  1.1  mrg unsigned int
   2328  1.1  mrg pass_nothrow::execute (function *)
   2329  1.1  mrg {
   2330  1.1  mrg   struct cgraph_node *node;
   2331  1.1  mrg   basic_block this_block;
   2332  1.1  mrg 
   2333  1.1  mrg   if (TREE_NOTHROW (current_function_decl))
   2334  1.1  mrg     return 0;
   2335  1.1  mrg 
   2336  1.1  mrg   node = cgraph_node::get (current_function_decl);
   2337  1.1  mrg 
   2338  1.1  mrg   /* We run during lowering, we cannot really use availability yet.  */
   2339  1.1  mrg   if (cgraph_node::get (current_function_decl)->get_availability ()
   2340  1.1  mrg       <= AVAIL_INTERPOSABLE)
   2341  1.1  mrg     {
   2342  1.1  mrg       if (dump_file)
   2343  1.1  mrg         fprintf (dump_file, "Function is interposable;"
   2344  1.1  mrg 	         " not analyzing.\n");
   2345  1.1  mrg       return true;
   2346  1.1  mrg     }
   2347  1.1  mrg 
   2348  1.1  mrg   FOR_EACH_BB_FN (this_block, cfun)
   2349  1.1  mrg     {
   2350  1.1  mrg       for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
   2351  1.1  mrg 	   !gsi_end_p (gsi);
   2352  1.1  mrg 	   gsi_next (&gsi))
   2353  1.1  mrg         if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
   2354  1.1  mrg 	  {
   2355  1.1  mrg 	    if (is_gimple_call (gsi_stmt (gsi)))
   2356  1.1  mrg 	      {
   2357  1.1  mrg 		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
   2358  1.1  mrg 		if (callee_t && recursive_call_p (current_function_decl,
   2359  1.1  mrg 						  callee_t))
   2360  1.1  mrg 		  continue;
   2361  1.1  mrg 	      }
   2362  1.1  mrg 
   2363  1.1  mrg 	    if (dump_file)
   2364  1.1  mrg 	      {
   2365  1.1  mrg 		fprintf (dump_file, "Statement can throw: ");
   2366  1.1  mrg 		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
   2367  1.1  mrg 	      }
   2368  1.1  mrg 	    return 0;
   2369  1.1  mrg 	  }
   2370  1.1  mrg     }
   2371  1.1  mrg 
   2372  1.1  mrg   node->set_nothrow_flag (true);
   2373  1.1  mrg 
   2374  1.1  mrg   bool cfg_changed = false;
   2375  1.1  mrg   if (self_recursive_p (node))
   2376  1.1  mrg     FOR_EACH_BB_FN (this_block, cfun)
   2377  1.1  mrg       if (gimple *g = last_stmt (this_block))
   2378  1.1  mrg 	if (is_gimple_call (g))
   2379  1.1  mrg 	  {
   2380  1.1  mrg 	    tree callee_t = gimple_call_fndecl (g);
   2381  1.1  mrg 	    if (callee_t
   2382  1.1  mrg 		&& recursive_call_p (current_function_decl, callee_t)
   2383  1.1  mrg 		&& maybe_clean_eh_stmt (g)
   2384  1.1  mrg 		&& gimple_purge_dead_eh_edges (this_block))
   2385  1.1  mrg 	      cfg_changed = true;
   2386  1.1  mrg 	  }
   2387  1.1  mrg 
   2388  1.1  mrg   if (dump_file)
   2389  1.1  mrg     fprintf (dump_file, "Function found to be nothrow: %s\n",
   2390  1.1  mrg 	     current_function_name ());
   2391  1.1  mrg   return cfg_changed ? TODO_cleanup_cfg : 0;
   2392  1.1  mrg }
   2393  1.1  mrg 
   2394  1.1  mrg } // anon namespace
   2395  1.1  mrg 
   2396  1.1  mrg gimple_opt_pass *
   2397  1.1  mrg make_pass_nothrow (gcc::context *ctxt)
   2398  1.1  mrg {
   2399  1.1  mrg   return new pass_nothrow (ctxt);
   2400           }
   2401