Home | History | Annotate | Line # | Download | only in gcc
      1  1.1  mrg /* Search for references that a functions loads or stores.
      2  1.1  mrg    Copyright (C) 2020-2022 Free Software Foundation, Inc.
      3  1.1  mrg    Contributed by David Cepelik and Jan Hubicka
      4  1.1  mrg 
      5  1.1  mrg This file is part of GCC.
      6  1.1  mrg 
      7  1.1  mrg GCC is free software; you can redistribute it and/or modify it under
      8  1.1  mrg the terms of the GNU General Public License as published by the Free
      9  1.1  mrg Software Foundation; either version 3, or (at your option) any later
     10  1.1  mrg version.
     11  1.1  mrg 
     12  1.1  mrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     13  1.1  mrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
     14  1.1  mrg FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     15  1.1  mrg for more details.
     16  1.1  mrg 
     17  1.1  mrg You should have received a copy of the GNU General Public License
     18  1.1  mrg along with GCC; see the file COPYING3.  If not see
     19  1.1  mrg <http://www.gnu.org/licenses/>.  */
     20  1.1  mrg 
     21  1.1  mrg /* Mod/ref pass records summary about loads and stores performed by the
     22  1.1  mrg    function.  This is later used by alias analysis to disambiguate memory
     23  1.1  mrg    accesses across function calls.
     24  1.1  mrg 
   This file contains a tree pass and an IPA pass.  Both perform the same
   analysis; however, the tree pass is executed during early and late
   optimization passes to propagate info downwards in the compilation order,
   while the IPA pass propagates across the callgraph, is able to handle
   recursion, and works on the whole program during link-time analysis.
     30  1.1  mrg 
   LTO mode differs from the local mode by not recording alias sets but types
   that are translated to alias sets later.  This is necessary in order to
   stream the information, because the alias sets are rebuilt at stream-in
   time and may not correspond to the ones seen during analysis.  For this
   reason part of the analysis is duplicated.
     36  1.1  mrg 
     37  1.1  mrg    The following information is computed
     38  1.1  mrg      1) load/store access tree described in ipa-modref-tree.h
     39  1.1  mrg 	This is used by tree-ssa-alias to disambiguate load/stores
     40  1.1  mrg      2) EAF flags used by points-to analysis (in tree-ssa-structalias).
     41  1.1  mrg 	and defined in tree-core.h.
     42  1.1  mrg    and stored to optimization_summaries.
     43  1.1  mrg 
     44  1.1  mrg    There are multiple summaries computed and used during the propagation:
     45  1.1  mrg      - summaries holds summaries from analysis to IPA propagation
     46  1.1  mrg        time.
     47  1.1  mrg      - summaries_lto is same as summaries but holds them in a format
     48  1.1  mrg        that can be streamed (as described above).
     - fnspec_summary holds fnspec strings for calls.  This is
       necessary because gimple_call_fnspec performs additional
       analysis beyond just looking at the callee fndecl.
     52  1.1  mrg      - escape_summary holds escape points for given call edge.
     53  1.1  mrg        That is a vector recording what function parameters
     54  1.1  mrg        may escape to a function call (and with what parameter index).  */
     55  1.1  mrg 
     56  1.1  mrg #include "config.h"
     57  1.1  mrg #include "system.h"
     58  1.1  mrg #include "coretypes.h"
     59  1.1  mrg #include "backend.h"
     60  1.1  mrg #include "tree.h"
     61  1.1  mrg #include "gimple.h"
     62  1.1  mrg #include "alloc-pool.h"
     63  1.1  mrg #include "tree-pass.h"
     64  1.1  mrg #include "gimple-iterator.h"
     65  1.1  mrg #include "tree-dfa.h"
     66  1.1  mrg #include "cgraph.h"
     67  1.1  mrg #include "ipa-utils.h"
     68  1.1  mrg #include "symbol-summary.h"
     69  1.1  mrg #include "gimple-pretty-print.h"
     70  1.1  mrg #include "gimple-walk.h"
     71  1.1  mrg #include "print-tree.h"
     72  1.1  mrg #include "tree-streamer.h"
     73  1.1  mrg #include "alias.h"
     74  1.1  mrg #include "calls.h"
     75  1.1  mrg #include "ipa-modref-tree.h"
     76  1.1  mrg #include "ipa-modref.h"
     77  1.1  mrg #include "value-range.h"
     78  1.1  mrg #include "ipa-prop.h"
     79  1.1  mrg #include "ipa-fnsummary.h"
     80  1.1  mrg #include "attr-fnspec.h"
     81  1.1  mrg #include "symtab-clones.h"
     82  1.1  mrg #include "gimple-ssa.h"
     83  1.1  mrg #include "tree-phinodes.h"
     84  1.1  mrg #include "tree-ssa-operands.h"
     85  1.1  mrg #include "ssa-iterators.h"
     86  1.1  mrg #include "stringpool.h"
     87  1.1  mrg #include "tree-ssanames.h"
     88  1.1  mrg #include "attribs.h"
     89  1.1  mrg #include "tree-cfg.h"
     90  1.1  mrg #include "tree-eh.h"
     91  1.1  mrg 
     92  1.1  mrg 
     93  1.1  mrg namespace {
     94  1.1  mrg 
/* Per-call-edge summary holding a copy of the call's fnspec string.
   We record fnspec specifiers for call edges since they depend on the
   actual gimple statement, not just the callee declaration.  The string
   is owned (malloced) by this object.  */

class fnspec_summary
{
public:
  /* Malloced fnspec string; NULL until one is recorded.  */
  char *fnspec;

  fnspec_summary () : fnspec (NULL)
  {
  }

  ~fnspec_summary ()
  {
    /* free (NULL) is a no-op, so no guard is needed here.  */
    free (fnspec);
  }
};
    113  1.1  mrg 
    114  1.1  mrg /* Summary holding fnspec string for a given call.  */
    115  1.1  mrg 
    116  1.1  mrg class fnspec_summaries_t : public call_summary <fnspec_summary *>
    117  1.1  mrg {
    118  1.1  mrg public:
    119  1.1  mrg   fnspec_summaries_t (symbol_table *symtab)
    120  1.1  mrg       : call_summary <fnspec_summary *> (symtab) {}
    121  1.1  mrg   /* Hook that is called by summary when an edge is duplicated.  */
    122  1.1  mrg   virtual void duplicate (cgraph_edge *,
    123  1.1  mrg 			  cgraph_edge *,
    124  1.1  mrg 			  fnspec_summary *src,
    125  1.1  mrg 			  fnspec_summary *dst)
    126  1.1  mrg   {
    127  1.1  mrg     dst->fnspec = xstrdup (src->fnspec);
    128  1.1  mrg   }
    129  1.1  mrg };
    130  1.1  mrg 
    131  1.1  mrg static fnspec_summaries_t *fnspec_summaries = NULL;
    132  1.1  mrg 
    133  1.1  mrg /* Escape summary holds a vector of param indexes that escape to
    134  1.1  mrg    a given call.  */
    135  1.1  mrg struct escape_entry
    136  1.1  mrg {
    137  1.1  mrg   /* Parameter that escapes at a given call.  */
    138  1.1  mrg   int parm_index;
    139  1.1  mrg   /* Argument it escapes to.  */
    140  1.1  mrg   unsigned int arg;
    141  1.1  mrg   /* Minimal flags known about the argument.  */
    142  1.1  mrg   eaf_flags_t min_flags;
    143  1.1  mrg   /* Does it escape directly or indirectly?  */
    144  1.1  mrg   bool direct;
    145  1.1  mrg };
    146  1.1  mrg 
    147  1.1  mrg /* Dump EAF flags.  */
    148  1.1  mrg 
    149  1.1  mrg static void
    150  1.1  mrg dump_eaf_flags (FILE *out, int flags, bool newline = true)
    151  1.1  mrg {
    152  1.1  mrg   if (flags & EAF_UNUSED)
    153  1.1  mrg     fprintf (out, " unused");
    154  1.1  mrg   if (flags & EAF_NO_DIRECT_CLOBBER)
    155  1.1  mrg     fprintf (out, " no_direct_clobber");
    156  1.1  mrg   if (flags & EAF_NO_INDIRECT_CLOBBER)
    157  1.1  mrg     fprintf (out, " no_indirect_clobber");
    158  1.1  mrg   if (flags & EAF_NO_DIRECT_ESCAPE)
    159  1.1  mrg     fprintf (out, " no_direct_escape");
    160  1.1  mrg   if (flags & EAF_NO_INDIRECT_ESCAPE)
    161  1.1  mrg     fprintf (out, " no_indirect_escape");
    162  1.1  mrg   if (flags & EAF_NOT_RETURNED_DIRECTLY)
    163  1.1  mrg     fprintf (out, " not_returned_directly");
    164  1.1  mrg   if (flags & EAF_NOT_RETURNED_INDIRECTLY)
    165  1.1  mrg     fprintf (out, " not_returned_indirectly");
    166  1.1  mrg   if (flags & EAF_NO_DIRECT_READ)
    167  1.1  mrg     fprintf (out, " no_direct_read");
    168  1.1  mrg   if (flags & EAF_NO_INDIRECT_READ)
    169  1.1  mrg     fprintf (out, " no_indirect_read");
    170  1.1  mrg   if (newline)
    171  1.1  mrg   fprintf (out, "\n");
    172  1.1  mrg }
    173  1.1  mrg 
    174  1.1  mrg struct escape_summary
    175  1.1  mrg {
    176  1.1  mrg   auto_vec <escape_entry> esc;
    177  1.1  mrg   void dump (FILE *out)
    178  1.1  mrg   {
    179  1.1  mrg     for (unsigned int i = 0; i < esc.length (); i++)
    180  1.1  mrg       {
    181  1.1  mrg 	fprintf (out, "   parm %i arg %i %s min:",
    182  1.1  mrg 		 esc[i].parm_index,
    183  1.1  mrg 		 esc[i].arg,
    184  1.1  mrg 		 esc[i].direct ? "(direct)" : "(indirect)");
    185  1.1  mrg 	dump_eaf_flags (out, esc[i].min_flags, false);
    186  1.1  mrg       }
    187  1.1  mrg     fprintf (out, "\n");
    188  1.1  mrg   }
    189  1.1  mrg };
    190  1.1  mrg 
    191  1.1  mrg class escape_summaries_t : public call_summary <escape_summary *>
    192  1.1  mrg {
    193  1.1  mrg public:
    194  1.1  mrg   escape_summaries_t (symbol_table *symtab)
    195  1.1  mrg       : call_summary <escape_summary *> (symtab) {}
    196  1.1  mrg   /* Hook that is called by summary when an edge is duplicated.  */
    197  1.1  mrg   virtual void duplicate (cgraph_edge *,
    198  1.1  mrg 			  cgraph_edge *,
    199  1.1  mrg 			  escape_summary *src,
    200  1.1  mrg 			  escape_summary *dst)
    201  1.1  mrg   {
    202  1.1  mrg     dst->esc = src->esc.copy ();
    203  1.1  mrg   }
    204  1.1  mrg };
    205  1.1  mrg 
    206  1.1  mrg static escape_summaries_t *escape_summaries = NULL;
    207  1.1  mrg 
    208  1.1  mrg }  /* ANON namespace: GTY annotated summaries can not be anonymous.  */
    209  1.1  mrg 
    210  1.1  mrg 
    211  1.1  mrg /* Class (from which there is one global instance) that holds modref summaries
    212  1.1  mrg    for all analyzed functions.  */
    213  1.1  mrg 
    214  1.1  mrg class GTY((user)) modref_summaries
    215  1.1  mrg   : public fast_function_summary <modref_summary *, va_gc>
    216  1.1  mrg {
    217  1.1  mrg public:
    218  1.1  mrg   modref_summaries (symbol_table *symtab)
    219  1.1  mrg       : fast_function_summary <modref_summary *, va_gc> (symtab) {}
    220  1.1  mrg   virtual void insert (cgraph_node *, modref_summary *state);
    221  1.1  mrg   virtual void duplicate (cgraph_node *src_node,
    222  1.1  mrg 			  cgraph_node *dst_node,
    223  1.1  mrg 			  modref_summary *src_data,
    224  1.1  mrg 			  modref_summary *dst_data);
    225  1.1  mrg   static modref_summaries *create_ggc (symbol_table *symtab)
    226  1.1  mrg   {
    227  1.1  mrg     return new (ggc_alloc_no_dtor<modref_summaries> ())
    228  1.1  mrg 	     modref_summaries (symtab);
    229  1.1  mrg   }
    230  1.1  mrg };
    231  1.1  mrg 
    232  1.1  mrg class modref_summary_lto;
    233  1.1  mrg 
    234  1.1  mrg /* Class (from which there is one global instance) that holds modref summaries
    235  1.1  mrg    for all analyzed functions.  */
    236  1.1  mrg 
    237  1.1  mrg class GTY((user)) modref_summaries_lto
    238  1.1  mrg   : public fast_function_summary <modref_summary_lto *, va_gc>
    239  1.1  mrg {
    240  1.1  mrg public:
    241  1.1  mrg   modref_summaries_lto (symbol_table *symtab)
    242  1.1  mrg       : fast_function_summary <modref_summary_lto *, va_gc> (symtab),
    243  1.1  mrg 	propagated (false) {}
    244  1.1  mrg   virtual void insert (cgraph_node *, modref_summary_lto *state);
    245  1.1  mrg   virtual void duplicate (cgraph_node *src_node,
    246  1.1  mrg 			  cgraph_node *dst_node,
    247  1.1  mrg 			  modref_summary_lto *src_data,
    248  1.1  mrg 			  modref_summary_lto *dst_data);
    249  1.1  mrg   static modref_summaries_lto *create_ggc (symbol_table *symtab)
    250  1.1  mrg   {
    251  1.1  mrg     return new (ggc_alloc_no_dtor<modref_summaries_lto> ())
    252  1.1  mrg 	     modref_summaries_lto (symtab);
    253  1.1  mrg   }
    254  1.1  mrg   bool propagated;
    255  1.1  mrg };
    256  1.1  mrg 
    257  1.1  mrg /* Global variable holding all modref summaries
    258  1.1  mrg    (from analysis to IPA propagation time).  */
    259  1.1  mrg 
    260  1.1  mrg static GTY(()) fast_function_summary <modref_summary *, va_gc>
    261  1.1  mrg 	 *summaries;
    262  1.1  mrg 
    263  1.1  mrg /* Global variable holding all modref optimization summaries
    264  1.1  mrg    (from IPA propagation time or used by local optimization pass).  */
    265  1.1  mrg 
    266  1.1  mrg static GTY(()) fast_function_summary <modref_summary *, va_gc>
    267  1.1  mrg 	 *optimization_summaries;
    268  1.1  mrg 
    269  1.1  mrg /* LTO summaries hold info from analysis to LTO streaming or from LTO
    270  1.1  mrg    stream-in through propagation to LTO stream-out.  */
    271  1.1  mrg 
    272  1.1  mrg static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
    273  1.1  mrg 	 *summaries_lto;
    274  1.1  mrg 
    275  1.1  mrg /* Summary for a single function which this pass produces.  */
    276  1.1  mrg 
    277  1.1  mrg modref_summary::modref_summary ()
    278  1.1  mrg   : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
    279  1.1  mrg     writes_errno (false), side_effects (false), nondeterministic (false),
    280  1.1  mrg     calls_interposable (false), global_memory_read (false),
    281  1.1  mrg     global_memory_written (false), try_dse (false)
    282  1.1  mrg {
    283  1.1  mrg }
    284  1.1  mrg 
    285  1.1  mrg modref_summary::~modref_summary ()
    286  1.1  mrg {
    287  1.1  mrg   if (loads)
    288  1.1  mrg     ggc_delete (loads);
    289  1.1  mrg   if (stores)
    290  1.1  mrg     ggc_delete (stores);
    291  1.1  mrg }
    292  1.1  mrg 
    293  1.1  mrg /* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
    294  1.1  mrg    useful to track.  If returns_void is true moreover clear
    295  1.1  mrg    EAF_NOT_RETURNED.  */
    296  1.1  mrg static int
    297  1.1  mrg remove_useless_eaf_flags (int eaf_flags, int ecf_flags, bool returns_void)
    298  1.1  mrg {
    299  1.1  mrg   if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
    300  1.1  mrg     eaf_flags &= ~implicit_const_eaf_flags;
    301  1.1  mrg   else if (ecf_flags & ECF_PURE)
    302  1.1  mrg     eaf_flags &= ~implicit_pure_eaf_flags;
    303  1.1  mrg   else if ((ecf_flags & ECF_NORETURN) || returns_void)
    304  1.1  mrg     eaf_flags &= ~(EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY);
    305  1.1  mrg   return eaf_flags;
    306  1.1  mrg }
    307  1.1  mrg 
    308  1.1  mrg /* Return true if FLAGS holds some useful information.  */
    309  1.1  mrg 
    310  1.1  mrg static bool
    311  1.1  mrg eaf_flags_useful_p (vec <eaf_flags_t> &flags, int ecf_flags)
    312  1.1  mrg {
    313  1.1  mrg   for (unsigned i = 0; i < flags.length (); i++)
    314  1.1  mrg     if (remove_useless_eaf_flags (flags[i], ecf_flags, false))
    315  1.1  mrg       return true;
    316  1.1  mrg   return false;
    317  1.1  mrg }
    318  1.1  mrg 
    319  1.1  mrg /* Return true if summary is potentially useful for optimization.
    320  1.1  mrg    If CHECK_FLAGS is false assume that arg_flags are useful.  */
    321  1.1  mrg 
    322  1.1  mrg bool
    323  1.1  mrg modref_summary::useful_p (int ecf_flags, bool check_flags)
    324  1.1  mrg {
    325  1.1  mrg   if (arg_flags.length () && !check_flags)
    326  1.1  mrg     return true;
    327  1.1  mrg   if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
    328  1.1  mrg     return true;
    329  1.1  mrg   arg_flags.release ();
    330  1.1  mrg   if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
    331  1.1  mrg     return true;
    332  1.1  mrg   if (check_flags
    333  1.1  mrg       && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
    334  1.1  mrg     return true;
    335  1.1  mrg   if (ecf_flags & ECF_CONST)
    336  1.1  mrg     return ((!side_effects || !nondeterministic)
    337  1.1  mrg 	    && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
    338  1.1  mrg   if (loads && !loads->every_base)
    339  1.1  mrg     return true;
    340  1.1  mrg   else
    341  1.1  mrg     kills.release ();
    342  1.1  mrg   if (ecf_flags & ECF_PURE)
    343  1.1  mrg     return ((!side_effects || !nondeterministic)
    344  1.1  mrg 	    && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
    345  1.1  mrg   return stores && !stores->every_base;
    346  1.1  mrg }
    347  1.1  mrg 
    348  1.1  mrg /* Single function summary used for LTO.  */
    349  1.1  mrg 
    350  1.1  mrg typedef modref_tree <tree> modref_records_lto;
    351  1.1  mrg struct GTY(()) modref_summary_lto
    352  1.1  mrg {
    353  1.1  mrg   /* Load and stores in functions using types rather then alias sets.
    354  1.1  mrg 
    355  1.1  mrg      This is necessary to make the information streamable for LTO but is also
    356  1.1  mrg      more verbose and thus more likely to hit the limits.  */
    357  1.1  mrg   modref_records_lto *loads;
    358  1.1  mrg   modref_records_lto *stores;
    359  1.1  mrg   auto_vec<modref_access_node> GTY((skip)) kills;
    360  1.1  mrg   auto_vec<eaf_flags_t> GTY((skip)) arg_flags;
    361  1.1  mrg   eaf_flags_t retslot_flags;
    362  1.1  mrg   eaf_flags_t static_chain_flags;
    363  1.1  mrg   unsigned writes_errno : 1;
    364  1.1  mrg   unsigned side_effects : 1;
    365  1.1  mrg   unsigned nondeterministic : 1;
    366  1.1  mrg   unsigned calls_interposable : 1;
    367  1.1  mrg 
    368  1.1  mrg   modref_summary_lto ();
    369  1.1  mrg   ~modref_summary_lto ();
    370  1.1  mrg   void dump (FILE *);
    371  1.1  mrg   bool useful_p (int ecf_flags, bool check_flags = true);
    372  1.1  mrg };
    373  1.1  mrg 
    374  1.1  mrg /* Summary for a single function which this pass produces.  */
    375  1.1  mrg 
    376  1.1  mrg modref_summary_lto::modref_summary_lto ()
    377  1.1  mrg   : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
    378  1.1  mrg     writes_errno (false), side_effects (false), nondeterministic (false),
    379  1.1  mrg     calls_interposable (false)
    380  1.1  mrg {
    381  1.1  mrg }
    382  1.1  mrg 
    383  1.1  mrg modref_summary_lto::~modref_summary_lto ()
    384  1.1  mrg {
    385  1.1  mrg   if (loads)
    386  1.1  mrg     ggc_delete (loads);
    387  1.1  mrg   if (stores)
    388  1.1  mrg     ggc_delete (stores);
    389  1.1  mrg }
    390  1.1  mrg 
    391  1.1  mrg 
    392  1.1  mrg /* Return true if lto summary is potentially useful for optimization.
    393  1.1  mrg    If CHECK_FLAGS is false assume that arg_flags are useful.  */
    394  1.1  mrg 
    395  1.1  mrg bool
    396  1.1  mrg modref_summary_lto::useful_p (int ecf_flags, bool check_flags)
    397  1.1  mrg {
    398  1.1  mrg   if (arg_flags.length () && !check_flags)
    399  1.1  mrg     return true;
    400  1.1  mrg   if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
    401  1.1  mrg     return true;
    402  1.1  mrg   arg_flags.release ();
    403  1.1  mrg   if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
    404  1.1  mrg     return true;
    405  1.1  mrg   if (check_flags
    406  1.1  mrg       && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
    407  1.1  mrg     return true;
    408  1.1  mrg   if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
    409  1.1  mrg     return ((!side_effects || !nondeterministic)
    410  1.1  mrg 	    && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
    411  1.1  mrg   if (loads && !loads->every_base)
    412  1.1  mrg     return true;
    413  1.1  mrg   else
    414  1.1  mrg     kills.release ();
    415  1.1  mrg   if (ecf_flags & ECF_PURE)
    416  1.1  mrg     return ((!side_effects || !nondeterministic)
    417  1.1  mrg 	    && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
    418  1.1  mrg   return stores && !stores->every_base;
    419  1.1  mrg }
    420  1.1  mrg 
    421  1.1  mrg /* Dump records TT to OUT.  */
    422  1.1  mrg 
    423  1.1  mrg static void
    424  1.1  mrg dump_records (modref_records *tt, FILE *out)
    425  1.1  mrg {
    426  1.1  mrg   if (tt->every_base)
    427  1.1  mrg     {
    428  1.1  mrg       fprintf (out, "    Every base\n");
    429  1.1  mrg       return;
    430  1.1  mrg     }
    431  1.1  mrg   size_t i;
    432  1.1  mrg   modref_base_node <alias_set_type> *n;
    433  1.1  mrg   FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
    434  1.1  mrg     {
    435  1.1  mrg       fprintf (out, "      Base %i: alias set %i\n", (int)i, n->base);
    436  1.1  mrg       if (n->every_ref)
    437  1.1  mrg 	{
    438  1.1  mrg 	  fprintf (out, "      Every ref\n");
    439  1.1  mrg 	  continue;
    440  1.1  mrg 	}
    441  1.1  mrg       size_t j;
    442  1.1  mrg       modref_ref_node <alias_set_type> *r;
    443  1.1  mrg       FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
    444  1.1  mrg 	{
    445  1.1  mrg 	  fprintf (out, "        Ref %i: alias set %i\n", (int)j, r->ref);
    446  1.1  mrg 	  if (r->every_access)
    447  1.1  mrg 	    {
    448  1.1  mrg 	      fprintf (out, "          Every access\n");
    449  1.1  mrg 	      continue;
    450  1.1  mrg 	    }
    451  1.1  mrg 	  size_t k;
    452  1.1  mrg 	  modref_access_node *a;
    453  1.1  mrg 	  FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
    454  1.1  mrg 	    {
    455  1.1  mrg 	      fprintf (out, "          access:");
    456  1.1  mrg 	      a->dump (out);
    457  1.1  mrg 	    }
    458  1.1  mrg 	}
    459  1.1  mrg     }
    460  1.1  mrg }
    461  1.1  mrg 
    462  1.1  mrg /* Dump records TT to OUT.  */
    463  1.1  mrg 
    464  1.1  mrg static void
    465  1.1  mrg dump_lto_records (modref_records_lto *tt, FILE *out)
    466  1.1  mrg {
    467  1.1  mrg   if (tt->every_base)
    468  1.1  mrg     {
    469  1.1  mrg       fprintf (out, "    Every base\n");
    470  1.1  mrg       return;
    471  1.1  mrg     }
    472  1.1  mrg   size_t i;
    473  1.1  mrg   modref_base_node <tree> *n;
    474  1.1  mrg   FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
    475  1.1  mrg     {
    476  1.1  mrg       fprintf (out, "      Base %i:", (int)i);
    477  1.1  mrg       print_generic_expr (dump_file, n->base);
    478  1.1  mrg       fprintf (out, " (alias set %i)\n",
    479  1.1  mrg 	       n->base ? get_alias_set (n->base) : 0);
    480  1.1  mrg       if (n->every_ref)
    481  1.1  mrg 	{
    482  1.1  mrg 	  fprintf (out, "      Every ref\n");
    483  1.1  mrg 	  continue;
    484  1.1  mrg 	}
    485  1.1  mrg       size_t j;
    486  1.1  mrg       modref_ref_node <tree> *r;
    487  1.1  mrg       FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
    488  1.1  mrg 	{
    489  1.1  mrg 	  fprintf (out, "        Ref %i:", (int)j);
    490  1.1  mrg 	  print_generic_expr (dump_file, r->ref);
    491  1.1  mrg 	  fprintf (out, " (alias set %i)\n",
    492  1.1  mrg 		   r->ref ? get_alias_set (r->ref) : 0);
    493  1.1  mrg 	  if (r->every_access)
    494  1.1  mrg 	    {
    495  1.1  mrg 	      fprintf (out, "          Every access\n");
    496  1.1  mrg 	      continue;
    497  1.1  mrg 	    }
    498  1.1  mrg 	  size_t k;
    499  1.1  mrg 	  modref_access_node *a;
    500  1.1  mrg 	  FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
    501  1.1  mrg 	    {
    502  1.1  mrg 	      fprintf (out, "          access:");
    503  1.1  mrg 	      a->dump (out);
    504  1.1  mrg 	    }
    505  1.1  mrg 	}
    506  1.1  mrg     }
    507  1.1  mrg }
    508  1.1  mrg 
    509  1.1  mrg /* Dump all escape points of NODE to OUT.  */
    510  1.1  mrg 
    511  1.1  mrg static void
    512  1.1  mrg dump_modref_edge_summaries (FILE *out, cgraph_node *node, int depth)
    513  1.1  mrg {
    514  1.1  mrg   int i = 0;
    515  1.1  mrg   if (!escape_summaries)
    516  1.1  mrg     return;
    517  1.1  mrg   for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
    518  1.1  mrg     {
    519  1.1  mrg       class escape_summary *sum = escape_summaries->get (e);
    520  1.1  mrg       if (sum)
    521  1.1  mrg 	{
    522  1.1  mrg 	  fprintf (out, "%*sIndirect call %i in %s escapes:",
    523  1.1  mrg 		   depth, "", i, node->dump_name ());
    524  1.1  mrg 	  sum->dump (out);
    525  1.1  mrg 	}
    526  1.1  mrg       i++;
    527  1.1  mrg     }
    528  1.1  mrg   for (cgraph_edge *e = node->callees; e; e = e->next_callee)
    529  1.1  mrg     {
    530  1.1  mrg       if (!e->inline_failed)
    531  1.1  mrg 	dump_modref_edge_summaries (out, e->callee, depth + 1);
    532  1.1  mrg       class escape_summary *sum = escape_summaries->get (e);
    533  1.1  mrg       if (sum)
    534  1.1  mrg 	{
    535  1.1  mrg 	  fprintf (out, "%*sCall %s->%s escapes:", depth, "",
    536  1.1  mrg 		   node->dump_name (), e->callee->dump_name ());
    537  1.1  mrg 	  sum->dump (out);
    538  1.1  mrg 	}
    539  1.1  mrg       class fnspec_summary *fsum = fnspec_summaries->get (e);
    540  1.1  mrg       if (fsum)
    541  1.1  mrg 	{
    542  1.1  mrg 	  fprintf (out, "%*sCall %s->%s fnspec: %s\n", depth, "",
    543  1.1  mrg 		   node->dump_name (), e->callee->dump_name (),
    544  1.1  mrg 		   fsum->fnspec);
    545  1.1  mrg 	}
    546  1.1  mrg     }
    547  1.1  mrg }
    548  1.1  mrg 
    549  1.1  mrg /* Remove all call edge summaries associated with NODE.  */
    550  1.1  mrg 
    551  1.1  mrg static void
    552  1.1  mrg remove_modref_edge_summaries (cgraph_node *node)
    553  1.1  mrg {
    554  1.1  mrg   if (!escape_summaries)
    555  1.1  mrg     return;
    556  1.1  mrg   for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
    557  1.1  mrg     escape_summaries->remove (e);
    558  1.1  mrg   for (cgraph_edge *e = node->callees; e; e = e->next_callee)
    559  1.1  mrg     {
    560  1.1  mrg       if (!e->inline_failed)
    561  1.1  mrg 	remove_modref_edge_summaries (e->callee);
    562  1.1  mrg       escape_summaries->remove (e);
    563  1.1  mrg       fnspec_summaries->remove (e);
    564  1.1  mrg     }
    565  1.1  mrg }
    566  1.1  mrg 
    567  1.1  mrg /* Dump summary.  */
    568  1.1  mrg 
    569  1.1  mrg void
    570  1.1  mrg modref_summary::dump (FILE *out)
    571  1.1  mrg {
    572  1.1  mrg   if (loads)
    573  1.1  mrg     {
    574  1.1  mrg       fprintf (out, "  loads:\n");
    575  1.1  mrg       dump_records (loads, out);
    576  1.1  mrg     }
    577  1.1  mrg   if (stores)
    578  1.1  mrg     {
    579  1.1  mrg       fprintf (out, "  stores:\n");
    580  1.1  mrg       dump_records (stores, out);
    581  1.1  mrg     }
    582  1.1  mrg   if (kills.length ())
    583  1.1  mrg     {
    584  1.1  mrg       fprintf (out, "  kills:\n");
    585  1.1  mrg       for (auto kill : kills)
    586  1.1  mrg 	{
    587  1.1  mrg 	  fprintf (out, "    ");
    588  1.1  mrg 	  kill.dump (out);
    589  1.1  mrg 	}
    590  1.1  mrg     }
    591  1.1  mrg   if (writes_errno)
    592  1.1  mrg     fprintf (out, "  Writes errno\n");
    593  1.1  mrg   if (side_effects)
    594  1.1  mrg     fprintf (out, "  Side effects\n");
    595  1.1  mrg   if (nondeterministic)
    596  1.1  mrg     fprintf (out, "  Nondeterministic\n");
    597  1.1  mrg   if (calls_interposable)
    598  1.1  mrg     fprintf (out, "  Calls interposable\n");
    599  1.1  mrg   if (global_memory_read)
    600  1.1  mrg     fprintf (out, "  Global memory read\n");
    601  1.1  mrg   if (global_memory_written)
    602  1.1  mrg     fprintf (out, "  Global memory written\n");
    603  1.1  mrg   if (try_dse)
    604  1.1  mrg     fprintf (out, "  Try dse\n");
    605  1.1  mrg   if (arg_flags.length ())
    606  1.1  mrg     {
    607  1.1  mrg       for (unsigned int i = 0; i < arg_flags.length (); i++)
    608  1.1  mrg 	if (arg_flags[i])
    609  1.1  mrg 	  {
    610  1.1  mrg 	    fprintf (out, "  parm %i flags:", i);
    611  1.1  mrg 	    dump_eaf_flags (out, arg_flags[i]);
    612  1.1  mrg 	  }
    613  1.1  mrg     }
    614  1.1  mrg   if (retslot_flags)
    615  1.1  mrg     {
    616  1.1  mrg       fprintf (out, "  Retslot flags:");
    617  1.1  mrg       dump_eaf_flags (out, retslot_flags);
    618  1.1  mrg     }
    619  1.1  mrg   if (static_chain_flags)
    620  1.1  mrg     {
    621  1.1  mrg       fprintf (out, "  Static chain flags:");
    622  1.1  mrg       dump_eaf_flags (out, static_chain_flags);
    623  1.1  mrg     }
    624  1.1  mrg }
    625  1.1  mrg 
    626  1.1  mrg /* Dump summary.  */
    627  1.1  mrg 
    628  1.1  mrg void
    629  1.1  mrg modref_summary_lto::dump (FILE *out)
    630  1.1  mrg {
    631  1.1  mrg   fprintf (out, "  loads:\n");
    632  1.1  mrg   dump_lto_records (loads, out);
    633  1.1  mrg   fprintf (out, "  stores:\n");
    634  1.1  mrg   dump_lto_records (stores, out);
    635  1.1  mrg   if (kills.length ())
    636  1.1  mrg     {
    637  1.1  mrg       fprintf (out, "  kills:\n");
    638  1.1  mrg       for (auto kill : kills)
    639  1.1  mrg 	{
    640  1.1  mrg 	  fprintf (out, "    ");
    641  1.1  mrg 	  kill.dump (out);
    642  1.1  mrg 	}
    643  1.1  mrg     }
    644  1.1  mrg   if (writes_errno)
    645  1.1  mrg     fprintf (out, "  Writes errno\n");
    646  1.1  mrg   if (side_effects)
    647  1.1  mrg     fprintf (out, "  Side effects\n");
    648  1.1  mrg   if (nondeterministic)
    649  1.1  mrg     fprintf (out, "  Nondeterministic\n");
    650  1.1  mrg   if (calls_interposable)
    651  1.1  mrg     fprintf (out, "  Calls interposable\n");
    652  1.1  mrg   if (arg_flags.length ())
    653  1.1  mrg     {
    654  1.1  mrg       for (unsigned int i = 0; i < arg_flags.length (); i++)
    655  1.1  mrg 	if (arg_flags[i])
    656  1.1  mrg 	  {
    657  1.1  mrg 	    fprintf (out, "  parm %i flags:", i);
    658  1.1  mrg 	    dump_eaf_flags (out, arg_flags[i]);
    659  1.1  mrg 	  }
    660  1.1  mrg     }
    661  1.1  mrg   if (retslot_flags)
    662  1.1  mrg     {
    663  1.1  mrg       fprintf (out, "  Retslot flags:");
    664  1.1  mrg       dump_eaf_flags (out, retslot_flags);
    665  1.1  mrg     }
    666  1.1  mrg   if (static_chain_flags)
    667  1.1  mrg     {
    668  1.1  mrg       fprintf (out, "  Static chain flags:");
    669  1.1  mrg       dump_eaf_flags (out, static_chain_flags);
    670  1.1  mrg     }
    671  1.1  mrg }
    672  1.1  mrg 
    673  1.1  mrg /* Called after summary is produced and before it is used by local analysis.
    674  1.1  mrg    Can be called multiple times in case summary needs to update signature.
    675  1.1  mrg    FUN is decl of function summary is attached to.  */
    676  1.1  mrg void
    677  1.1  mrg modref_summary::finalize (tree fun)
    678  1.1  mrg {
    679  1.1  mrg   global_memory_read = !loads || loads->global_access_p ();
    680  1.1  mrg   global_memory_written = !stores || stores->global_access_p ();
    681  1.1  mrg 
    682  1.1  mrg   /* We can do DSE if we know function has no side effects and
    683  1.1  mrg      we can analyze all stores.  Disable dse if there are too many
    684  1.1  mrg      stores to try.  */
    685  1.1  mrg   if (side_effects || global_memory_written || writes_errno)
    686  1.1  mrg     try_dse = false;
    687  1.1  mrg   else
    688  1.1  mrg     {
    689  1.1  mrg       try_dse = true;
    690  1.1  mrg       size_t i, j, k;
    691  1.1  mrg       int num_tests = 0, max_tests
    692  1.1  mrg 	= opt_for_fn (fun, param_modref_max_tests);
    693  1.1  mrg       modref_base_node <alias_set_type> *base_node;
    694  1.1  mrg       modref_ref_node <alias_set_type> *ref_node;
    695  1.1  mrg       modref_access_node *access_node;
    696  1.1  mrg       FOR_EACH_VEC_SAFE_ELT (stores->bases, i, base_node)
    697  1.1  mrg 	{
    698  1.1  mrg 	  if (base_node->every_ref)
    699  1.1  mrg 	    {
    700  1.1  mrg 	      try_dse = false;
    701  1.1  mrg 	      break;
    702  1.1  mrg 	    }
    703  1.1  mrg 	  FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
    704  1.1  mrg 	    {
    705  1.1  mrg 	      if (base_node->every_ref)
    706  1.1  mrg 		{
    707  1.1  mrg 		  try_dse = false;
    708  1.1  mrg 		  break;
    709  1.1  mrg 		}
    710  1.1  mrg 	      FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
    711  1.1  mrg 		if (num_tests++ > max_tests
    712  1.1  mrg 		    || !access_node->parm_offset_known)
    713  1.1  mrg 		  {
    714  1.1  mrg 		    try_dse = false;
    715  1.1  mrg 		    break;
    716  1.1  mrg 		  }
    717  1.1  mrg 	      if (!try_dse)
    718  1.1  mrg 		break;
    719  1.1  mrg 	    }
    720  1.1  mrg 	  if (!try_dse)
    721  1.1  mrg 	    break;
    722  1.1  mrg 	}
    723  1.1  mrg     }
    724  1.1  mrg   if (loads->every_base)
    725  1.1  mrg     load_accesses = 1;
    726  1.1  mrg   else
    727  1.1  mrg     {
    728  1.1  mrg       load_accesses = 0;
    729  1.1  mrg       for (auto base_node : loads->bases)
    730  1.1  mrg 	{
    731  1.1  mrg 	  if (base_node->every_ref)
    732  1.1  mrg 	    load_accesses++;
    733  1.1  mrg 	  else
    734  1.1  mrg 	    for (auto ref_node : base_node->refs)
    735  1.1  mrg 	      if (ref_node->every_access)
    736  1.1  mrg 		load_accesses++;
    737  1.1  mrg 	      else
    738  1.1  mrg 		load_accesses += ref_node->accesses->length ();
    739  1.1  mrg 	}
    740  1.1  mrg     }
    741  1.1  mrg }
    742  1.1  mrg 
    743  1.1  mrg /* Get function summary for FUNC if it exists, return NULL otherwise.  */
    744  1.1  mrg 
    745  1.1  mrg modref_summary *
    746  1.1  mrg get_modref_function_summary (cgraph_node *func)
    747  1.1  mrg {
    748  1.1  mrg   /* Avoid creation of the summary too early (e.g. when front-end calls us).  */
    749  1.1  mrg   if (!optimization_summaries)
    750  1.1  mrg     return NULL;
    751  1.1  mrg 
    752  1.1  mrg   /* A single function body may be represented by multiple symbols with
    753  1.1  mrg      different visibility.  For example, if FUNC is an interposable alias,
    754  1.1  mrg      we don't want to return anything, even if we have summary for the target
    755  1.1  mrg      function.  */
    756  1.1  mrg   enum availability avail;
    757  1.1  mrg   func = func->ultimate_alias_target
    758  1.1  mrg 		 (&avail, current_function_decl ?
    759  1.1  mrg 			  cgraph_node::get (current_function_decl) : NULL);
    760  1.1  mrg   if (avail <= AVAIL_INTERPOSABLE)
    761  1.1  mrg     return NULL;
    762  1.1  mrg 
    763  1.1  mrg   modref_summary *r = optimization_summaries->get (func);
    764  1.1  mrg   return r;
    765  1.1  mrg }
    766  1.1  mrg 
    767  1.1  mrg /* Get function summary for CALL if it exists, return NULL otherwise.
    768  1.1  mrg    If non-null set interposed to indicate whether function may not
    769  1.1  mrg    bind to current def.  In this case sometimes loads from function
    770  1.1  mrg    needs to be ignored.  */
    771  1.1  mrg 
    772  1.1  mrg modref_summary *
    773  1.1  mrg get_modref_function_summary (gcall *call, bool *interposed)
    774  1.1  mrg {
    775  1.1  mrg   tree callee = gimple_call_fndecl (call);
    776  1.1  mrg   if (!callee)
    777  1.1  mrg     return NULL;
    778  1.1  mrg   struct cgraph_node *node = cgraph_node::get (callee);
    779  1.1  mrg   if (!node)
    780  1.1  mrg     return NULL;
    781  1.1  mrg   modref_summary *r = get_modref_function_summary (node);
    782  1.1  mrg   if (interposed && r)
    783  1.1  mrg     *interposed = r->calls_interposable
    784  1.1  mrg 		  || !node->binds_to_current_def_p ();
    785  1.1  mrg   return r;
    786  1.1  mrg }
    787  1.1  mrg 
    788  1.1  mrg 
    789  1.1  mrg namespace {
    790  1.1  mrg 
    791  1.1  mrg /* Return true if ECF flags says that nondeterminism can be ignored.  */
    792  1.1  mrg 
    793  1.1  mrg static bool
    794  1.1  mrg ignore_nondeterminism_p (tree caller, int flags)
    795  1.1  mrg {
    796  1.1  mrg   if (flags & (ECF_CONST | ECF_PURE))
    797  1.1  mrg     return true;
    798  1.1  mrg   if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
    799  1.1  mrg       || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
    800  1.1  mrg     return true;
    801  1.1  mrg   return false;
    802  1.1  mrg }
    803  1.1  mrg 
    804  1.1  mrg /* Return true if ECF flags says that return value can be ignored.  */
    805  1.1  mrg 
    806  1.1  mrg static bool
    807  1.1  mrg ignore_retval_p (tree caller, int flags)
    808  1.1  mrg {
    809  1.1  mrg   if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
    810  1.1  mrg       || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
    811  1.1  mrg     return true;
    812  1.1  mrg   return false;
    813  1.1  mrg }
    814  1.1  mrg 
    815  1.1  mrg /* Return true if ECF flags says that stores can be ignored.  */
    816  1.1  mrg 
    817  1.1  mrg static bool
    818  1.1  mrg ignore_stores_p (tree caller, int flags)
    819  1.1  mrg {
    820  1.1  mrg   if (flags & (ECF_PURE | ECF_CONST | ECF_NOVOPS))
    821  1.1  mrg     return true;
    822  1.1  mrg   if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
    823  1.1  mrg       || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
    824  1.1  mrg     return true;
    825  1.1  mrg   return false;
    826  1.1  mrg }
    827  1.1  mrg 
    828  1.1  mrg /* Determine parm_map for PTR which is supposed to be a pointer.  */
    829  1.1  mrg 
    830  1.1  mrg modref_parm_map
    831  1.1  mrg parm_map_for_ptr (tree op)
    832  1.1  mrg {
    833  1.1  mrg   bool offset_known;
    834  1.1  mrg   poly_int64 offset;
    835  1.1  mrg   struct modref_parm_map parm_map;
    836  1.1  mrg   gcall *call;
    837  1.1  mrg 
    838  1.1  mrg   parm_map.parm_offset_known = false;
    839  1.1  mrg   parm_map.parm_offset = 0;
    840  1.1  mrg 
    841  1.1  mrg   offset_known = unadjusted_ptr_and_unit_offset (op, &op, &offset);
    842  1.1  mrg   if (TREE_CODE (op) == SSA_NAME
    843  1.1  mrg       && SSA_NAME_IS_DEFAULT_DEF (op)
    844  1.1  mrg       && TREE_CODE (SSA_NAME_VAR (op)) == PARM_DECL)
    845  1.1  mrg     {
    846  1.1  mrg       int index = 0;
    847  1.1  mrg 
    848  1.1  mrg       if (cfun->static_chain_decl
    849  1.1  mrg 	  && op == ssa_default_def (cfun, cfun->static_chain_decl))
    850  1.1  mrg 	index = MODREF_STATIC_CHAIN_PARM;
    851  1.1  mrg       else
    852  1.1  mrg 	for (tree t = DECL_ARGUMENTS (current_function_decl);
    853  1.1  mrg 	     t != SSA_NAME_VAR (op); t = DECL_CHAIN (t))
    854  1.1  mrg 	  index++;
    855  1.1  mrg       parm_map.parm_index = index;
    856  1.1  mrg       parm_map.parm_offset_known = offset_known;
    857  1.1  mrg       parm_map.parm_offset = offset;
    858  1.1  mrg     }
    859  1.1  mrg   else if (points_to_local_or_readonly_memory_p (op))
    860  1.1  mrg     parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
    861  1.1  mrg   /* Memory allocated in the function is not visible to caller before the
    862  1.1  mrg      call and thus we do not need to record it as load/stores/kills.  */
    863  1.1  mrg   else if (TREE_CODE (op) == SSA_NAME
    864  1.1  mrg 	   && (call = dyn_cast<gcall *>(SSA_NAME_DEF_STMT (op))) != NULL
    865  1.1  mrg 	   && gimple_call_flags (call) & ECF_MALLOC)
    866  1.1  mrg     parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
    867  1.1  mrg   else
    868  1.1  mrg     parm_map.parm_index = MODREF_UNKNOWN_PARM;
    869  1.1  mrg   return parm_map;
    870  1.1  mrg }
    871  1.1  mrg 
    872  1.1  mrg /* Return true if ARG with EAF flags FLAGS can not make any caller's parameter
    873  1.1  mrg    used (if LOAD is true we check loads, otherwise stores).  */
    874  1.1  mrg 
    875  1.1  mrg static bool
    876  1.1  mrg verify_arg (tree arg, int flags, bool load)
    877  1.1  mrg {
    878  1.1  mrg   if (flags & EAF_UNUSED)
    879  1.1  mrg     return true;
    880  1.1  mrg   if (load && (flags & EAF_NO_DIRECT_READ))
    881  1.1  mrg     return true;
    882  1.1  mrg   if (!load
    883  1.1  mrg       && (flags & (EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER))
    884  1.1  mrg 	  == (EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER))
    885  1.1  mrg     return true;
    886  1.1  mrg   if (is_gimple_constant (arg))
    887  1.1  mrg     return true;
    888  1.1  mrg   if (DECL_P (arg) && TREE_READONLY (arg))
    889  1.1  mrg     return true;
    890  1.1  mrg   if (TREE_CODE (arg) == ADDR_EXPR)
    891  1.1  mrg     {
    892  1.1  mrg       tree t = get_base_address (TREE_OPERAND (arg, 0));
    893  1.1  mrg       if (is_gimple_constant (t))
    894  1.1  mrg 	return true;
    895  1.1  mrg       if (DECL_P (t)
    896  1.1  mrg 	  && (TREE_READONLY (t) || TREE_CODE (t) == FUNCTION_DECL))
    897  1.1  mrg 	return true;
    898  1.1  mrg     }
    899  1.1  mrg   return false;
    900  1.1  mrg }
    901  1.1  mrg 
    902  1.1  mrg /* Return true if STMT may access memory that is pointed to by parameters
    903  1.1  mrg    of caller and which is not seen as an escape by PTA.
    904  1.1  mrg    CALLEE_ECF_FLAGS are ECF flags of callee.  If LOAD is true then by access
    905  1.1  mrg    we mean load, otherwise we mean store.  */
    906  1.1  mrg 
    907  1.1  mrg static bool
    908  1.1  mrg may_access_nonescaping_parm_p (gcall *call, int callee_ecf_flags, bool load)
    909  1.1  mrg {
    910  1.1  mrg   int implicit_flags = 0;
    911  1.1  mrg 
    912  1.1  mrg   if (ignore_stores_p (current_function_decl, callee_ecf_flags))
    913  1.1  mrg     implicit_flags |= ignore_stores_eaf_flags;
    914  1.1  mrg   if (callee_ecf_flags & ECF_PURE)
    915  1.1  mrg     implicit_flags |= implicit_pure_eaf_flags;
    916  1.1  mrg   if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
    917  1.1  mrg     implicit_flags |= implicit_const_eaf_flags;
    918  1.1  mrg   if (gimple_call_chain (call)
    919  1.1  mrg       && !verify_arg (gimple_call_chain (call),
    920  1.1  mrg 		      gimple_call_static_chain_flags (call) | implicit_flags,
    921  1.1  mrg 		      load))
    922  1.1  mrg     return true;
    923  1.1  mrg   for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
    924  1.1  mrg     if (!verify_arg (gimple_call_arg (call, i),
    925  1.1  mrg 		     gimple_call_arg_flags (call, i) | implicit_flags,
    926  1.1  mrg 		     load))
    927  1.1  mrg       return true;
    928  1.1  mrg   return false;
    929  1.1  mrg }
    930  1.1  mrg 
    931  1.1  mrg 
    932  1.1  mrg /* Analyze memory accesses (loads, stores and kills) performed
    933  1.1  mrg    by the function.  Set also side_effects, calls_interposable
    934  1.1  mrg    and nondeterminism flags.  */
    935  1.1  mrg 
    936  1.1  mrg class modref_access_analysis
    937  1.1  mrg {
    938  1.1  mrg public:
    939  1.1  mrg   modref_access_analysis (bool ipa, modref_summary *summary,
    940  1.1  mrg 			  modref_summary_lto *summary_lto)
    941  1.1  mrg   : m_summary (summary), m_summary_lto (summary_lto), m_ipa (ipa)
    942  1.1  mrg   {
    943  1.1  mrg   }
    944  1.1  mrg   void analyze ();
    945  1.1  mrg private:
    946  1.1  mrg   bool set_side_effects ();
    947  1.1  mrg   bool set_nondeterministic ();
    948  1.1  mrg   static modref_access_node get_access (ao_ref *ref);
    949  1.1  mrg   static void record_access (modref_records *, ao_ref *, modref_access_node &);
    950  1.1  mrg   static void record_access_lto (modref_records_lto *, ao_ref *,
    951  1.1  mrg 				 modref_access_node &a);
    952  1.1  mrg   bool record_access_p (tree);
    953  1.1  mrg   bool record_unknown_load ();
    954  1.1  mrg   bool record_unknown_store ();
    955  1.1  mrg   bool record_global_memory_load ();
    956  1.1  mrg   bool record_global_memory_store ();
    957  1.1  mrg   bool merge_call_side_effects (gimple *, modref_summary *,
    958  1.1  mrg 				cgraph_node *, bool);
    959  1.1  mrg   modref_access_node get_access_for_fnspec (gcall *, attr_fnspec &,
    960  1.1  mrg 					    unsigned int, modref_parm_map &);
    961  1.1  mrg   void process_fnspec (gcall *);
    962  1.1  mrg   void analyze_call (gcall *);
    963  1.1  mrg   static bool analyze_load (gimple *, tree, tree, void *);
    964  1.1  mrg   static bool analyze_store (gimple *, tree, tree, void *);
    965  1.1  mrg   void analyze_stmt (gimple *, bool);
    966  1.1  mrg   void propagate ();
    967  1.1  mrg 
    968  1.1  mrg   /* Summary being computed.
    969  1.1  mrg      We work either with m_summary or m_summary_lto.  Never on both.  */
    970  1.1  mrg   modref_summary *m_summary;
    971  1.1  mrg   modref_summary_lto *m_summary_lto;
    972  1.1  mrg   /* Recursive calls needs simplistic dataflow after analysis finished.
    973  1.1  mrg      Collect all calls into this vector during analysis and later process
    974  1.1  mrg      them in propagate.  */
    975  1.1  mrg   auto_vec <gimple *, 32> m_recursive_calls;
    976  1.1  mrg   /* ECF flags of function being analyzed.  */
    977  1.1  mrg   int m_ecf_flags;
    978  1.1  mrg   /* True if IPA propagation will be done later.  */
    979  1.1  mrg   bool m_ipa;
    980  1.1  mrg   /* Set true if statement currently analyze is known to be
    981  1.1  mrg      executed each time function is called.  */
    982  1.1  mrg   bool m_always_executed;
    983  1.1  mrg };
    984  1.1  mrg 
    985  1.1  mrg /* Set side_effects flag and return if something changed.  */
    986  1.1  mrg 
    987  1.1  mrg bool
    988  1.1  mrg modref_access_analysis::set_side_effects ()
    989  1.1  mrg {
    990  1.1  mrg   bool changed = false;
    991  1.1  mrg 
    992  1.1  mrg   if (m_summary && !m_summary->side_effects)
    993  1.1  mrg     {
    994  1.1  mrg       m_summary->side_effects = true;
    995  1.1  mrg       changed = true;
    996  1.1  mrg     }
    997  1.1  mrg   if (m_summary_lto && !m_summary_lto->side_effects)
    998  1.1  mrg     {
    999  1.1  mrg       m_summary_lto->side_effects = true;
   1000  1.1  mrg       changed = true;
   1001  1.1  mrg     }
   1002  1.1  mrg   return changed;
   1003  1.1  mrg }
   1004  1.1  mrg 
   1005  1.1  mrg /* Set nondeterministic flag and return if something changed.  */
   1006  1.1  mrg 
   1007  1.1  mrg bool
   1008  1.1  mrg modref_access_analysis::set_nondeterministic ()
   1009  1.1  mrg {
   1010  1.1  mrg   bool changed = false;
   1011  1.1  mrg 
   1012  1.1  mrg   if (m_summary && !m_summary->nondeterministic)
   1013  1.1  mrg     {
   1014  1.1  mrg       m_summary->side_effects = m_summary->nondeterministic = true;
   1015  1.1  mrg       changed = true;
   1016  1.1  mrg     }
   1017  1.1  mrg   if (m_summary_lto && !m_summary_lto->nondeterministic)
   1018  1.1  mrg     {
   1019  1.1  mrg       m_summary_lto->side_effects = m_summary_lto->nondeterministic = true;
   1020  1.1  mrg       changed = true;
   1021  1.1  mrg     }
   1022  1.1  mrg   return changed;
   1023  1.1  mrg }
   1024  1.1  mrg 
   1025  1.1  mrg /* Construct modref_access_node from REF.  */
   1026  1.1  mrg 
   1027  1.1  mrg modref_access_node
   1028  1.1  mrg modref_access_analysis::get_access (ao_ref *ref)
   1029  1.1  mrg {
   1030  1.1  mrg   tree base;
   1031  1.1  mrg 
   1032  1.1  mrg   base = ao_ref_base (ref);
   1033  1.1  mrg   modref_access_node a = {ref->offset, ref->size, ref->max_size,
   1034  1.1  mrg 			  0, MODREF_UNKNOWN_PARM, false, 0};
   1035  1.1  mrg   if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
   1036  1.1  mrg     {
   1037  1.1  mrg       tree memref = base;
   1038  1.1  mrg       modref_parm_map m = parm_map_for_ptr (TREE_OPERAND (base, 0));
   1039  1.1  mrg 
   1040  1.1  mrg       a.parm_index = m.parm_index;
   1041  1.1  mrg       if (a.parm_index != MODREF_UNKNOWN_PARM && TREE_CODE (memref) == MEM_REF)
   1042  1.1  mrg 	{
   1043  1.1  mrg 	  a.parm_offset_known
   1044  1.1  mrg 	     = wi::to_poly_wide (TREE_OPERAND
   1045  1.1  mrg 				     (memref, 1)).to_shwi (&a.parm_offset);
   1046  1.1  mrg 	  if (a.parm_offset_known && m.parm_offset_known)
   1047  1.1  mrg 	    a.parm_offset += m.parm_offset;
   1048  1.1  mrg 	  else
   1049  1.1  mrg 	    a.parm_offset_known = false;
   1050  1.1  mrg 	}
   1051  1.1  mrg     }
   1052  1.1  mrg   else
   1053  1.1  mrg     a.parm_index = MODREF_UNKNOWN_PARM;
   1054  1.1  mrg   return a;
   1055  1.1  mrg }
   1056  1.1  mrg 
   1057  1.1  mrg /* Record access into the modref_records data structure.  */
   1058  1.1  mrg 
   1059  1.1  mrg void
   1060  1.1  mrg modref_access_analysis::record_access (modref_records *tt,
   1061  1.1  mrg 				       ao_ref *ref,
   1062  1.1  mrg 				       modref_access_node &a)
   1063  1.1  mrg {
   1064  1.1  mrg   alias_set_type base_set = !flag_strict_aliasing
   1065  1.1  mrg 			    || !flag_ipa_strict_aliasing ? 0
   1066  1.1  mrg 			    : ao_ref_base_alias_set (ref);
   1067  1.1  mrg   alias_set_type ref_set = !flag_strict_aliasing
   1068  1.1  mrg 			   || !flag_ipa_strict_aliasing ? 0
   1069  1.1  mrg 			    : (ao_ref_alias_set (ref));
   1070  1.1  mrg   if (dump_file)
   1071  1.1  mrg     {
   1072  1.1  mrg        fprintf (dump_file, "   - Recording base_set=%i ref_set=%i ",
   1073  1.1  mrg 		base_set, ref_set);
   1074  1.1  mrg        a.dump (dump_file);
   1075  1.1  mrg     }
   1076  1.1  mrg   tt->insert (current_function_decl, base_set, ref_set, a, false);
   1077  1.1  mrg }
   1078  1.1  mrg 
/* IPA version of record_access_tree.  Instead of alias sets (which are not
   stable across translation units) the LTO table records the base and ref
   TYPES from which alias sets are recomputed at link time.  */

void
modref_access_analysis::record_access_lto (modref_records_lto *tt, ao_ref *ref,
					   modref_access_node &a)
{
  /* get_alias_set sometimes use different type to compute the alias set
     than TREE_TYPE (base).  Do same adjustments.  */
  tree base_type = NULL_TREE, ref_type = NULL_TREE;
  if (flag_strict_aliasing && flag_ipa_strict_aliasing)
    {
      tree base;

      /* Strip handled components to get at the base reference.  */
      base = ref->ref;
      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);

      base_type = reference_alias_ptr_type_1 (&base);

      /* NULL means the access uses the type of the base itself; a
	 ref-all pointer type means "alias anything" which we encode
	 as NULL_TREE.  */
      if (!base_type)
	base_type = TREE_TYPE (base);
      else
	base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)
		    ? NULL_TREE : TREE_TYPE (base_type);

      /* Same computation for the full reference type.  */
      tree ref_expr = ref->ref;
      ref_type = reference_alias_ptr_type_1 (&ref_expr);

      if (!ref_type)
	ref_type = TREE_TYPE (ref_expr);
      else
	ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)
		   ? NULL_TREE : TREE_TYPE (ref_type);

      /* Sanity check that we are in sync with what get_alias_set does.  */
      gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))
			   || get_alias_set (base_type)
			      == ao_ref_base_alias_set (ref));
      gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))
			   || get_alias_set (ref_type)
			      == ao_ref_alias_set (ref));

      /* Do not bother to record types that have no meaningful alias set.
	 Also skip variably modified types since these go to local streams.  */
      if (base_type && (!get_alias_set (base_type)
			|| variably_modified_type_p (base_type, NULL_TREE)))
	base_type = NULL_TREE;
      if (ref_type && (!get_alias_set (ref_type)
		       || variably_modified_type_p (ref_type, NULL_TREE)))
	ref_type = NULL_TREE;
    }
  if (dump_file)
    {
      fprintf (dump_file, "   - Recording base type:");
      print_generic_expr (dump_file, base_type);
      fprintf (dump_file, " (alias set %i) ref type:",
	       base_type ? get_alias_set (base_type) : 0);
      print_generic_expr (dump_file, ref_type);
      fprintf (dump_file, " (alias set %i) ",
	       ref_type ? get_alias_set (ref_type) : 0);
       a.dump (dump_file);
    }

  tt->insert (current_function_decl, base_type, ref_type, a, false);
}
   1144  1.1  mrg 
   1145  1.1  mrg /* Returns true if and only if we should store the access to EXPR.
   1146  1.1  mrg    Some accesses, e.g. loads from automatic variables, are not interesting.  */
   1147  1.1  mrg 
   1148  1.1  mrg bool
   1149  1.1  mrg modref_access_analysis::record_access_p (tree expr)
   1150  1.1  mrg {
   1151  1.1  mrg   if (TREE_THIS_VOLATILE (expr))
   1152  1.1  mrg     {
   1153  1.1  mrg       if (dump_file)
   1154  1.1  mrg 	fprintf (dump_file, " (volatile; marking nondeterministic) ");
   1155  1.1  mrg       set_nondeterministic ();
   1156  1.1  mrg     }
   1157  1.1  mrg   if (cfun->can_throw_non_call_exceptions
   1158  1.1  mrg       && tree_could_throw_p (expr))
   1159  1.1  mrg     {
   1160  1.1  mrg       if (dump_file)
   1161  1.1  mrg 	fprintf (dump_file, " (can throw; marking side effects) ");
   1162  1.1  mrg       set_side_effects ();
   1163  1.1  mrg     }
   1164  1.1  mrg 
   1165  1.1  mrg   if (refs_local_or_readonly_memory_p (expr))
   1166  1.1  mrg     {
   1167  1.1  mrg       if (dump_file)
   1168  1.1  mrg 	fprintf (dump_file, "   - Read-only or local, ignoring.\n");
   1169  1.1  mrg       return false;
   1170  1.1  mrg     }
   1171  1.1  mrg   return true;
   1172  1.1  mrg }
   1173  1.1  mrg 
   1174  1.1  mrg /* Collapse loads and return true if something changed.  */
   1175  1.1  mrg 
   1176  1.1  mrg bool
   1177  1.1  mrg modref_access_analysis::record_unknown_load ()
   1178  1.1  mrg {
   1179  1.1  mrg   bool changed = false;
   1180  1.1  mrg 
   1181  1.1  mrg   if (m_summary && !m_summary->loads->every_base)
   1182  1.1  mrg     {
   1183  1.1  mrg       m_summary->loads->collapse ();
   1184  1.1  mrg       changed = true;
   1185  1.1  mrg     }
   1186  1.1  mrg   if (m_summary_lto && !m_summary_lto->loads->every_base)
   1187  1.1  mrg     {
   1188  1.1  mrg       m_summary_lto->loads->collapse ();
   1189  1.1  mrg       changed = true;
   1190  1.1  mrg     }
   1191  1.1  mrg   return changed;
   1192  1.1  mrg }
   1193  1.1  mrg 
   1194  1.1  mrg /* Collapse loads and return true if something changed.  */
   1195  1.1  mrg 
   1196  1.1  mrg bool
   1197  1.1  mrg modref_access_analysis::record_unknown_store ()
   1198  1.1  mrg {
   1199  1.1  mrg   bool changed = false;
   1200  1.1  mrg 
   1201  1.1  mrg   if (m_summary && !m_summary->stores->every_base)
   1202  1.1  mrg     {
   1203  1.1  mrg       m_summary->stores->collapse ();
   1204  1.1  mrg       changed = true;
   1205  1.1  mrg     }
   1206  1.1  mrg   if (m_summary_lto && !m_summary_lto->stores->every_base)
   1207  1.1  mrg     {
   1208  1.1  mrg       m_summary_lto->stores->collapse ();
   1209  1.1  mrg       changed = true;
   1210  1.1  mrg     }
   1211  1.1  mrg   return changed;
   1212  1.1  mrg }
   1213  1.1  mrg 
   1214  1.1  mrg /* Record unknown load from global memory.  */
   1215  1.1  mrg 
   1216  1.1  mrg bool
   1217  1.1  mrg modref_access_analysis::record_global_memory_load ()
   1218  1.1  mrg {
   1219  1.1  mrg   bool changed = false;
   1220  1.1  mrg   modref_access_node a = {0, -1, -1,
   1221  1.1  mrg 			  0, MODREF_GLOBAL_MEMORY_PARM, false, 0};
   1222  1.1  mrg 
   1223  1.1  mrg   if (m_summary && !m_summary->loads->every_base)
   1224  1.1  mrg     changed |= m_summary->loads->insert (current_function_decl, 0, 0, a, false);
   1225  1.1  mrg   if (m_summary_lto && !m_summary_lto->loads->every_base)
   1226  1.1  mrg     changed |= m_summary_lto->loads->insert (current_function_decl,
   1227  1.1  mrg 					     0, 0, a, false);
   1228  1.1  mrg   return changed;
   1229  1.1  mrg }
   1230  1.1  mrg 
   1231  1.1  mrg /* Record unknown store from global memory.  */
   1232  1.1  mrg 
   1233  1.1  mrg bool
   1234  1.1  mrg modref_access_analysis::record_global_memory_store ()
   1235  1.1  mrg {
   1236  1.1  mrg   bool changed = false;
   1237  1.1  mrg   modref_access_node a = {0, -1, -1,
   1238  1.1  mrg 			  0, MODREF_GLOBAL_MEMORY_PARM, false, 0};
   1239  1.1  mrg 
   1240  1.1  mrg   if (m_summary && !m_summary->stores->every_base)
   1241  1.1  mrg     changed |= m_summary->stores->insert (current_function_decl,
   1242  1.1  mrg 					  0, 0, a, false);
   1243  1.1  mrg   if (m_summary_lto && !m_summary_lto->stores->every_base)
   1244  1.1  mrg     changed |= m_summary_lto->stores->insert (current_function_decl,
   1245  1.1  mrg 					     0, 0, a, false);
   1246  1.1  mrg   return changed;
   1247  1.1  mrg }
   1248  1.1  mrg 
   1249  1.1  mrg /* Merge side effects of call STMT to function with CALLEE_SUMMARY.
   1250  1.1  mrg    Return true if something changed.
   1251  1.1  mrg    If IGNORE_STORES is true, do not merge stores.
   1252  1.1  mrg    If RECORD_ADJUSTMENTS is true cap number of adjustments to
   1253  1.1  mrg    a given access to make dataflow finite.  */
   1254  1.1  mrg 
   1255  1.1  mrg bool
   1256  1.1  mrg modref_access_analysis::merge_call_side_effects
   1257  1.1  mrg 	 (gimple *stmt, modref_summary *callee_summary,
   1258  1.1  mrg 	  cgraph_node *callee_node, bool record_adjustments)
   1259  1.1  mrg {
   1260  1.1  mrg   gcall *call = as_a <gcall *> (stmt);
   1261  1.1  mrg   int flags = gimple_call_flags (call);
   1262  1.1  mrg 
   1263  1.1  mrg   /* Nothing to do for non-looping cont functions.  */
   1264  1.1  mrg   if ((flags & ECF_CONST)
   1265  1.1  mrg       && !(flags & ECF_LOOPING_CONST_OR_PURE))
   1266  1.1  mrg     return false;
   1267  1.1  mrg 
   1268  1.1  mrg   bool changed = false;
   1269  1.1  mrg 
   1270  1.1  mrg   if (dump_file)
   1271  1.1  mrg     fprintf (dump_file, " - Merging side effects of %s\n",
   1272  1.1  mrg 	     callee_node->dump_name ());
   1273  1.1  mrg 
   1274  1.1  mrg   /* Merge side effects and non-determinism.
   1275  1.1  mrg      PURE/CONST flags makes functions deterministic and if there is
   1276  1.1  mrg      no LOOPING_CONST_OR_PURE they also have no side effects.  */
   1277  1.1  mrg   if (!(flags & (ECF_CONST | ECF_PURE))
   1278  1.1  mrg       || (flags & ECF_LOOPING_CONST_OR_PURE))
   1279  1.1  mrg     {
   1280  1.1  mrg       if (!m_summary->side_effects && callee_summary->side_effects)
   1281  1.1  mrg 	{
   1282  1.1  mrg 	  if (dump_file)
   1283  1.1  mrg 	    fprintf (dump_file, " - merging side effects.\n");
   1284  1.1  mrg 	  m_summary->side_effects = true;
   1285  1.1  mrg 	  changed = true;
   1286  1.1  mrg 	}
   1287  1.1  mrg       if (!m_summary->nondeterministic && callee_summary->nondeterministic
   1288  1.1  mrg 	  && !ignore_nondeterminism_p (current_function_decl, flags))
   1289  1.1  mrg 	{
   1290  1.1  mrg 	  if (dump_file)
   1291  1.1  mrg 	    fprintf (dump_file, " - merging nondeterministic.\n");
   1292  1.1  mrg 	  m_summary->nondeterministic = true;
   1293  1.1  mrg 	  changed = true;
   1294  1.1  mrg 	}
   1295  1.1  mrg      }
   1296  1.1  mrg 
   1297  1.1  mrg   /* For const functions we are done.  */
   1298  1.1  mrg   if (flags & (ECF_CONST | ECF_NOVOPS))
   1299  1.1  mrg     return changed;
   1300  1.1  mrg 
   1301  1.1  mrg   /* Merge calls_interposable flags.  */
   1302  1.1  mrg   if (!m_summary->calls_interposable && callee_summary->calls_interposable)
   1303  1.1  mrg     {
   1304  1.1  mrg       if (dump_file)
   1305  1.1  mrg 	fprintf (dump_file, " - merging calls interposable.\n");
   1306  1.1  mrg       m_summary->calls_interposable = true;
   1307  1.1  mrg       changed = true;
   1308  1.1  mrg     }
   1309  1.1  mrg 
   1310  1.1  mrg   if (!callee_node->binds_to_current_def_p () && !m_summary->calls_interposable)
   1311  1.1  mrg     {
   1312  1.1  mrg       if (dump_file)
   1313  1.1  mrg 	fprintf (dump_file, " - May be interposed.\n");
   1314  1.1  mrg       m_summary->calls_interposable = true;
   1315  1.1  mrg       changed = true;
   1316  1.1  mrg     }
   1317  1.1  mrg 
   1318  1.1  mrg   /* Now merge the actual load, store and kill vectors.  For this we need
   1319  1.1  mrg      to compute map translating new parameters to old.  */
   1320  1.1  mrg   if (dump_file)
   1321  1.1  mrg     fprintf (dump_file, "   Parm map:");
   1322  1.1  mrg 
   1323  1.1  mrg   auto_vec <modref_parm_map, 32> parm_map;
   1324  1.1  mrg   parm_map.safe_grow_cleared (gimple_call_num_args (call), true);
   1325  1.1  mrg   for (unsigned i = 0; i < gimple_call_num_args (call); i++)
   1326  1.1  mrg     {
   1327  1.1  mrg       parm_map[i] = parm_map_for_ptr (gimple_call_arg (call, i));
   1328  1.1  mrg       if (dump_file)
   1329  1.1  mrg 	{
   1330  1.1  mrg 	  fprintf (dump_file, " %i", parm_map[i].parm_index);
   1331  1.1  mrg 	  if (parm_map[i].parm_offset_known)
   1332  1.1  mrg 	    {
   1333  1.1  mrg 	      fprintf (dump_file, " offset:");
   1334  1.1  mrg 	      print_dec ((poly_int64_pod)parm_map[i].parm_offset,
   1335  1.1  mrg 			 dump_file, SIGNED);
   1336  1.1  mrg 	    }
   1337  1.1  mrg 	}
   1338  1.1  mrg     }
   1339  1.1  mrg 
   1340  1.1  mrg   modref_parm_map chain_map;
   1341  1.1  mrg   if (gimple_call_chain (call))
   1342  1.1  mrg     {
   1343  1.1  mrg       chain_map = parm_map_for_ptr (gimple_call_chain (call));
   1344  1.1  mrg       if (dump_file)
   1345  1.1  mrg 	{
   1346  1.1  mrg 	  fprintf (dump_file, "static chain %i", chain_map.parm_index);
   1347  1.1  mrg 	  if (chain_map.parm_offset_known)
   1348  1.1  mrg 	    {
   1349  1.1  mrg 	      fprintf (dump_file, " offset:");
   1350  1.1  mrg 	      print_dec ((poly_int64_pod)chain_map.parm_offset,
   1351  1.1  mrg 			 dump_file, SIGNED);
   1352  1.1  mrg 	    }
   1353  1.1  mrg 	}
   1354  1.1  mrg     }
   1355  1.1  mrg   if (dump_file)
   1356  1.1  mrg     fprintf (dump_file, "\n");
   1357  1.1  mrg 
   1358  1.1  mrg   /* Kills can me merged in only if we know the function is going to be
   1359  1.1  mrg      always executed.  */
   1360  1.1  mrg   if (m_always_executed
   1361  1.1  mrg       && callee_summary->kills.length ()
   1362  1.1  mrg       && (!cfun->can_throw_non_call_exceptions
   1363  1.1  mrg 	  || !stmt_could_throw_p (cfun, call)))
   1364  1.1  mrg     {
   1365  1.1  mrg       /* Watch for self recursive updates.  */
   1366  1.1  mrg       auto_vec<modref_access_node, 32> saved_kills;
   1367  1.1  mrg 
   1368  1.1  mrg       saved_kills.reserve_exact (callee_summary->kills.length ());
   1369  1.1  mrg       saved_kills.splice (callee_summary->kills);
   1370  1.1  mrg       for (auto kill : saved_kills)
   1371  1.1  mrg 	{
   1372  1.1  mrg 	  if (kill.parm_index >= (int)parm_map.length ())
   1373  1.1  mrg 	    continue;
   1374  1.1  mrg 	  modref_parm_map &m
   1375  1.1  mrg 		  = kill.parm_index == MODREF_STATIC_CHAIN_PARM
   1376  1.1  mrg 		    ? chain_map
   1377  1.1  mrg 		    : parm_map[kill.parm_index];
   1378  1.1  mrg 	  if (m.parm_index == MODREF_LOCAL_MEMORY_PARM
   1379  1.1  mrg 	      || m.parm_index == MODREF_UNKNOWN_PARM
   1380  1.1  mrg 	      || m.parm_index == MODREF_RETSLOT_PARM
   1381  1.1  mrg 	      || !m.parm_offset_known)
   1382  1.1  mrg 	    continue;
   1383  1.1  mrg 	  modref_access_node n = kill;
   1384  1.1  mrg 	  n.parm_index = m.parm_index;
   1385  1.1  mrg 	  n.parm_offset += m.parm_offset;
   1386  1.1  mrg 	  if (modref_access_node::insert_kill (m_summary->kills, n,
   1387  1.1  mrg 					       record_adjustments))
   1388  1.1  mrg 	    changed = true;
   1389  1.1  mrg 	}
   1390  1.1  mrg     }
   1391  1.1  mrg 
   1392  1.1  mrg   /* Merge in loads.  */
   1393  1.1  mrg   changed |= m_summary->loads->merge (current_function_decl,
   1394  1.1  mrg 				      callee_summary->loads,
   1395  1.1  mrg 				      &parm_map, &chain_map,
   1396  1.1  mrg 				      record_adjustments,
   1397  1.1  mrg 				      !may_access_nonescaping_parm_p
   1398  1.1  mrg 					 (call, flags, true));
   1399  1.1  mrg   /* Merge in stores.  */
   1400  1.1  mrg   if (!ignore_stores_p (current_function_decl, flags))
   1401  1.1  mrg     {
   1402  1.1  mrg       changed |= m_summary->stores->merge (current_function_decl,
   1403  1.1  mrg 					   callee_summary->stores,
   1404  1.1  mrg 					   &parm_map, &chain_map,
   1405  1.1  mrg 					   record_adjustments,
   1406  1.1  mrg 					   !may_access_nonescaping_parm_p
   1407  1.1  mrg 					       (call, flags, false));
   1408  1.1  mrg       if (!m_summary->writes_errno
   1409  1.1  mrg 	  && callee_summary->writes_errno)
   1410  1.1  mrg 	{
   1411  1.1  mrg 	  m_summary->writes_errno = true;
   1412  1.1  mrg 	  changed = true;
   1413  1.1  mrg 	}
   1414  1.1  mrg     }
   1415  1.1  mrg   return changed;
   1416  1.1  mrg }
   1417  1.1  mrg 
/* Return access node describing memory accessed via pointer argument I of
   CALL, given the call's FNSPEC and the parameter map MAP (parameter index
   and offset of the pointed-to memory as computed by parm_map_for_ptr).  */

modref_access_node
modref_access_analysis::get_access_for_fnspec (gcall *call, attr_fnspec &fnspec,
					       unsigned int i,
					       modref_parm_map &map)
{
  tree size = NULL_TREE;
  unsigned int size_arg;

  /* Determine the maximal access size if the fnspec provides it: either
     another call argument holds the size, or it is given by the
     pointed-to type of the I-th parameter.  */
  if (!fnspec.arg_specified_p (i))
    ;
  else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
    size = gimple_call_arg (call, size_arg);
  else if (fnspec.arg_access_size_given_by_type_p (i))
    {
      tree callee = gimple_call_fndecl (call);
      tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));

      /* Walk the declared argument type list up to parameter I.  */
      for (unsigned int p = 0; p < i; p++)
	t = TREE_CHAIN (t);
      size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
    }
  /* Start with an access of unknown size and max_size (-1) at the
     parameter index/offset recorded in MAP.  */
  modref_access_node a = {0, -1, -1,
			  map.parm_offset, map.parm_index,
			  map.parm_offset_known, 0};
  poly_int64 size_hwi;
  /* Refine max_size when SIZE is a (poly-)integer constant that does not
     overflow HOST_WIDE_INT when converted from bytes to bits.  */
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0,
			    HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      a.size = -1;
      a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
    }
  return a;
}
/* Apply side effects of call CALL to the summaries being built, using the
   call's fnspec string and ECF flags.  This is the fallback path used when
   no modref summary of the callee is available.  */

void
modref_access_analysis::process_fnspec (gcall *call)
{
  int flags = gimple_call_flags (call);

  /* PURE/CONST flags makes functions deterministic and if there is
     no LOOPING_CONST_OR_PURE they also have no side effects.  */
  if (!(flags & (ECF_CONST | ECF_PURE))
      || (flags & ECF_LOOPING_CONST_OR_PURE)
      || (cfun->can_throw_non_call_exceptions
	  && stmt_could_throw_p (cfun, call)))
    {
      set_side_effects ();
      if (!ignore_nondeterminism_p (current_function_decl, flags))
	set_nondeterministic ();
    }

  /* For const functions we are done.  */
  if (flags & (ECF_CONST | ECF_NOVOPS))
    return;

  attr_fnspec fnspec = gimple_call_fnspec (call);
  /* If there is no fnspec we know nothing about loads & stores.  */
  if (!fnspec.known_p ())
    {
      if (dump_file && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
	fprintf (dump_file, "      Builtin with no fnspec: %s\n",
		 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call))));
      /* Without a fnspec, conservatively record unknown accesses; when the
	 call cannot touch non-escaping parameters we can at least restrict
	 the access to global memory.  */
      if (!ignore_stores_p (current_function_decl, flags))
	{
	  if (!may_access_nonescaping_parm_p (call, flags, false))
	    record_global_memory_store ();
	  else
	    record_unknown_store ();
	  if (!may_access_nonescaping_parm_p (call, flags, true))
	    record_global_memory_load ();
	  else
	    record_unknown_load ();
	}
      else
	{
	  /* Stores are ignorable (e.g. noreturn callee); only loads
	     matter.  */
	  if (!may_access_nonescaping_parm_p (call, flags, true))
	    record_global_memory_load ();
	  else
	    record_unknown_load ();
	}
      return;
    }
  /* Process fnspec.  First loads ...  */
  if (fnspec.global_memory_read_p ())
    {
      if (may_access_nonescaping_parm_p (call, flags, true))
	record_unknown_load ();
      else
	record_global_memory_load ();
    }
  else
    {
      /* Only pointer arguments can be read through; record an access node
	 per argument the fnspec does not prove unread.  */
      for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
	if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
	  ;
	else if (!fnspec.arg_specified_p (i)
		 || fnspec.arg_maybe_read_p (i))
	  {
	    modref_parm_map map = parm_map_for_ptr
					(gimple_call_arg (call, i));

	    if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
	      continue;
	    if (map.parm_index == MODREF_UNKNOWN_PARM)
	      {
		record_unknown_load ();
		break;
	      }
	    modref_access_node a = get_access_for_fnspec (call, fnspec, i, map);
	    if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
	      continue;
	    if (m_summary)
	      m_summary->loads->insert (current_function_decl, 0, 0, a, false);
	    if (m_summary_lto)
	      m_summary_lto->loads->insert (current_function_decl, 0, 0, a,
					    false);
	  }
    }
  /* ... then stores, unless they can be ignored entirely.  */
  if (ignore_stores_p (current_function_decl, flags))
    return;
  if (fnspec.global_memory_written_p ())
    {
      if (may_access_nonescaping_parm_p (call, flags, false))
	record_unknown_store ();
      else
	record_global_memory_store ();
    }
  else
    {
      for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
	if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
	  ;
	else if (!fnspec.arg_specified_p (i)
		 || fnspec.arg_maybe_written_p (i))
	  {
	    modref_parm_map map = parm_map_for_ptr
					 (gimple_call_arg (call, i));

	    if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
	      continue;
	    if (map.parm_index == MODREF_UNKNOWN_PARM)
	      {
		record_unknown_store ();
		break;
	      }
	    modref_access_node a = get_access_for_fnspec (call, fnspec, i, map);
	    if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
	      continue;
	    if (m_summary)
	      m_summary->stores->insert (current_function_decl, 0, 0, a, false);
	    if (m_summary_lto)
	      m_summary_lto->stores->insert (current_function_decl,
					     0, 0, a, false);
	  }
      /* Errno writes only matter when -fmath-errno is in effect.  */
      if (fnspec.errno_maybe_written_p () && flag_errno_math)
	{
	  if (m_summary)
	    m_summary->writes_errno = true;
	  if (m_summary_lto)
	    m_summary_lto->writes_errno = true;
	}
    }
}
   1588  1.1  mrg 
/* Analyze call statement STMT.  When the callee's modref summary is
   available, merge it into ours; otherwise fall back to fnspec/ECF based
   handling.  Self-recursive calls are queued in m_recursive_calls and
   processed later by propagate ().  */

void
modref_access_analysis::analyze_call (gcall *stmt)
{
  /* Check flags on the function call.  In certain cases, analysis can be
     simplified.  */
  int flags = gimple_call_flags (stmt);

  if (dump_file)
    {
      fprintf (dump_file, " - Analyzing call:");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  /* Non-looping const calls neither read nor write memory we track.  */
  if ((flags & ECF_CONST)
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      if (dump_file)
	fprintf (dump_file,
		 " - ECF_CONST, ignoring all stores and all loads "
		 "except for args.\n");
      return;
    }

  /* Next, we try to get the callee's function declaration.  The goal is to
     merge their summary with ours.  */
  tree callee = gimple_call_fndecl (stmt);

  /* Check if this is an indirect call.  */
  if (!callee)
    {
      /* NOTE(review): the " - Internal call" dump string lacks a trailing
	 ".\n" unlike its sibling; dump lines may run together.  */
      if (dump_file)
	fprintf (dump_file, gimple_call_internal_p (stmt)
		 ? " - Internal call" : " - Indirect call.\n");
      if (flags & ECF_NOVOPS)
        {
	  set_side_effects ();
	  set_nondeterministic ();
        }
      else
	process_fnspec (stmt);
      return;
    }
  /* We only need to handle internal calls in IPA mode.  */
  gcc_checking_assert (!m_summary_lto && !m_ipa);

  struct cgraph_node *callee_node = cgraph_node::get_create (callee);

  /* If this is a recursive call, the target summary is the same as ours, so
     there's nothing to do.  */
  if (recursive_call_p (current_function_decl, callee))
    {
      /* Remember the call; propagate () iterates over these later.  */
      m_recursive_calls.safe_push (stmt);
      set_side_effects ();
      if (dump_file)
	fprintf (dump_file, " - Skipping recursive call.\n");
      return;
    }

  gcc_assert (callee_node != NULL);

  /* Get the function symbol and its availability.  */
  enum availability avail;
  callee_node = callee_node->function_symbol (&avail);
  bool looping;
  /* Builtins safe for const functions touch no memory we track.  */
  if (builtin_safe_for_const_function_p (&looping, callee))
    {
      if (looping)
	set_side_effects ();
      if (dump_file)
	fprintf (dump_file, " - Builtin is safe for const.\n");
      return;
    }
  /* Interposable symbols may be replaced at link/run time; their summary
     cannot be trusted.  */
  if (avail <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file,
		 " - Function availability <= AVAIL_INTERPOSABLE.\n");
      process_fnspec (stmt);
      return;
    }

  /* Get callee's modref summary.  As above, if there's no summary, we either
     have to give up or, if stores are ignored, we can just purge loads.  */
  modref_summary *callee_summary = optimization_summaries->get (callee_node);
  if (!callee_summary)
    {
      if (dump_file)
	fprintf (dump_file, " - No modref summary available for callee.\n");
      process_fnspec (stmt);
      return;
    }

  merge_call_side_effects (stmt, callee_summary, callee_node, false);

  return;
}
   1688  1.1  mrg 
   1689  1.1  mrg /* Helper for analyze_stmt.  */
   1690  1.1  mrg 
   1691  1.1  mrg bool
   1692  1.1  mrg modref_access_analysis::analyze_load (gimple *, tree, tree op, void *data)
   1693  1.1  mrg {
   1694  1.1  mrg   modref_access_analysis *t = (modref_access_analysis *)data;
   1695  1.1  mrg 
   1696  1.1  mrg   if (dump_file)
   1697  1.1  mrg     {
   1698  1.1  mrg       fprintf (dump_file, " - Analyzing load: ");
   1699  1.1  mrg       print_generic_expr (dump_file, op);
   1700  1.1  mrg       fprintf (dump_file, "\n");
   1701  1.1  mrg     }
   1702  1.1  mrg 
   1703  1.1  mrg   if (!t->record_access_p (op))
   1704  1.1  mrg     return false;
   1705  1.1  mrg 
   1706  1.1  mrg   ao_ref r;
   1707  1.1  mrg   ao_ref_init (&r, op);
   1708  1.1  mrg   modref_access_node a = get_access (&r);
   1709  1.1  mrg   if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
   1710  1.1  mrg     return false;
   1711  1.1  mrg 
   1712  1.1  mrg   if (t->m_summary)
   1713  1.1  mrg     t->record_access (t->m_summary->loads, &r, a);
   1714  1.1  mrg   if (t->m_summary_lto)
   1715  1.1  mrg     t->record_access_lto (t->m_summary_lto->loads, &r, a);
   1716  1.1  mrg   return false;
   1717  1.1  mrg }
   1718  1.1  mrg 
   1719  1.1  mrg /* Helper for analyze_stmt.  */
   1720  1.1  mrg 
   1721  1.1  mrg bool
   1722  1.1  mrg modref_access_analysis::analyze_store (gimple *stmt, tree, tree op, void *data)
   1723  1.1  mrg {
   1724  1.1  mrg   modref_access_analysis *t = (modref_access_analysis *)data;
   1725  1.1  mrg 
   1726  1.1  mrg   if (dump_file)
   1727  1.1  mrg     {
   1728  1.1  mrg       fprintf (dump_file, " - Analyzing store: ");
   1729  1.1  mrg       print_generic_expr (dump_file, op);
   1730  1.1  mrg       fprintf (dump_file, "\n");
   1731  1.1  mrg     }
   1732  1.1  mrg 
   1733  1.1  mrg   if (!t->record_access_p (op))
   1734  1.1  mrg     return false;
   1735  1.1  mrg 
   1736  1.1  mrg   ao_ref r;
   1737  1.1  mrg   ao_ref_init (&r, op);
   1738  1.1  mrg   modref_access_node a = get_access (&r);
   1739  1.1  mrg   if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
   1740  1.1  mrg     return false;
   1741  1.1  mrg 
   1742  1.1  mrg   if (t->m_summary)
   1743  1.1  mrg     t->record_access (t->m_summary->stores, &r, a);
   1744  1.1  mrg   if (t->m_summary_lto)
   1745  1.1  mrg     t->record_access_lto (t->m_summary_lto->stores, &r, a);
   1746  1.1  mrg   if (t->m_always_executed
   1747  1.1  mrg       && a.useful_for_kill_p ()
   1748  1.1  mrg       && !stmt_could_throw_p (cfun, stmt))
   1749  1.1  mrg     {
   1750  1.1  mrg       if (dump_file)
   1751  1.1  mrg 	fprintf (dump_file, "   - Recording kill\n");
   1752  1.1  mrg       if (t->m_summary)
   1753  1.1  mrg 	modref_access_node::insert_kill (t->m_summary->kills, a, false);
   1754  1.1  mrg       if (t->m_summary_lto)
   1755  1.1  mrg 	modref_access_node::insert_kill (t->m_summary_lto->kills, a, false);
   1756  1.1  mrg     }
   1757  1.1  mrg   return false;
   1758  1.1  mrg }
   1759  1.1  mrg 
/* Analyze a single statement STMT.  ALWAYS_EXECUTED is true when STMT is
   known to be executed on every invocation of the function; this enables
   kill tracking.  */

void
modref_access_analysis::analyze_stmt (gimple *stmt, bool always_executed)
{
  m_always_executed = always_executed;
  /* In general we can not ignore clobbers because they are barriers for code
     motion, however after inlining it is safe to do because local optimization
     passes do not consider clobbers from other functions.
     Similar logic is in ipa-pure-const.cc.  */
  if ((m_ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    {
      /* An always-executed clobber still kills the previous contents of
	 the location even though the access itself is ignored.  */
      if (always_executed && record_access_p (gimple_assign_lhs (stmt)))
	{
	  ao_ref r;
	  ao_ref_init (&r, gimple_assign_lhs (stmt));
	  modref_access_node a = get_access (&r);
	  if (a.useful_for_kill_p ())
	    {
	      if (dump_file)
		fprintf (dump_file, "   - Recording kill\n");
	      if (m_summary)
		modref_access_node::insert_kill (m_summary->kills, a, false);
	      if (m_summary_lto)
		modref_access_node::insert_kill (m_summary_lto->kills,
						 a, false);
	    }
	}
      return;
    }

  /* Analyze all loads and stores in STMT.  */
  walk_stmt_load_store_ops (stmt, this,
			    analyze_load, analyze_store);

  /* Handle statement-kind specific side effects.  */
  switch (gimple_code (stmt))
   {
   case GIMPLE_ASM:
      /* Volatile asms are observable and thus nondeterministic.  */
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	set_nondeterministic ();
      if (cfun->can_throw_non_call_exceptions
	  && stmt_could_throw_p (cfun, stmt))
	set_side_effects ();
     /* If the ASM statement does not read nor write memory, there's nothing
	to do.  Otherwise just give up.  */
     if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
       return;
     if (dump_file)
       fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
	       "which clobbers memory.\n");
     record_unknown_load ();
     record_unknown_store ();
     return;
   case GIMPLE_CALL:
     /* Internal calls and the non-IPA pass are analyzed immediately; in
	IPA mode side effects of real calls are merged during IPA
	propagation, so only the fnspec is recorded on the edge here.  */
     if (!m_ipa || gimple_call_internal_p (stmt))
       analyze_call (as_a <gcall *> (stmt));
     else
       {
	 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));

	 if (fnspec.known_p ()
	     && (!fnspec.global_memory_read_p ()
		 || !fnspec.global_memory_written_p ()))
	   {
	     cgraph_edge *e = cgraph_node::get
				  (current_function_decl)->get_edge (stmt);
	     if (e->callee)
	       {
		 fnspec_summaries->get_create (e)->fnspec
			  = xstrdup (fnspec.get_str ());
		 if (dump_file)
		   fprintf (dump_file, "  Recorded fnspec %s\n",
			    fnspec.get_str ());
	       }
	   }
       }
     return;
   default:
     /* Any other statement may have side effects only if it can throw
	externally visible exceptions.  */
     if (cfun->can_throw_non_call_exceptions
	 && stmt_could_throw_p (cfun, stmt))
	set_side_effects ();
     return;
   }
}
   1845  1.1  mrg 
   1846  1.1  mrg /* Propagate load/stores across recursive calls.  */
   1847  1.1  mrg 
   1848  1.1  mrg void
   1849  1.1  mrg modref_access_analysis::propagate ()
   1850  1.1  mrg {
   1851  1.1  mrg   if (m_ipa && m_summary)
   1852  1.1  mrg     return;
   1853  1.1  mrg 
   1854  1.1  mrg   bool changed = true;
   1855  1.1  mrg   bool first = true;
   1856  1.1  mrg   cgraph_node *fnode = cgraph_node::get (current_function_decl);
   1857  1.1  mrg 
   1858  1.1  mrg   m_always_executed = false;
   1859  1.1  mrg   while (changed && m_summary->useful_p (m_ecf_flags, false))
   1860  1.1  mrg     {
   1861  1.1  mrg       changed = false;
   1862  1.1  mrg       for (unsigned i = 0; i < m_recursive_calls.length (); i++)
   1863  1.1  mrg 	{
   1864  1.1  mrg 	  changed |= merge_call_side_effects (m_recursive_calls[i], m_summary,
   1865  1.1  mrg 					      fnode, !first);
   1866  1.1  mrg 	}
   1867  1.1  mrg       first = false;
   1868  1.1  mrg     }
   1869  1.1  mrg }
   1870  1.1  mrg 
/* Analyze the current function: walk every statement of every basic block,
   recording loads, stores, kills and side effects into the summaries, then
   run the recursive-call dataflow and finiteness checks.  Analysis stops
   early once the summaries can no longer be useful.  */

void
modref_access_analysis::analyze ()
{
  m_ecf_flags = flags_from_decl_or_type (current_function_decl);
  bool summary_useful = true;

  /* Analyze each statement in each basic block of the function.  If the
     statement cannot be analyzed (for any reason), the entire function cannot
     be analyzed by modref.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator si;
      /* Only the block directly following the entry block is known to
	 execute on every invocation.  */
      bool always_executed
	      = bb == single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;

      for (si = gsi_start_nondebug_after_labels_bb (bb);
	   !gsi_end_p (si); gsi_next_nondebug (&si))
	{
	  /* NULL memory accesses terminates BB.  These accesses are known
	     to trip undefined behavior.  gimple-ssa-isolate-paths turns them
	     to volatile accesses and adds builtin_trap call which would
	     confuse us otherwise.  */
	  if (infer_nonnull_range_by_dereference (gsi_stmt (si),
						  null_pointer_node))
	    {
	      if (dump_file)
		fprintf (dump_file, " - NULL memory access; terminating BB\n");
	      if (flag_non_call_exceptions)
		set_side_effects ();
	      break;
	    }
	  analyze_stmt (gsi_stmt (si), always_executed);

	  /* Avoid doing useless work.  */
	  if ((!m_summary || !m_summary->useful_p (m_ecf_flags, false))
	      && (!m_summary_lto
		  || !m_summary_lto->useful_p (m_ecf_flags, false)))
	    {
	      summary_useful = false;
	      break;
	    }
	  /* Statements past one that may throw externally are no longer
	     guaranteed to execute.  */
	  if (always_executed
	      && stmt_can_throw_external (cfun, gsi_stmt (si)))
	    always_executed = false;
	}
      if (!summary_useful)
	break;
    }
  /* In non-IPA mode we need to perform iterative dataflow on recursive calls.
     This needs to be done after all other side effects are computed.  */
  if (summary_useful)
    {
      if (!m_ipa)
	propagate ();
      /* Functions that may not terminate have side effects even if they
	 touch no memory.  */
      if (m_summary && !m_summary->side_effects && !finite_function_p ())
	m_summary->side_effects = true;
      if (m_summary_lto && !m_summary_lto->side_effects
	  && !finite_function_p ())
	m_summary_lto->side_effects = true;
    }
}
   1935  1.1  mrg 
   1936  1.1  mrg /* Return true if OP accesses memory pointed to by SSA_NAME.  */
   1937  1.1  mrg 
   1938  1.1  mrg bool
   1939  1.1  mrg memory_access_to (tree op, tree ssa_name)
   1940  1.1  mrg {
   1941  1.1  mrg   tree base = get_base_address (op);
   1942  1.1  mrg   if (!base)
   1943  1.1  mrg     return false;
   1944  1.1  mrg   if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
   1945  1.1  mrg     return false;
   1946  1.1  mrg   return TREE_OPERAND (base, 0) == ssa_name;
   1947  1.1  mrg }
   1948  1.1  mrg 
   1949  1.1  mrg /* Consider statement val = *arg.
   1950  1.1  mrg    return EAF flags of ARG that can be determined from EAF flags of VAL
   1951  1.1  mrg    (which are known to be FLAGS).  If IGNORE_STORES is true we can ignore
   1952  1.1  mrg    all stores to VAL, i.e. when handling noreturn function.  */
   1953  1.1  mrg 
   1954  1.1  mrg static int
   1955  1.1  mrg deref_flags (int flags, bool ignore_stores)
   1956  1.1  mrg {
   1957  1.1  mrg   /* Dereference is also a direct read but dereferenced value does not
   1958  1.1  mrg      yield any other direct use.  */
   1959  1.1  mrg   int ret = EAF_NO_DIRECT_CLOBBER | EAF_NO_DIRECT_ESCAPE
   1960  1.1  mrg 	    | EAF_NOT_RETURNED_DIRECTLY;
   1961  1.1  mrg   /* If argument is unused just account for
   1962  1.1  mrg      the read involved in dereference.  */
   1963  1.1  mrg   if (flags & EAF_UNUSED)
   1964  1.1  mrg     ret |= EAF_NO_INDIRECT_READ | EAF_NO_INDIRECT_CLOBBER
   1965  1.1  mrg 	   | EAF_NO_INDIRECT_ESCAPE;
   1966  1.1  mrg   else
   1967  1.1  mrg     {
   1968  1.1  mrg       /* Direct or indirect accesses leads to indirect accesses.  */
   1969  1.1  mrg       if (((flags & EAF_NO_DIRECT_CLOBBER)
   1970  1.1  mrg 	   && (flags & EAF_NO_INDIRECT_CLOBBER))
   1971  1.1  mrg 	  || ignore_stores)
   1972  1.1  mrg 	ret |= EAF_NO_INDIRECT_CLOBBER;
   1973  1.1  mrg       if (((flags & EAF_NO_DIRECT_ESCAPE)
   1974  1.1  mrg 	   && (flags & EAF_NO_INDIRECT_ESCAPE))
   1975  1.1  mrg 	  || ignore_stores)
   1976  1.1  mrg 	ret |= EAF_NO_INDIRECT_ESCAPE;
   1977  1.1  mrg       if ((flags & EAF_NO_DIRECT_READ)
   1978  1.1  mrg 	   && (flags & EAF_NO_INDIRECT_READ))
   1979  1.1  mrg 	ret |= EAF_NO_INDIRECT_READ;
   1980  1.1  mrg       if ((flags & EAF_NOT_RETURNED_DIRECTLY)
   1981  1.1  mrg 	  && (flags & EAF_NOT_RETURNED_INDIRECTLY))
   1982  1.1  mrg 	ret |= EAF_NOT_RETURNED_INDIRECTLY;
   1983  1.1  mrg     }
   1984  1.1  mrg   return ret;
   1985  1.1  mrg }
   1986  1.1  mrg 
   1987  1.1  mrg 
   1988  1.1  mrg /* Description of an escape point: a call which affects flags of a given
   1989  1.1  mrg    SSA name.  */
   1990  1.1  mrg 
   1991  1.1  mrg struct escape_point
   1992  1.1  mrg {
   1993  1.1  mrg   /* Value escapes to this call.  */
   1994  1.1  mrg   gcall *call;
   1995  1.1  mrg   /* Argument it escapes to.  */
   1996  1.1  mrg   int arg;
   1997  1.1  mrg   /* Flags already known about the argument (this can save us from recording
   1998  1.1  mrg      escape points if local analysis did good job already).  */
   1999  1.1  mrg   eaf_flags_t min_flags;
   2000  1.1  mrg   /* Does value escape directly or indirectly?  */
   2001  1.1  mrg   bool direct;
   2002  1.1  mrg };
   2003  1.1  mrg 
   2004  1.1  mrg /* Lattice used during the eaf flags analysis dataflow.  For a given SSA name
   2005  1.1  mrg    we aim to compute its flags and escape points.  We also use the lattice
   2006  1.1  mrg    to dynamically build dataflow graph to propagate on.  */
   2007  1.1  mrg 
   2008  1.1  mrg class modref_lattice
   2009  1.1  mrg {
   2010  1.1  mrg public:
   2011  1.1  mrg   /* EAF flags of the SSA name.  */
   2012  1.1  mrg   eaf_flags_t flags;
   2013  1.1  mrg   /* Used during DFS walk to mark names where final value was determined
   2014  1.1  mrg      without need for dataflow.  */
   2015  1.1  mrg   bool known;
   2016  1.1  mrg   /* Used during DFS walk to mark open vertices (for cycle detection).  */
   2017  1.1  mrg   bool open;
   2018  1.1  mrg   /* Set during DFS walk for names that needs dataflow propagation.  */
   2019  1.1  mrg   bool do_dataflow;
   2020  1.1  mrg   /* Used during the iterative dataflow.  */
   2021  1.1  mrg   bool changed;
   2022  1.1  mrg 
   2023  1.1  mrg   /* When doing IPA analysis we can not merge in callee escape points;
   2024  1.1  mrg      Only remember them and do the merging at IPA propagation time.  */
   2025  1.1  mrg   vec <escape_point, va_heap, vl_ptr> escape_points;
   2026  1.1  mrg 
   2027  1.1  mrg   /* Representation of a graph for dataflow.  This graph is built on-demand
   2028  1.1  mrg      using modref_eaf_analysis::analyze_ssa and later solved by
   2029  1.1  mrg      modref_eaf_analysis::propagate.
   2030  1.1  mrg      Each edge represents the fact that flags of current lattice should be
   2031  1.1  mrg      propagated to lattice of SSA_NAME.  */
   2032  1.1  mrg   struct propagate_edge
   2033  1.1  mrg   {
   2034  1.1  mrg     int ssa_name;
   2035  1.1  mrg     bool deref;
   2036  1.1  mrg   };
   2037  1.1  mrg   vec <propagate_edge, va_heap, vl_ptr> propagate_to;
   2038  1.1  mrg 
   2039  1.1  mrg   void init ();
   2040  1.1  mrg   void release ();
   2041  1.1  mrg   bool merge (const modref_lattice &with);
   2042  1.1  mrg   bool merge (int flags);
   2043  1.1  mrg   bool merge_deref (const modref_lattice &with, bool ignore_stores);
   2044  1.1  mrg   bool merge_direct_load ();
   2045  1.1  mrg   bool merge_direct_store ();
   2046  1.1  mrg   bool add_escape_point (gcall *call, int arg, int min_flags, bool diret);
   2047  1.1  mrg   void dump (FILE *out, int indent = 0) const;
   2048  1.1  mrg };
   2049  1.1  mrg 
   2050  1.1  mrg /* Lattices are saved to vectors, so keep them PODs.  */
   2051  1.1  mrg void
   2052  1.1  mrg modref_lattice::init ()
   2053  1.1  mrg {
   2054  1.1  mrg   /* All flags we track.  */
   2055  1.1  mrg   int f = EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER
   2056  1.1  mrg 	  | EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE
   2057  1.1  mrg 	  | EAF_NO_DIRECT_READ | EAF_NO_INDIRECT_READ
   2058  1.1  mrg 	  | EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY
   2059  1.1  mrg 	  | EAF_UNUSED;
   2060  1.1  mrg   flags = f;
   2061  1.1  mrg   /* Check that eaf_flags_t is wide enough to hold all flags.  */
   2062  1.1  mrg   gcc_checking_assert (f == flags);
   2063  1.1  mrg   open = true;
   2064  1.1  mrg   known = false;
   2065  1.1  mrg }
   2066  1.1  mrg 
   2067  1.1  mrg /* Release memory.  */
   2068  1.1  mrg void
   2069  1.1  mrg modref_lattice::release ()
   2070  1.1  mrg {
   2071  1.1  mrg   escape_points.release ();
   2072  1.1  mrg   propagate_to.release ();
   2073  1.1  mrg }
   2074  1.1  mrg 
   2075  1.1  mrg /* Dump lattice to OUT; indent with INDENT spaces.  */
   2076  1.1  mrg 
   2077  1.1  mrg void
   2078  1.1  mrg modref_lattice::dump (FILE *out, int indent) const
   2079  1.1  mrg {
   2080  1.1  mrg   dump_eaf_flags (out, flags);
   2081  1.1  mrg   if (escape_points.length ())
   2082  1.1  mrg     {
   2083  1.1  mrg       fprintf (out, "%*sEscapes:\n", indent, "");
   2084  1.1  mrg       for (unsigned int i = 0; i < escape_points.length (); i++)
   2085  1.1  mrg 	{
   2086  1.1  mrg 	  fprintf (out, "%*s  Arg %i (%s) min flags", indent, "",
   2087  1.1  mrg 		   escape_points[i].arg,
   2088  1.1  mrg 		   escape_points[i].direct ? "direct" : "indirect");
   2089  1.1  mrg 	  dump_eaf_flags (out, escape_points[i].min_flags, false);
   2090  1.1  mrg 	  fprintf (out, " in call ");
   2091  1.1  mrg 	  print_gimple_stmt (out, escape_points[i].call, 0);
   2092  1.1  mrg 	}
   2093  1.1  mrg     }
   2094  1.1  mrg }
   2095  1.1  mrg 
   2096  1.1  mrg /* Add escape point CALL, ARG, MIN_FLAGS, DIRECT.  Return false if such escape
   2097  1.1  mrg    point exists.  */
   2098  1.1  mrg 
   2099  1.1  mrg bool
   2100  1.1  mrg modref_lattice::add_escape_point (gcall *call, int arg, int min_flags,
   2101  1.1  mrg 				  bool direct)
   2102  1.1  mrg {
   2103  1.1  mrg   escape_point *ep;
   2104  1.1  mrg   unsigned int i;
   2105  1.1  mrg 
   2106  1.1  mrg   /* If we already determined flags to be bad enough,
   2107  1.1  mrg      we do not need to record.  */
   2108  1.1  mrg   if ((flags & min_flags) == flags || (min_flags & EAF_UNUSED))
   2109  1.1  mrg     return false;
   2110  1.1  mrg 
   2111  1.1  mrg   FOR_EACH_VEC_ELT (escape_points, i, ep)
   2112  1.1  mrg     if (ep->call == call && ep->arg == arg && ep->direct == direct)
   2113  1.1  mrg       {
   2114  1.1  mrg 	if ((ep->min_flags & min_flags) == min_flags)
   2115  1.1  mrg 	  return false;
   2116  1.1  mrg 	ep->min_flags &= min_flags;
   2117  1.1  mrg 	return true;
   2118  1.1  mrg       }
   2119  1.1  mrg   /* Give up if max escape points is met.  */
   2120  1.1  mrg   if ((int)escape_points.length () > param_modref_max_escape_points)
   2121  1.1  mrg     {
   2122  1.1  mrg       if (dump_file)
   2123  1.1  mrg 	fprintf (dump_file, "--param modref-max-escape-points limit reached\n");
   2124  1.1  mrg       merge (0);
   2125  1.1  mrg       return true;
   2126  1.1  mrg     }
   2127  1.1  mrg   escape_point new_ep = {call, arg, min_flags, direct};
   2128  1.1  mrg   escape_points.safe_push (new_ep);
   2129  1.1  mrg   return true;
   2130  1.1  mrg }
   2131  1.1  mrg 
   2132  1.1  mrg /* Merge in flags from F.  */
   2133  1.1  mrg bool
   2134  1.1  mrg modref_lattice::merge (int f)
   2135  1.1  mrg {
   2136  1.1  mrg   if (f & EAF_UNUSED)
   2137  1.1  mrg     return false;
   2138  1.1  mrg   /* Check that flags seems sane: if function does not read the parameter
   2139  1.1  mrg      it can not access it indirectly.  */
   2140  1.1  mrg   gcc_checking_assert (!(f & EAF_NO_DIRECT_READ)
   2141  1.1  mrg 		       || ((f & EAF_NO_INDIRECT_READ)
   2142  1.1  mrg 			   && (f & EAF_NO_INDIRECT_CLOBBER)
   2143  1.1  mrg 			   && (f & EAF_NO_INDIRECT_ESCAPE)
   2144  1.1  mrg 			   && (f & EAF_NOT_RETURNED_INDIRECTLY)));
   2145  1.1  mrg   if ((flags & f) != flags)
   2146  1.1  mrg     {
   2147  1.1  mrg       flags &= f;
   2148  1.1  mrg       /* Prune obviously useless flags;
   2149  1.1  mrg 	 We do not have ECF_FLAGS handy which is not big problem since
   2150  1.1  mrg 	 we will do final flags cleanup before producing summary.
   2151  1.1  mrg 	 Merging should be fast so it can work well with dataflow.  */
   2152  1.1  mrg       flags = remove_useless_eaf_flags (flags, 0, false);
   2153  1.1  mrg       if (!flags)
   2154  1.1  mrg 	escape_points.release ();
   2155  1.1  mrg       return true;
   2156  1.1  mrg     }
   2157  1.1  mrg   return false;
   2158  1.1  mrg }
   2159  1.1  mrg 
   2160  1.1  mrg /* Merge in WITH.  Return true if anything changed.  */
   2161  1.1  mrg 
   2162  1.1  mrg bool
   2163  1.1  mrg modref_lattice::merge (const modref_lattice &with)
   2164  1.1  mrg {
   2165  1.1  mrg   if (!with.known)
   2166  1.1  mrg     do_dataflow = true;
   2167  1.1  mrg 
   2168  1.1  mrg   bool changed = merge (with.flags);
   2169  1.1  mrg 
   2170  1.1  mrg   if (!flags)
   2171  1.1  mrg     return changed;
   2172  1.1  mrg   for (unsigned int i = 0; i < with.escape_points.length (); i++)
   2173  1.1  mrg     changed |= add_escape_point (with.escape_points[i].call,
   2174  1.1  mrg 				 with.escape_points[i].arg,
   2175  1.1  mrg 				 with.escape_points[i].min_flags,
   2176  1.1  mrg 				 with.escape_points[i].direct);
   2177  1.1  mrg   return changed;
   2178  1.1  mrg }
   2179  1.1  mrg 
   2180  1.1  mrg /* Merge in deref of WITH.  If IGNORE_STORES is true do not consider
   2181  1.1  mrg    stores.  Return true if anything changed.  */
   2182  1.1  mrg 
   2183  1.1  mrg bool
   2184  1.1  mrg modref_lattice::merge_deref (const modref_lattice &with, bool ignore_stores)
   2185  1.1  mrg {
   2186  1.1  mrg   if (!with.known)
   2187  1.1  mrg     do_dataflow = true;
   2188  1.1  mrg 
   2189  1.1  mrg   bool changed = merge (deref_flags (with.flags, ignore_stores));
   2190  1.1  mrg 
   2191  1.1  mrg   if (!flags)
   2192  1.1  mrg     return changed;
   2193  1.1  mrg   for (unsigned int i = 0; i < with.escape_points.length (); i++)
   2194  1.1  mrg     {
   2195  1.1  mrg       int min_flags = with.escape_points[i].min_flags;
   2196  1.1  mrg 
   2197  1.1  mrg       if (with.escape_points[i].direct)
   2198  1.1  mrg 	min_flags = deref_flags (min_flags, ignore_stores);
   2199  1.1  mrg       else if (ignore_stores)
   2200  1.1  mrg 	min_flags |= ignore_stores_eaf_flags;
   2201  1.1  mrg       changed |= add_escape_point (with.escape_points[i].call,
   2202  1.1  mrg 				   with.escape_points[i].arg,
   2203  1.1  mrg 				   min_flags,
   2204  1.1  mrg 				   false);
   2205  1.1  mrg     }
   2206  1.1  mrg   return changed;
   2207  1.1  mrg }
   2208  1.1  mrg 
   2209  1.1  mrg /* Merge in flags for direct load.  */
   2210  1.1  mrg 
   2211  1.1  mrg bool
   2212  1.1  mrg modref_lattice::merge_direct_load ()
   2213  1.1  mrg {
   2214  1.1  mrg   return merge (~(EAF_UNUSED | EAF_NO_DIRECT_READ));
   2215  1.1  mrg }
   2216  1.1  mrg 
   2217  1.1  mrg /* Merge in flags for direct store.  */
   2218  1.1  mrg 
   2219  1.1  mrg bool
   2220  1.1  mrg modref_lattice::merge_direct_store ()
   2221  1.1  mrg {
   2222  1.1  mrg   return merge (~(EAF_UNUSED | EAF_NO_DIRECT_CLOBBER));
   2223  1.1  mrg }
   2224  1.1  mrg 
   2225  1.1  mrg /* Analyzer of EAF flags.
   2226  1.1  mrg    This is generally dataflow problem over the SSA graph, however we only
   2227  1.1  mrg    care about flags of few selected ssa names (arguments, return slot and
   2228  1.1  mrg    static chain).  So we first call analyze_ssa_name on all relevant names
   2229  1.1  mrg    and perform a DFS walk to discover SSA names where flags needs to be
   2230  1.1  mrg    determined.  For acyclic graphs we try to determine final flags during
   2231  1.1  mrg    this walk.  Once cycles or recursion depth is met we enlist SSA names
   2232  1.1  mrg    for dataflow which is done by propagate call.
   2233  1.1  mrg 
   2234  1.1  mrg    After propagation the flags can be obtained using get_ssa_name_flags.  */
   2235  1.1  mrg 
   2236  1.1  mrg class modref_eaf_analysis
   2237  1.1  mrg {
   2238  1.1  mrg public:
   2239  1.1  mrg   /* Mark NAME as relevant for analysis.  */
   2240  1.1  mrg   void analyze_ssa_name (tree name, bool deferred = false);
   2241  1.1  mrg   /* Dataflow solver.  */
   2242  1.1  mrg   void propagate ();
   2243  1.1  mrg   /* Return flags computed earlier for NAME.  */
   2244  1.1  mrg   int get_ssa_name_flags (tree name)
   2245  1.1  mrg   {
   2246  1.1  mrg     int version = SSA_NAME_VERSION (name);
   2247  1.1  mrg     gcc_checking_assert (m_lattice[version].known);
   2248  1.1  mrg     return m_lattice[version].flags;
   2249  1.1  mrg   }
   2250  1.1  mrg   /* In IPA mode this will record all escape points
   2251  1.1  mrg      determined for NAME to PARM_IDNEX.  Flags are minimal
   2252  1.1  mrg      flags known.  */
   2253  1.1  mrg   void record_escape_points (tree name, int parm_index, int flags);
   2254  1.1  mrg   modref_eaf_analysis (bool ipa)
   2255  1.1  mrg   {
   2256  1.1  mrg     m_ipa = ipa;
   2257  1.1  mrg     m_depth = 0;
   2258  1.1  mrg     m_lattice.safe_grow_cleared (num_ssa_names, true);
   2259  1.1  mrg   }
   2260  1.1  mrg   ~modref_eaf_analysis ()
   2261  1.1  mrg   {
   2262  1.1  mrg     gcc_checking_assert (!m_depth);
   2263  1.1  mrg     if (m_ipa || m_names_to_propagate.length ())
   2264  1.1  mrg       for (unsigned int i = 0; i < num_ssa_names; i++)
   2265  1.1  mrg 	m_lattice[i].release ();
   2266  1.1  mrg   }
   2267  1.1  mrg private:
   2268  1.1  mrg   /* If true, we produce analysis for IPA mode.  In this case escape points are
   2269  1.1  mrg      collected.  */
   2270  1.1  mrg   bool m_ipa;
   2271  1.1  mrg   /* Depth of recursion of analyze_ssa_name.  */
   2272  1.1  mrg   int m_depth;
   2273  1.1  mrg   /* Propagation lattice for individual ssa names.  */
   2274  1.1  mrg   auto_vec<modref_lattice> m_lattice;
   2275  1.1  mrg   auto_vec<tree> m_deferred_names;
   2276  1.1  mrg   auto_vec<int> m_names_to_propagate;
   2277  1.1  mrg 
   2278  1.1  mrg   void merge_with_ssa_name (tree dest, tree src, bool deref);
   2279  1.1  mrg   void merge_call_lhs_flags (gcall *call, int arg, tree name, bool direct,
   2280  1.1  mrg 			     bool deref);
   2281  1.1  mrg };
   2282  1.1  mrg 
   2283  1.1  mrg 
   2284  1.1  mrg /* Call statements may return their parameters.  Consider argument number
   2285  1.1  mrg    ARG of USE_STMT and determine flags that can needs to be cleared
   2286  1.1  mrg    in case pointer possibly indirectly references from ARG I is returned.
   2287  1.1  mrg    If DIRECT is true consider direct returns and if INDIRECT consider
   2288  1.1  mrg    indirect returns.
   2289  1.1  mrg    LATTICE, DEPTH and ipa are same as in analyze_ssa_name.
   2290  1.1  mrg    ARG is set to -1 for static chain.  */
   2291  1.1  mrg 
   2292  1.1  mrg void
   2293  1.1  mrg modref_eaf_analysis::merge_call_lhs_flags (gcall *call, int arg,
   2294  1.1  mrg 					   tree name, bool direct,
   2295  1.1  mrg 					   bool indirect)
   2296  1.1  mrg {
   2297  1.1  mrg   int index = SSA_NAME_VERSION (name);
   2298  1.1  mrg   bool returned_directly = false;
   2299  1.1  mrg 
   2300  1.1  mrg   /* If there is no return value, no flags are affected.  */
   2301  1.1  mrg   if (!gimple_call_lhs (call))
   2302  1.1  mrg     return;
   2303  1.1  mrg 
   2304  1.1  mrg   /* If we know that function returns given argument and it is not ARG
   2305  1.1  mrg      we can still be happy.  */
   2306  1.1  mrg   if (arg >= 0)
   2307  1.1  mrg     {
   2308  1.1  mrg       int flags = gimple_call_return_flags (call);
   2309  1.1  mrg       if (flags & ERF_RETURNS_ARG)
   2310  1.1  mrg 	{
   2311  1.1  mrg 	  if ((flags & ERF_RETURN_ARG_MASK) == arg)
   2312  1.1  mrg 	    returned_directly = true;
   2313  1.1  mrg 	  else
   2314  1.1  mrg 	   return;
   2315  1.1  mrg 	}
   2316  1.1  mrg     }
   2317  1.1  mrg   /* Make ERF_RETURNS_ARG overwrite EAF_UNUSED.  */
   2318  1.1  mrg   if (returned_directly)
   2319  1.1  mrg     {
   2320  1.1  mrg       direct = true;
   2321  1.1  mrg       indirect = false;
   2322  1.1  mrg     }
   2323  1.1  mrg   /* If value is not returned at all, do nothing.  */
   2324  1.1  mrg   else if (!direct && !indirect)
   2325  1.1  mrg     return;
   2326  1.1  mrg 
   2327  1.1  mrg   /* If return value is SSA name determine its flags.  */
   2328  1.1  mrg   if (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME)
   2329  1.1  mrg     {
   2330  1.1  mrg       tree lhs = gimple_call_lhs (call);
   2331  1.1  mrg       if (direct)
   2332  1.1  mrg 	merge_with_ssa_name (name, lhs, false);
   2333  1.1  mrg       if (indirect)
   2334  1.1  mrg 	merge_with_ssa_name (name, lhs, true);
   2335  1.1  mrg     }
   2336  1.1  mrg   /* In the case of memory store we can do nothing.  */
   2337  1.1  mrg   else if (!direct)
   2338  1.1  mrg     m_lattice[index].merge (deref_flags (0, false));
   2339  1.1  mrg   else
   2340  1.1  mrg     m_lattice[index].merge (0);
   2341  1.1  mrg }
   2342  1.1  mrg 
   2343  1.1  mrg /* CALL_FLAGS are EAF_FLAGS of the argument.  Turn them
   2344  1.1  mrg    into flags for caller, update LATTICE of corresponding
   2345  1.1  mrg    argument if needed.  */
   2346  1.1  mrg 
   2347  1.1  mrg static int
   2348  1.1  mrg callee_to_caller_flags (int call_flags, bool ignore_stores,
   2349  1.1  mrg 			modref_lattice &lattice)
   2350  1.1  mrg {
   2351  1.1  mrg   /* call_flags is about callee returning a value
   2352  1.1  mrg      that is not the same as caller returning it.  */
   2353  1.1  mrg   call_flags |= EAF_NOT_RETURNED_DIRECTLY
   2354  1.1  mrg 		| EAF_NOT_RETURNED_INDIRECTLY;
   2355  1.1  mrg   if (!ignore_stores && !(call_flags & EAF_UNUSED))
   2356  1.1  mrg     {
   2357  1.1  mrg       /* If value escapes we are no longer able to track what happens
   2358  1.1  mrg 	 with it because we can read it from the escaped location
   2359  1.1  mrg 	 anytime.  */
   2360  1.1  mrg       if (!(call_flags & EAF_NO_DIRECT_ESCAPE))
   2361  1.1  mrg 	lattice.merge (0);
   2362  1.1  mrg       else if (!(call_flags & EAF_NO_INDIRECT_ESCAPE))
   2363  1.1  mrg 	lattice.merge (~(EAF_NOT_RETURNED_INDIRECTLY
   2364  1.1  mrg 			 | EAF_NO_DIRECT_READ
   2365  1.1  mrg 			 | EAF_NO_INDIRECT_READ
   2366  1.1  mrg 			 | EAF_NO_INDIRECT_CLOBBER
   2367  1.1  mrg 			 | EAF_UNUSED));
   2368  1.1  mrg     }
   2369  1.1  mrg   else
   2370  1.1  mrg     call_flags |= ignore_stores_eaf_flags;
   2371  1.1  mrg   return call_flags;
   2372  1.1  mrg }
   2373  1.1  mrg 
   2374  1.1  mrg /* Analyze EAF flags for SSA name NAME and store result to LATTICE.
   2375  1.1  mrg    LATTICE is an array of modref_lattices.
   2376  1.1  mrg    DEPTH is a recursion depth used to make debug output prettier.
   2377  1.1  mrg    If IPA is true we analyze for IPA propagation (and thus call escape points
   2378  1.1  mrg    are processed later)  */
   2379  1.1  mrg 
   2380  1.1  mrg void
   2381  1.1  mrg modref_eaf_analysis::analyze_ssa_name (tree name, bool deferred)
   2382  1.1  mrg {
   2383  1.1  mrg   imm_use_iterator ui;
   2384  1.1  mrg   gimple *use_stmt;
   2385  1.1  mrg   int index = SSA_NAME_VERSION (name);
   2386  1.1  mrg 
   2387  1.1  mrg   if (!deferred)
   2388  1.1  mrg     {
   2389  1.1  mrg       /* See if value is already computed.  */
   2390  1.1  mrg       if (m_lattice[index].known || m_lattice[index].do_dataflow)
   2391  1.1  mrg        return;
   2392  1.1  mrg       if (m_lattice[index].open)
   2393  1.1  mrg 	{
   2394  1.1  mrg 	  if (dump_file)
   2395  1.1  mrg 	    fprintf (dump_file,
   2396  1.1  mrg 		     "%*sCycle in SSA graph\n",
   2397  1.1  mrg 		     m_depth * 4, "");
   2398  1.1  mrg 	  return;
   2399  1.1  mrg 	}
   2400  1.1  mrg       /* Recursion guard.  */
   2401  1.1  mrg       m_lattice[index].init ();
   2402  1.1  mrg       if (m_depth == param_modref_max_depth)
   2403  1.1  mrg 	{
   2404  1.1  mrg 	  if (dump_file)
   2405  1.1  mrg 	    fprintf (dump_file,
   2406  1.1  mrg 		     "%*sMax recursion depth reached; postponing\n",
   2407  1.1  mrg 		     m_depth * 4, "");
   2408  1.1  mrg 	  m_deferred_names.safe_push (name);
   2409  1.1  mrg 	  return;
   2410  1.1  mrg 	}
   2411  1.1  mrg     }
   2412  1.1  mrg 
   2413  1.1  mrg   if (dump_file)
   2414  1.1  mrg     {
   2415  1.1  mrg       fprintf (dump_file,
   2416  1.1  mrg 	       "%*sAnalyzing flags of ssa name: ", m_depth * 4, "");
   2417  1.1  mrg       print_generic_expr (dump_file, name);
   2418  1.1  mrg       fprintf (dump_file, "\n");
   2419  1.1  mrg     }
   2420  1.1  mrg 
   2421  1.1  mrg   FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)
   2422  1.1  mrg     {
   2423  1.1  mrg       if (m_lattice[index].flags == 0)
   2424  1.1  mrg 	break;
   2425  1.1  mrg       if (is_gimple_debug (use_stmt))
   2426  1.1  mrg 	continue;
   2427  1.1  mrg       if (dump_file)
   2428  1.1  mrg 	{
   2429  1.1  mrg 	  fprintf (dump_file, "%*s  Analyzing stmt: ", m_depth * 4, "");
   2430  1.1  mrg 	  print_gimple_stmt (dump_file, use_stmt, 0);
   2431  1.1  mrg 	}
   2432  1.1  mrg       /* If we see a direct non-debug use, clear unused bit.
   2433  1.1  mrg 	 All dereferences should be accounted below using deref_flags.  */
   2434  1.1  mrg       m_lattice[index].merge (~EAF_UNUSED);
   2435  1.1  mrg 
   2436  1.1  mrg       /* Gimple return may load the return value.
   2437  1.1  mrg 	 Returning name counts as an use by tree-ssa-structalias.cc  */
   2438  1.1  mrg       if (greturn *ret = dyn_cast <greturn *> (use_stmt))
   2439  1.1  mrg 	{
   2440  1.1  mrg 	  /* Returning through return slot is seen as memory write earlier.  */
   2441  1.1  mrg 	  if (DECL_RESULT (current_function_decl)
   2442  1.1  mrg 	      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
   2443  1.1  mrg 	    ;
   2444  1.1  mrg 	  else if (gimple_return_retval (ret) == name)
   2445  1.1  mrg 	    m_lattice[index].merge (~(EAF_UNUSED | EAF_NOT_RETURNED_DIRECTLY
   2446  1.1  mrg 				      | EAF_NOT_RETURNED_DIRECTLY));
   2447  1.1  mrg 	  else if (memory_access_to (gimple_return_retval (ret), name))
   2448  1.1  mrg 	    {
   2449  1.1  mrg 	      m_lattice[index].merge_direct_load ();
   2450  1.1  mrg 	      m_lattice[index].merge (~(EAF_UNUSED
   2451  1.1  mrg 					| EAF_NOT_RETURNED_INDIRECTLY));
   2452  1.1  mrg 	    }
   2453  1.1  mrg 	}
   2454  1.1  mrg       /* Account for LHS store, arg loads and flags from callee function.  */
   2455  1.1  mrg       else if (gcall *call = dyn_cast <gcall *> (use_stmt))
   2456  1.1  mrg 	{
   2457  1.1  mrg 	  tree callee = gimple_call_fndecl (call);
   2458  1.1  mrg 
   2459  1.1  mrg 	  /* IPA PTA internally it treats calling a function as "writing" to
   2460  1.1  mrg 	     the argument space of all functions the function pointer points to
   2461  1.1  mrg 	     (PR101949).  We can not drop EAF_NOCLOBBER only when ipa-pta
   2462  1.1  mrg 	     is on since that would allow propagation of this from -fno-ipa-pta
   2463  1.1  mrg 	     to -fipa-pta functions.  */
   2464  1.1  mrg 	  if (gimple_call_fn (use_stmt) == name)
   2465  1.1  mrg 	    m_lattice[index].merge (~(EAF_NO_DIRECT_CLOBBER | EAF_UNUSED));
   2466  1.1  mrg 
   2467  1.1  mrg 	  /* Recursion would require bit of propagation; give up for now.  */
   2468  1.1  mrg 	  if (callee && !m_ipa && recursive_call_p (current_function_decl,
   2469  1.1  mrg 						  callee))
   2470  1.1  mrg 	    m_lattice[index].merge (0);
   2471  1.1  mrg 	  else
   2472  1.1  mrg 	    {
   2473  1.1  mrg 	      int ecf_flags = gimple_call_flags (call);
   2474  1.1  mrg 	      bool ignore_stores = ignore_stores_p (current_function_decl,
   2475  1.1  mrg 						    ecf_flags);
   2476  1.1  mrg 	      bool ignore_retval = ignore_retval_p (current_function_decl,
   2477  1.1  mrg 						    ecf_flags);
   2478  1.1  mrg 
   2479  1.1  mrg 	      /* Handle *name = func (...).  */
   2480  1.1  mrg 	      if (gimple_call_lhs (call)
   2481  1.1  mrg 		  && memory_access_to (gimple_call_lhs (call), name))
   2482  1.1  mrg 		{
   2483  1.1  mrg 		  m_lattice[index].merge_direct_store ();
   2484  1.1  mrg 		  /* Return slot optimization passes address of
   2485  1.1  mrg 		     LHS to callee via hidden parameter and this
   2486  1.1  mrg 		     may make LHS to escape.  See PR 98499.  */
   2487  1.1  mrg 		  if (gimple_call_return_slot_opt_p (call)
   2488  1.1  mrg 		      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call))))
   2489  1.1  mrg 		    {
   2490  1.1  mrg 		      int call_flags = gimple_call_retslot_flags (call);
   2491  1.1  mrg 		      bool isretslot = false;
   2492  1.1  mrg 
   2493  1.1  mrg 		      if (DECL_RESULT (current_function_decl)
   2494  1.1  mrg 			  && DECL_BY_REFERENCE
   2495  1.1  mrg 				(DECL_RESULT (current_function_decl)))
   2496  1.1  mrg 			isretslot = ssa_default_def
   2497  1.1  mrg 					 (cfun,
   2498  1.1  mrg 					  DECL_RESULT (current_function_decl))
   2499  1.1  mrg 					 == name;
   2500  1.1  mrg 
   2501  1.1  mrg 		      /* Passing returnslot to return slot is special because
   2502  1.1  mrg 			 not_returned and escape has same meaning.
   2503  1.1  mrg 			 However passing arg to return slot is different.  If
   2504  1.1  mrg 			 the callee's return slot is returned it means that
   2505  1.1  mrg 			 arg is written to itself which is an escape.
   2506  1.1  mrg 			 Since we do not track the memory it is written to we
   2507  1.1  mrg 			 need to give up on analyzing it.  */
   2508  1.1  mrg 		      if (!isretslot)
   2509  1.1  mrg 			{
   2510  1.1  mrg 			  if (!(call_flags & (EAF_NOT_RETURNED_DIRECTLY
   2511  1.1  mrg 					      | EAF_UNUSED)))
   2512  1.1  mrg 			    m_lattice[index].merge (0);
   2513  1.1  mrg 			  else gcc_checking_assert
   2514  1.1  mrg 				(call_flags & (EAF_NOT_RETURNED_INDIRECTLY
   2515  1.1  mrg 					       | EAF_UNUSED));
   2516  1.1  mrg 			  call_flags = callee_to_caller_flags
   2517  1.1  mrg 					   (call_flags, false,
   2518  1.1  mrg 					    m_lattice[index]);
   2519  1.1  mrg 			}
   2520  1.1  mrg 		      m_lattice[index].merge (call_flags);
   2521  1.1  mrg 		    }
   2522  1.1  mrg 		}
   2523  1.1  mrg 
   2524  1.1  mrg 	      if (gimple_call_chain (call)
   2525  1.1  mrg 		  && (gimple_call_chain (call) == name))
   2526  1.1  mrg 		{
   2527  1.1  mrg 		  int call_flags = gimple_call_static_chain_flags (call);
   2528  1.1  mrg 		  if (!ignore_retval && !(call_flags & EAF_UNUSED))
   2529  1.1  mrg 		    merge_call_lhs_flags
   2530  1.1  mrg 			 (call, -1, name,
   2531  1.1  mrg 			  !(call_flags & EAF_NOT_RETURNED_DIRECTLY),
   2532  1.1  mrg 			  !(call_flags & EAF_NOT_RETURNED_INDIRECTLY));
   2533  1.1  mrg 		  call_flags = callee_to_caller_flags
   2534  1.1  mrg 				   (call_flags, ignore_stores,
   2535  1.1  mrg 				    m_lattice[index]);
   2536  1.1  mrg 		  if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
   2537  1.1  mrg 		    m_lattice[index].merge (call_flags);
   2538  1.1  mrg 		}
   2539  1.1  mrg 
   2540  1.1  mrg 	      /* Process internal functions and right away.  */
   2541  1.1  mrg 	      bool record_ipa = m_ipa && !gimple_call_internal_p (call);
   2542  1.1  mrg 
   2543  1.1  mrg 	      /* Handle all function parameters.  */
   2544  1.1  mrg 	      for (unsigned i = 0;
   2545  1.1  mrg 		   i < gimple_call_num_args (call)
   2546  1.1  mrg 		   && m_lattice[index].flags; i++)
   2547  1.1  mrg 		/* Name is directly passed to the callee.  */
   2548  1.1  mrg 		if (gimple_call_arg (call, i) == name)
   2549  1.1  mrg 		  {
   2550  1.1  mrg 		    int call_flags = gimple_call_arg_flags (call, i);
   2551  1.1  mrg 		    if (!ignore_retval)
   2552  1.1  mrg 		      merge_call_lhs_flags
   2553  1.1  mrg 			      (call, i, name,
   2554  1.1  mrg 			       !(call_flags & (EAF_NOT_RETURNED_DIRECTLY
   2555  1.1  mrg 					       | EAF_UNUSED)),
   2556  1.1  mrg 			       !(call_flags & (EAF_NOT_RETURNED_INDIRECTLY
   2557  1.1  mrg 					       | EAF_UNUSED)));
   2558  1.1  mrg 		    if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
   2559  1.1  mrg 		      {
   2560  1.1  mrg 			call_flags = callee_to_caller_flags
   2561  1.1  mrg 					 (call_flags, ignore_stores,
   2562  1.1  mrg 					  m_lattice[index]);
   2563  1.1  mrg 			if (!record_ipa)
   2564  1.1  mrg 			  m_lattice[index].merge (call_flags);
   2565  1.1  mrg 			else
   2566  1.1  mrg 			  m_lattice[index].add_escape_point (call, i,
   2567  1.1  mrg 							   call_flags, true);
   2568  1.1  mrg 		      }
   2569  1.1  mrg 		  }
   2570  1.1  mrg 		/* Name is dereferenced and passed to a callee.  */
   2571  1.1  mrg 		else if (memory_access_to (gimple_call_arg (call, i), name))
   2572  1.1  mrg 		  {
   2573  1.1  mrg 		    int call_flags = deref_flags
   2574  1.1  mrg 			    (gimple_call_arg_flags (call, i), ignore_stores);
   2575  1.1  mrg 		    if (!ignore_retval && !(call_flags & EAF_UNUSED)
   2576  1.1  mrg 			&& (call_flags & (EAF_NOT_RETURNED_DIRECTLY
   2577  1.1  mrg 				       	  | EAF_NOT_RETURNED_INDIRECTLY))
   2578  1.1  mrg 			    != (EAF_NOT_RETURNED_DIRECTLY
   2579  1.1  mrg 				| EAF_NOT_RETURNED_INDIRECTLY))
   2580  1.1  mrg 		      merge_call_lhs_flags (call, i, name, false, true);
   2581  1.1  mrg 		    if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
   2582  1.1  mrg 		      m_lattice[index].merge_direct_load ();
   2583  1.1  mrg 		    else
   2584  1.1  mrg 		      {
   2585  1.1  mrg 			call_flags = callee_to_caller_flags
   2586  1.1  mrg 					 (call_flags, ignore_stores,
   2587  1.1  mrg 					  m_lattice[index]);
   2588  1.1  mrg 			if (!record_ipa)
   2589  1.1  mrg 			  m_lattice[index].merge (call_flags);
   2590  1.1  mrg 			else
   2591  1.1  mrg 			  m_lattice[index].add_escape_point (call, i,
   2592  1.1  mrg 							     call_flags, false);
   2593  1.1  mrg 		      }
   2594  1.1  mrg 		  }
   2595  1.1  mrg 	    }
   2596  1.1  mrg 	}
   2597  1.1  mrg       else if (gimple_assign_load_p (use_stmt))
   2598  1.1  mrg 	{
   2599  1.1  mrg 	  gassign *assign = as_a <gassign *> (use_stmt);
   2600  1.1  mrg 	  /* Memory to memory copy.  */
   2601  1.1  mrg 	  if (gimple_store_p (assign))
   2602  1.1  mrg 	    {
   2603  1.1  mrg 	      /* Handle *lhs = *name.
   2604  1.1  mrg 
   2605  1.1  mrg 		 We do not track memory locations, so assume that value
   2606  1.1  mrg 		 is used arbitrarily.  */
   2607  1.1  mrg 	      if (memory_access_to (gimple_assign_rhs1 (assign), name))
   2608  1.1  mrg 		m_lattice[index].merge (deref_flags (0, false));
   2609  1.1  mrg 
   2610  1.1  mrg 	      /* Handle *name = *exp.  */
   2611  1.1  mrg 	      if (memory_access_to (gimple_assign_lhs (assign), name))
   2612  1.1  mrg 		m_lattice[index].merge_direct_store ();
   2613  1.1  mrg 	    }
   2614  1.1  mrg 	  /* Handle lhs = *name.  */
   2615  1.1  mrg 	  else if (memory_access_to (gimple_assign_rhs1 (assign), name))
   2616  1.1  mrg 	    {
   2617  1.1  mrg 	      tree lhs = gimple_assign_lhs (assign);
   2618  1.1  mrg 	      merge_with_ssa_name (name, lhs, true);
   2619  1.1  mrg 	    }
   2620  1.1  mrg 	}
   2621  1.1  mrg       else if (gimple_store_p (use_stmt))
   2622  1.1  mrg 	{
   2623  1.1  mrg 	  gassign *assign = dyn_cast <gassign *> (use_stmt);
   2624  1.1  mrg 
   2625  1.1  mrg 	  /* Handle *lhs = name.  */
   2626  1.1  mrg 	  if (assign && gimple_assign_rhs1 (assign) == name)
   2627  1.1  mrg 	    {
   2628  1.1  mrg 	      if (dump_file)
   2629  1.1  mrg 		fprintf (dump_file, "%*s  ssa name saved to memory\n",
   2630  1.1  mrg 			 m_depth * 4, "");
   2631  1.1  mrg 	      m_lattice[index].merge (0);
   2632  1.1  mrg 	    }
   2633  1.1  mrg 	  /* Handle *name = exp.  */
   2634  1.1  mrg 	  else if (assign
   2635  1.1  mrg 		   && memory_access_to (gimple_assign_lhs (assign), name))
   2636  1.1  mrg 	    {
   2637  1.1  mrg 	      /* In general we can not ignore clobbers because they are
   2638  1.1  mrg 		 barriers for code motion, however after inlining it is safe to
   2639  1.1  mrg 		 do because local optimization passes do not consider clobbers
   2640  1.1  mrg 		 from other functions.
   2641  1.1  mrg 		 Similar logic is in ipa-pure-const.cc.  */
   2642  1.1  mrg 	      if (!cfun->after_inlining || !gimple_clobber_p (assign))
   2643  1.1  mrg 		m_lattice[index].merge_direct_store ();
   2644  1.1  mrg 	    }
   2645  1.1  mrg 	  /* ASM statements etc.  */
   2646  1.1  mrg 	  else if (!assign)
   2647  1.1  mrg 	    {
   2648  1.1  mrg 	      if (dump_file)
   2649  1.1  mrg 		fprintf (dump_file, "%*s  Unhandled store\n", m_depth * 4, "");
   2650  1.1  mrg 	      m_lattice[index].merge (0);
   2651  1.1  mrg 	    }
   2652  1.1  mrg 	}
   2653  1.1  mrg       else if (gassign *assign = dyn_cast <gassign *> (use_stmt))
   2654  1.1  mrg 	{
   2655  1.1  mrg 	  enum tree_code code = gimple_assign_rhs_code (assign);
   2656  1.1  mrg 
   2657  1.1  mrg 	  /* See if operation is a merge as considered by
   2658  1.1  mrg 	     tree-ssa-structalias.cc:find_func_aliases.  */
   2659  1.1  mrg 	  if (!truth_value_p (code)
   2660  1.1  mrg 	      && code != POINTER_DIFF_EXPR
   2661  1.1  mrg 	      && (code != POINTER_PLUS_EXPR
   2662  1.1  mrg 		  || gimple_assign_rhs1 (assign) == name))
   2663  1.1  mrg 	    {
   2664  1.1  mrg 	      tree lhs = gimple_assign_lhs (assign);
   2665  1.1  mrg 	      merge_with_ssa_name (name, lhs, false);
   2666  1.1  mrg 	    }
   2667  1.1  mrg 	}
   2668  1.1  mrg       else if (gphi *phi = dyn_cast <gphi *> (use_stmt))
   2669  1.1  mrg 	{
   2670  1.1  mrg 	  tree result = gimple_phi_result (phi);
   2671  1.1  mrg 	  merge_with_ssa_name (name, result, false);
   2672  1.1  mrg 	}
   2673  1.1  mrg       /* Conditions are not considered escape points
   2674  1.1  mrg 	 by tree-ssa-structalias.  */
   2675  1.1  mrg       else if (gimple_code (use_stmt) == GIMPLE_COND)
   2676  1.1  mrg 	;
   2677  1.1  mrg       else
   2678  1.1  mrg 	{
   2679  1.1  mrg 	  if (dump_file)
   2680  1.1  mrg 	    fprintf (dump_file, "%*s  Unhandled stmt\n", m_depth * 4, "");
   2681  1.1  mrg 	  m_lattice[index].merge (0);
   2682  1.1  mrg 	}
   2683  1.1  mrg 
   2684  1.1  mrg       if (dump_file)
   2685  1.1  mrg 	{
   2686  1.1  mrg 	  fprintf (dump_file, "%*s  current flags of ", m_depth * 4, "");
   2687  1.1  mrg 	  print_generic_expr (dump_file, name);
   2688  1.1  mrg 	  m_lattice[index].dump (dump_file, m_depth * 4 + 4);
   2689  1.1  mrg 	}
   2690  1.1  mrg     }
   2691  1.1  mrg   if (dump_file)
   2692  1.1  mrg     {
   2693  1.1  mrg       fprintf (dump_file, "%*sflags of ssa name ", m_depth * 4, "");
   2694  1.1  mrg       print_generic_expr (dump_file, name);
   2695  1.1  mrg       m_lattice[index].dump (dump_file, m_depth * 4 + 2);
   2696  1.1  mrg     }
   2697  1.1  mrg   m_lattice[index].open = false;
   2698  1.1  mrg   if (!m_lattice[index].do_dataflow)
   2699  1.1  mrg     m_lattice[index].known = true;
   2700  1.1  mrg }
   2701  1.1  mrg 
   2702  1.1  mrg /* Propagate info from SRC to DEST.  If DEREF it true, assume that SRC
   2703  1.1  mrg    is dereferenced.  */
   2704  1.1  mrg 
   2705  1.1  mrg void
   2706  1.1  mrg modref_eaf_analysis::merge_with_ssa_name (tree dest, tree src, bool deref)
   2707  1.1  mrg {
   2708  1.1  mrg   int index = SSA_NAME_VERSION (dest);
   2709  1.1  mrg   int src_index = SSA_NAME_VERSION (src);
   2710  1.1  mrg 
   2711  1.1  mrg   /* Merging lattice with itself is a no-op.  */
   2712  1.1  mrg   if (!deref && src == dest)
   2713  1.1  mrg     return;
   2714  1.1  mrg 
   2715  1.1  mrg   m_depth++;
   2716  1.1  mrg   analyze_ssa_name (src);
   2717  1.1  mrg   m_depth--;
   2718  1.1  mrg   if (deref)
   2719  1.1  mrg     m_lattice[index].merge_deref (m_lattice[src_index], false);
   2720  1.1  mrg   else
   2721  1.1  mrg     m_lattice[index].merge (m_lattice[src_index]);
   2722  1.1  mrg 
   2723  1.1  mrg   /* If we failed to produce final solution add an edge to the dataflow
   2724  1.1  mrg      graph.  */
   2725  1.1  mrg   if (!m_lattice[src_index].known)
   2726  1.1  mrg     {
   2727  1.1  mrg       modref_lattice::propagate_edge e = {index, deref};
   2728  1.1  mrg 
   2729  1.1  mrg       if (!m_lattice[src_index].propagate_to.length ())
   2730  1.1  mrg 	m_names_to_propagate.safe_push (src_index);
   2731  1.1  mrg       m_lattice[src_index].propagate_to.safe_push (e);
   2732  1.1  mrg       m_lattice[src_index].changed = true;
   2733  1.1  mrg       m_lattice[src_index].do_dataflow = true;
   2734  1.1  mrg       if (dump_file)
   2735  1.1  mrg 	fprintf (dump_file,
   2736  1.1  mrg 		 "%*sWill propgate from ssa_name %i to %i%s\n",
   2737  1.1  mrg 		 m_depth * 4 + 4,
   2738  1.1  mrg 		 "", src_index, index, deref ? " (deref)" : "");
   2739  1.1  mrg     }
   2740  1.1  mrg }
   2741  1.1  mrg 
/* In the case we deferred some SSA names, reprocess them.  In the case some
   dataflow edges were introduced, do the actual iterative dataflow.  */

void
modref_eaf_analysis::propagate ()
{
  int iterations = 0;
  size_t i;
  int index;
  bool changed = true;

  /* First finish analysis of SSA names whose processing was deferred
     (to break very deep recursion chains).  */
  while (m_deferred_names.length ())
    {
      tree name = m_deferred_names.pop ();
      if (dump_file)
	fprintf (dump_file, "Analyzing deferred SSA name\n");
      analyze_ssa_name (name, true);
    }

  /* If no dataflow edges were recorded, all lattices are already final.  */
  if (!m_names_to_propagate.length ())
    return;
  if (dump_file)
    fprintf (dump_file, "Propagating EAF flags\n");

  /* Compute reverse postorder.  */
  auto_vec <int> rpo;
  struct stack_entry
  {
    int name;	/* SSA name version being visited.  */
    unsigned pos;	/* Next outgoing edge to explore.  */
  };
  auto_vec <struct stack_entry> stack;
  int pos = m_names_to_propagate.length () - 1;

  rpo.safe_grow (m_names_to_propagate.length (), true);
  stack.reserve_exact (m_names_to_propagate.length ());

  /* We reuse known flag for RPO DFS walk bookkeeping.  */
  if (flag_checking)
    FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
      gcc_assert (!m_lattice[index].known && m_lattice[index].changed);

  /* Iterative (non-recursive) DFS over the propagation edges; names are
     written to RPO back-to-front as they are finished.  */
  FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
    {
      if (!m_lattice[index].known)
	{
	  stack_entry e = {index, 0};

	  stack.quick_push (e);
	  /* Mark as visited; real "known" semantics is restored once the
	     dataflow below converges.  */
	  m_lattice[index].known = true;
	}
      while (stack.length ())
	{
	  bool found = false;
	  int index1 = stack.last ().name;

	  /* Look for an unvisited successor of INDEX1.  */
	  while (stack.last ().pos < m_lattice[index1].propagate_to.length ())
	    {
	      int index2 = m_lattice[index1]
		      .propagate_to[stack.last ().pos].ssa_name;

	      stack.last ().pos++;
	      /* Only names that themselves have outgoing edges need to be
		 placed in RPO; sinks are handled directly by the dataflow.  */
	      if (!m_lattice[index2].known
		  && m_lattice[index2].propagate_to.length ())
		{
		  stack_entry e = {index2, 0};

		  stack.quick_push (e);
		  m_lattice[index2].known = true;
		  found = true;
		  break;
		}
	    }
	  /* All successors explored: record INDEX1 in reverse postorder.  */
	  if (!found
	      && stack.last ().pos == m_lattice[index1].propagate_to.length ())
	    {
	      rpo[pos--] = index1;
	      stack.pop ();
	    }
	}
    }

  /* Perform iterative dataflow.  */
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file)
	fprintf (dump_file, " iteration %i\n", iterations);
      FOR_EACH_VEC_ELT (rpo, i, index)
	{
	  /* Only revisit names whose lattice changed since last visit.  */
	  if (m_lattice[index].changed)
	    {
	      size_t j;

	      m_lattice[index].changed = false;
	      if (dump_file)
		fprintf (dump_file, "  Visiting ssa name %i\n", index);
	      for (j = 0; j < m_lattice[index].propagate_to.length (); j++)
		{
		  bool ch;
		  int target = m_lattice[index].propagate_to[j].ssa_name;
		  bool deref = m_lattice[index].propagate_to[j].deref;

		  if (dump_file)
		    fprintf (dump_file, "   Propagating flags of ssa name"
			     " %i to %i%s\n",
			     index, target, deref ? " (deref)" : "");
		  m_lattice[target].known = true;
		  if (!m_lattice[index].propagate_to[j].deref)
		    ch = m_lattice[target].merge (m_lattice[index]);
		  else
		    ch = m_lattice[target].merge_deref (m_lattice[index],
							false);
		  /* If the merge did not change TARGET, no need to requeue.  */
		  if (!ch)
		    continue;
		  if (dump_file)
		    {
		      fprintf (dump_file, "   New lattice: ");
		      m_lattice[target].dump (dump_file);
		    }
		  changed = true;
		  m_lattice[target].changed = true;
		}
	    }
	}
    }
  if (dump_file)
    fprintf (dump_file, "EAF flags propagated in %i iterations\n", iterations);
}
   2872  1.1  mrg 
   2873  1.1  mrg /* Record escape points of PARM_INDEX according to LATTICE.  */
   2874  1.1  mrg 
   2875  1.1  mrg void
   2876  1.1  mrg modref_eaf_analysis::record_escape_points (tree name, int parm_index, int flags)
   2877  1.1  mrg {
   2878  1.1  mrg   modref_lattice &lattice = m_lattice[SSA_NAME_VERSION (name)];
   2879  1.1  mrg 
   2880  1.1  mrg   if (lattice.escape_points.length ())
   2881  1.1  mrg     {
   2882  1.1  mrg       escape_point *ep;
   2883  1.1  mrg       unsigned int ip;
   2884  1.1  mrg       cgraph_node *node = cgraph_node::get (current_function_decl);
   2885  1.1  mrg 
   2886  1.1  mrg       gcc_assert (m_ipa);
   2887  1.1  mrg       FOR_EACH_VEC_ELT (lattice.escape_points, ip, ep)
   2888  1.1  mrg 	if ((ep->min_flags & flags) != flags)
   2889  1.1  mrg 	  {
   2890  1.1  mrg 	    cgraph_edge *e = node->get_edge (ep->call);
   2891  1.1  mrg 	    struct escape_entry ee = {parm_index, ep->arg,
   2892  1.1  mrg 				      ep->min_flags, ep->direct};
   2893  1.1  mrg 
   2894  1.1  mrg 	    escape_summaries->get_create (e)->esc.safe_push (ee);
   2895  1.1  mrg 	  }
   2896  1.1  mrg     }
   2897  1.1  mrg }
   2898  1.1  mrg 
/* Determine EAF flags for function parameters
   and fill in SUMMARY/SUMMARY_LTO.  If IPA is true work in IPA mode
   where we also collect escape points.
   PAST_FLAGS, PAST_RETSLOT_FLAGS, PAST_STATIC_CHAIN_FLAGS can be
   used to preserve flags from previous (IPA) run for cases where
   late optimizations changed code in a way we can no longer analyze
   it easily.  */

static void
analyze_parms (modref_summary *summary, modref_summary_lto *summary_lto,
	       bool ipa, vec<eaf_flags_t> &past_flags,
	       int past_retslot_flags, int past_static_chain_flags)
{
  unsigned int parm_index = 0;
  unsigned int count = 0;
  int ecf_flags = flags_from_decl_or_type (current_function_decl);
  tree retslot = NULL;
  tree static_chain = NULL;

  /* If there is return slot, look up its SSA name.  */
  if (DECL_RESULT (current_function_decl)
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    retslot = ssa_default_def (cfun, DECL_RESULT (current_function_decl));
  if (cfun->static_chain_decl)
    static_chain = ssa_default_def (cfun, cfun->static_chain_decl);

  /* Count declared parameters; used to size the arg_flags vectors.  */
  for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
       parm = TREE_CHAIN (parm))
    count++;

  /* Nothing to analyze.  */
  if (!count && !retslot && !static_chain)
    return;

  modref_eaf_analysis eaf_analysis (ipa);

  /* Determine all SSA names we need to know flags for.  */
  for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
       parm = TREE_CHAIN (parm))
    {
      tree name = ssa_default_def (cfun, parm);
      if (name)
	eaf_analysis.analyze_ssa_name (name);
    }
  if (retslot)
    eaf_analysis.analyze_ssa_name (retslot);
  if (static_chain)
    eaf_analysis.analyze_ssa_name (static_chain);

  /* Do the dataflow.  */
  eaf_analysis.propagate ();

  /* Flags declared via the "fn spec" attribute are combined with the
     computed ones below.  */
  tree attr = lookup_attribute ("fn spec",
				TYPE_ATTRIBUTES
				  (TREE_TYPE (current_function_decl)));
  attr_fnspec fnspec (attr
		      ? TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))
		      : "");


  /* Store results to summaries.  */
  for (tree parm = DECL_ARGUMENTS (current_function_decl); parm; parm_index++,
       parm = TREE_CHAIN (parm))
    {
      tree name = ssa_default_def (cfun, parm);
      if (!name || has_zero_uses (name))
	{
	  /* We do not track non-SSA parameters,
	     but we want to track unused gimple_regs.  */
	  if (!is_gimple_reg (parm))
	    continue;
	  if (summary)
	    {
	      /* arg_flags is grown lazily; only grow when there is
		 something to record.  */
	      if (parm_index >= summary->arg_flags.length ())
		summary->arg_flags.safe_grow_cleared (count, true);
	      summary->arg_flags[parm_index] = EAF_UNUSED;
	    }
	  if (summary_lto)
	    {
	      if (parm_index >= summary_lto->arg_flags.length ())
		summary_lto->arg_flags.safe_grow_cleared (count, true);
	      summary_lto->arg_flags[parm_index] = EAF_UNUSED;
	    }
	  continue;
	}
      int flags = eaf_analysis.get_ssa_name_flags (name);
      int attr_flags = fnspec.arg_eaf_flags (parm_index);

      /* Dump when the fnspec attribute adds flags the analysis missed.  */
      if (dump_file && (flags | attr_flags) != flags && !(flags & EAF_UNUSED))
	{
	  fprintf (dump_file,
		   "  Flags for param %i combined with fnspec flags:",
		   (int)parm_index);
	  dump_eaf_flags (dump_file, attr_flags, false);
	  fprintf (dump_file, " determined: ");
	  dump_eaf_flags (dump_file, flags, true);
	}
      flags |= attr_flags;

      /* Eliminate useless flags so we do not end up storing unnecessary
	 summaries.  */

      flags = remove_useless_eaf_flags
		 (flags, ecf_flags,
		  VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
      /* Combine with flags determined by an earlier (IPA) run, when
	 available.  */
      if (past_flags.length () > parm_index)
	{
	  int past = past_flags[parm_index];
	  past = remove_useless_eaf_flags
		     (past, ecf_flags,
		      VOID_TYPE_P (TREE_TYPE
			  (TREE_TYPE (current_function_decl))));
	  /* Store merging can produce reads when combining together multiple
	     bitfields.  See PR111613.  */
	  past &= ~(EAF_NO_DIRECT_READ | EAF_NO_INDIRECT_READ);
	  if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
	    {
	      fprintf (dump_file,
		       "  Flags for param %i combined with IPA pass:",
		       (int)parm_index);
	      dump_eaf_flags (dump_file, past, false);
	      fprintf (dump_file, " determined: ");
	      dump_eaf_flags (dump_file, flags, true);
	    }
	  /* EAF_UNUSED subsumes everything; do not dilute it with PAST.  */
	  if (!(flags & EAF_UNUSED))
	    flags |= past;
	}

      if (flags)
	{
	  if (summary)
	    {
	      if (parm_index >= summary->arg_flags.length ())
		summary->arg_flags.safe_grow_cleared (count, true);
	      summary->arg_flags[parm_index] = flags;
	    }
	  if (summary_lto)
	    {
	      if (parm_index >= summary_lto->arg_flags.length ())
		summary_lto->arg_flags.safe_grow_cleared (count, true);
	      summary_lto->arg_flags[parm_index] = flags;
	    }
	  eaf_analysis.record_escape_points (name, parm_index, flags);
	}
    }
  /* Same processing for the return slot, if any.  */
  if (retslot)
    {
      int flags = eaf_analysis.get_ssa_name_flags (retslot);
      int past = past_retslot_flags;

      flags = remove_useless_eaf_flags (flags, ecf_flags, false);
      past = remove_useless_eaf_flags
		 (past, ecf_flags,
		  VOID_TYPE_P (TREE_TYPE
		      (TREE_TYPE (current_function_decl))));
      if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
	{
	  fprintf (dump_file,
		   "  Retslot flags combined with IPA pass:");
	  dump_eaf_flags (dump_file, past, false);
	  fprintf (dump_file, " determined: ");
	  dump_eaf_flags (dump_file, flags, true);
	}
      if (!(flags & EAF_UNUSED))
	flags |= past;
      if (flags)
	{
	  if (summary)
	    summary->retslot_flags = flags;
	  if (summary_lto)
	    summary_lto->retslot_flags = flags;
	  eaf_analysis.record_escape_points (retslot,
					     MODREF_RETSLOT_PARM, flags);
	}
    }
  /* Same processing for the static chain, if any.  */
  if (static_chain)
    {
      int flags = eaf_analysis.get_ssa_name_flags (static_chain);
      int past = past_static_chain_flags;

      flags = remove_useless_eaf_flags (flags, ecf_flags, false);
      past = remove_useless_eaf_flags
		 (past, ecf_flags,
		  VOID_TYPE_P (TREE_TYPE
		      (TREE_TYPE (current_function_decl))));
      if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
	{
	  fprintf (dump_file,
		   "  Static chain flags combined with IPA pass:");
	  dump_eaf_flags (dump_file, past, false);
	  fprintf (dump_file, " determined: ");
	  dump_eaf_flags (dump_file, flags, true);
	}
      if (!(flags & EAF_UNUSED))
	flags |= past;
      if (flags)
	{
	  if (summary)
	    summary->static_chain_flags = flags;
	  if (summary_lto)
	    summary_lto->static_chain_flags = flags;
	  eaf_analysis.record_escape_points (static_chain,
					     MODREF_STATIC_CHAIN_PARM,
					     flags);
	}
    }
}
   3105  1.1  mrg 
/* Analyze function.  IPA indicates whether we're running in local mode
   (false) or the IPA mode (true).
   Return true if fixup cfg is needed after the pass.  */

static bool
analyze_function (bool ipa)
{
  bool fixup_cfg = false;
  if (dump_file)
    fprintf (dump_file, "\n\nmodref analyzing '%s' (ipa=%i)%s%s\n",
	     cgraph_node::get (current_function_decl)->dump_name (), ipa,
	     TREE_READONLY (current_function_decl) ? " (const)" : "",
	     DECL_PURE_P (current_function_decl) ? " (pure)" : "");

  /* Don't analyze if modref is disabled or the function carries the
     "noipa" attribute.  */
  if (!flag_ipa_modref
      || lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl)))
    return false;

  /* Compute no-LTO summaries when local optimization is going to happen.  */
  bool nolto = (!ipa || ((!flag_lto || flag_fat_lto_objects) && !in_lto_p)
		|| (in_lto_p && !flag_wpa
		    && flag_incremental_link != INCREMENTAL_LINK_LTO));
  /* Compute LTO when LTO streaming is going to happen.  */
  bool lto = ipa && ((flag_lto && !in_lto_p)
		     || flag_wpa
		     || flag_incremental_link == INCREMENTAL_LINK_LTO);
  cgraph_node *fnode = cgraph_node::get (current_function_decl);

  modref_summary *summary = NULL;
  modref_summary_lto *summary_lto = NULL;

  /* Flags preserved from a prior (IPA) run, used both to refine the new
     summary and, when dumping, to compare solutions.  */
  bool past_flags_known = false;
  auto_vec <eaf_flags_t> past_flags;
  int past_retslot_flags = 0;
  int past_static_chain_flags = 0;

  /* Initialize the summary.
     If we run in local mode there is possibly pre-existing summary from
     IPA pass.  Dump it so it is easy to compare if mod-ref info has
     improved.  */
  if (!ipa)
    {
      if (!optimization_summaries)
	optimization_summaries = modref_summaries::create_ggc (symtab);
      else /* Remove existing summary if we are re-running the pass.  */
	{
	  summary = optimization_summaries->get (fnode);
	  if (summary != NULL
	      && summary->loads)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Past summary:\n");
		  optimization_summaries->get (fnode)->dump (dump_file);
		}
	      /* Save old flags so they can be merged back in and compared
		 against the freshly computed ones.  */
	      past_flags.reserve_exact (summary->arg_flags.length ());
	      past_flags.splice (summary->arg_flags);
	      past_retslot_flags = summary->retslot_flags;
	      past_static_chain_flags = summary->static_chain_flags;
	      past_flags_known = true;
	    }
	  optimization_summaries->remove (fnode);
	}
      summary = optimization_summaries->get_create (fnode);
      gcc_checking_assert (nolto && !lto);
    }
  /* In IPA mode we analyze every function precisely once.  Assert that.  */
  else
    {
      if (nolto)
	{
	  if (!summaries)
	    summaries = modref_summaries::create_ggc (symtab);
	  else
	    summaries->remove (fnode);
	  summary = summaries->get_create (fnode);
	}
      if (lto)
	{
	  if (!summaries_lto)
	    summaries_lto = modref_summaries_lto::create_ggc (symtab);
	  else
	    summaries_lto->remove (fnode);
	  summary_lto = summaries_lto->get_create (fnode);
	}
      if (!fnspec_summaries)
	fnspec_summaries = new fnspec_summaries_t (symtab);
      if (!escape_summaries)
	escape_summaries = new escape_summaries_t (symtab);
     }


  /* Create and initialize summary for F.
     Note that summaries may be already allocated from previous
     run of the pass.  */
  if (nolto)
    {
      gcc_assert (!summary->loads);
      summary->loads = modref_records::create_ggc ();
      gcc_assert (!summary->stores);
      summary->stores = modref_records::create_ggc ();
      summary->writes_errno = false;
      summary->side_effects = false;
      summary->nondeterministic = false;
      summary->calls_interposable = false;
    }
  if (lto)
    {
      gcc_assert (!summary_lto->loads);
      summary_lto->loads = modref_records_lto::create_ggc ();
      gcc_assert (!summary_lto->stores);
      summary_lto->stores = modref_records_lto::create_ggc ();
      summary_lto->writes_errno = false;
      summary_lto->side_effects = false;
      summary_lto->nondeterministic = false;
      summary_lto->calls_interposable = false;
    }

  /* Compute EAF flags of parameters, return slot and static chain.  */
  analyze_parms (summary, summary_lto, ipa,
		 past_flags, past_retslot_flags, past_static_chain_flags);

  /* Record loads and stores; scoped so the analyzer is destroyed before
     the summaries are pruned below.  */
  {
    modref_access_analysis analyzer (ipa, summary, summary_lto);
    analyzer.analyze ();
  }

  /* When the function neither stores nor behaves nondeterministically,
     it may be promoted to const or pure.  */
  if (!ipa && flag_ipa_pure_const)
    {
      if (!summary->stores->every_base && !summary->stores->bases
	  && !summary->nondeterministic)
	{
	  if (!summary->loads->every_base && !summary->loads->bases
	      && !summary->calls_interposable)
	    fixup_cfg = ipa_make_function_const (fnode,
						 summary->side_effects, true);
	  else
	    fixup_cfg = ipa_make_function_pure (fnode,
						summary->side_effects, true);
	}
    }
  /* Drop summaries that carry no information beyond what ECF flags
     already imply.  */
  int ecf_flags = flags_from_decl_or_type (current_function_decl);
  if (summary && !summary->useful_p (ecf_flags))
    {
      if (!ipa)
	optimization_summaries->remove (fnode);
      else
	summaries->remove (fnode);
      summary = NULL;
    }
  if (summary)
    summary->finalize (current_function_decl);
  if (summary_lto && !summary_lto->useful_p (ecf_flags))
    {
      summaries_lto->remove (fnode);
      summary_lto = NULL;
    }

  if (ipa && !summary && !summary_lto)
    remove_modref_edge_summaries (fnode);

  if (dump_file)
    {
      fprintf (dump_file, " - modref done with result: tracked.\n");
      if (summary)
	summary->dump (dump_file);
      if (summary_lto)
	summary_lto->dump (dump_file);
      dump_modref_edge_summaries (dump_file, fnode, 2);
      /* To simplify debugging, compare IPA and local solutions.  */
      if (past_flags_known && summary)
	{
	  size_t len = summary->arg_flags.length ();

	  if (past_flags.length () > len)
	    len = past_flags.length ();
	  for (size_t i = 0; i < len; i++)
	    {
	      int old_flags = i < past_flags.length () ? past_flags[i] : 0;
	      int new_flags = i < summary->arg_flags.length ()
			      ? summary->arg_flags[i] : 0;
	      old_flags = remove_useless_eaf_flags
		(old_flags, flags_from_decl_or_type (current_function_decl),
		 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
	      if (old_flags != new_flags)
		{
		  if ((old_flags & ~new_flags) == 0
		      || (new_flags & EAF_UNUSED))
		    fprintf (dump_file, "  Flags for param %i improved:",
			     (int)i);
		  else
		    fprintf (dump_file, "  Flags for param %i changed:",
			     (int)i);
		  dump_eaf_flags (dump_file, old_flags, false);
		  fprintf (dump_file, " -> ");
		  dump_eaf_flags (dump_file, new_flags, true);
		}
	    }
	  past_retslot_flags = remove_useless_eaf_flags
		(past_retslot_flags,
		 flags_from_decl_or_type (current_function_decl),
		 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
	  if (past_retslot_flags != summary->retslot_flags)
	    {
	      if ((past_retslot_flags & ~summary->retslot_flags) == 0
		  || (summary->retslot_flags & EAF_UNUSED))
		fprintf (dump_file, "  Flags for retslot improved:");
	      else
		fprintf (dump_file, "  Flags for retslot changed:");
	      dump_eaf_flags (dump_file, past_retslot_flags, false);
	      fprintf (dump_file, " -> ");
	      dump_eaf_flags (dump_file, summary->retslot_flags, true);
	    }
	  past_static_chain_flags = remove_useless_eaf_flags
		(past_static_chain_flags,
		 flags_from_decl_or_type (current_function_decl),
		 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
	  if (past_static_chain_flags != summary->static_chain_flags)
	    {
	      if ((past_static_chain_flags & ~summary->static_chain_flags) == 0
		  || (summary->static_chain_flags & EAF_UNUSED))
		fprintf (dump_file, "  Flags for static chain improved:");
	      else
		fprintf (dump_file, "  Flags for static chain changed:");
	      dump_eaf_flags (dump_file, past_static_chain_flags, false);
	      fprintf (dump_file, " -> ");
	      dump_eaf_flags (dump_file, summary->static_chain_flags, true);
	    }
	}
      /* Summary was dropped entirely: report any past flags as lost.  */
      else if (past_flags_known && !summary)
	{
	  for (size_t i = 0; i < past_flags.length (); i++)
	    {
	      int old_flags = past_flags[i];
	      old_flags = remove_useless_eaf_flags
		(old_flags, flags_from_decl_or_type (current_function_decl),
		 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
	      if (old_flags)
		{
		  fprintf (dump_file, "  Flags for param %i worsened:",
			   (int)i);
		  dump_eaf_flags (dump_file, old_flags, false);
		  fprintf (dump_file, " -> \n");
		}
	    }
	  past_retslot_flags = remove_useless_eaf_flags
		(past_retslot_flags,
		 flags_from_decl_or_type (current_function_decl),
		 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
	  if (past_retslot_flags)
	    {
	      fprintf (dump_file, "  Flags for retslot worsened:");
	      dump_eaf_flags (dump_file, past_retslot_flags, false);
	      fprintf (dump_file, " ->\n");
	    }
	  past_static_chain_flags = remove_useless_eaf_flags
		(past_static_chain_flags,
		 flags_from_decl_or_type (current_function_decl),
		 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
	  if (past_static_chain_flags)
	    {
	      fprintf (dump_file, "  Flags for static chain worsened:");
	      dump_eaf_flags (dump_file, past_static_chain_flags, false);
	      fprintf (dump_file, " ->\n");
	    }
	}
    }
  return fixup_cfg;
}
   3375  1.1  mrg 
   3376  1.1  mrg /* Callback for generate_summary.  */
   3377  1.1  mrg 
   3378  1.1  mrg static void
   3379  1.1  mrg modref_generate (void)
   3380  1.1  mrg {
   3381  1.1  mrg   struct cgraph_node *node;
   3382  1.1  mrg   FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
   3383  1.1  mrg     {
   3384  1.1  mrg       function *f = DECL_STRUCT_FUNCTION (node->decl);
   3385  1.1  mrg       if (!f)
   3386  1.1  mrg 	continue;
   3387  1.1  mrg       push_cfun (f);
   3388  1.1  mrg       analyze_function (true);
   3389  1.1  mrg       pop_cfun ();
   3390  1.1  mrg     }
   3391  1.1  mrg }
   3392  1.1  mrg 
   3393  1.1  mrg }  /* ANON namespace.  */
   3394  1.1  mrg 
   3395  1.1  mrg /* Debugging helper.  */
   3396  1.1  mrg 
   3397  1.1  mrg void
   3398  1.1  mrg debug_eaf_flags (int flags)
   3399  1.1  mrg {
   3400  1.1  mrg    dump_eaf_flags (stderr, flags, true);
   3401  1.1  mrg }
   3402  1.1  mrg 
   3403  1.1  mrg /* Called when a new function is inserted to callgraph late.  */
   3404  1.1  mrg 
   3405  1.1  mrg void
   3406  1.1  mrg modref_summaries::insert (struct cgraph_node *node, modref_summary *)
   3407  1.1  mrg {
   3408  1.1  mrg   /* Local passes ought to be executed by the pass manager.  */
   3409  1.1  mrg   if (this == optimization_summaries)
   3410  1.1  mrg     {
   3411  1.1  mrg       optimization_summaries->remove (node);
   3412  1.1  mrg       return;
   3413  1.1  mrg     }
   3414  1.1  mrg   if (!DECL_STRUCT_FUNCTION (node->decl)
   3415  1.1  mrg       || !opt_for_fn (node->decl, flag_ipa_modref))
   3416  1.1  mrg     {
   3417  1.1  mrg       summaries->remove (node);
   3418  1.1  mrg       return;
   3419  1.1  mrg     }
   3420  1.1  mrg   push_cfun (DECL_STRUCT_FUNCTION (node->decl));
   3421  1.1  mrg   analyze_function (true);
   3422  1.1  mrg   pop_cfun ();
   3423  1.1  mrg }
   3424  1.1  mrg 
   3425  1.1  mrg /* Called when a new function is inserted to callgraph late.  */
   3426  1.1  mrg 
   3427  1.1  mrg void
   3428  1.1  mrg modref_summaries_lto::insert (struct cgraph_node *node, modref_summary_lto *)
   3429  1.1  mrg {
   3430  1.1  mrg   /* We do not support adding new function when IPA information is already
   3431  1.1  mrg      propagated.  This is done only by SIMD cloning that is not very
   3432  1.1  mrg      critical.  */
   3433  1.1  mrg   if (!DECL_STRUCT_FUNCTION (node->decl)
   3434  1.1  mrg       || !opt_for_fn (node->decl, flag_ipa_modref)
   3435  1.1  mrg       || propagated)
   3436  1.1  mrg     {
   3437  1.1  mrg       summaries_lto->remove (node);
   3438  1.1  mrg       return;
   3439  1.1  mrg     }
   3440  1.1  mrg   push_cfun (DECL_STRUCT_FUNCTION (node->decl));
   3441  1.1  mrg   analyze_function (true);
   3442  1.1  mrg   pop_cfun ();
   3443  1.1  mrg }
   3444  1.1  mrg 
   3445  1.1  mrg /* Called when new clone is inserted to callgraph late.  */
   3446  1.1  mrg 
   3447  1.1  mrg void
   3448  1.1  mrg modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
   3449  1.1  mrg 			     modref_summary *src_data,
   3450  1.1  mrg 			     modref_summary *dst_data)
   3451  1.1  mrg {
   3452  1.1  mrg   /* Do not duplicate optimization summaries; we do not handle parameter
   3453  1.1  mrg      transforms on them.  */
   3454  1.1  mrg   if (this == optimization_summaries)
   3455  1.1  mrg     {
   3456  1.1  mrg       optimization_summaries->remove (dst);
   3457  1.1  mrg       return;
   3458  1.1  mrg     }
   3459  1.1  mrg   dst_data->stores = modref_records::create_ggc ();
   3460  1.1  mrg   dst_data->stores->copy_from (src_data->stores);
   3461  1.1  mrg   dst_data->loads = modref_records::create_ggc ();
   3462  1.1  mrg   dst_data->loads->copy_from (src_data->loads);
   3463  1.1  mrg   dst_data->kills.reserve_exact (src_data->kills.length ());
   3464  1.1  mrg   dst_data->kills.splice (src_data->kills);
   3465  1.1  mrg   dst_data->writes_errno = src_data->writes_errno;
   3466  1.1  mrg   dst_data->side_effects = src_data->side_effects;
   3467  1.1  mrg   dst_data->nondeterministic = src_data->nondeterministic;
   3468  1.1  mrg   dst_data->calls_interposable = src_data->calls_interposable;
   3469  1.1  mrg   if (src_data->arg_flags.length ())
   3470  1.1  mrg     dst_data->arg_flags = src_data->arg_flags.copy ();
   3471  1.1  mrg   dst_data->retslot_flags = src_data->retslot_flags;
   3472  1.1  mrg   dst_data->static_chain_flags = src_data->static_chain_flags;
   3473  1.1  mrg }
   3474  1.1  mrg 
   3475  1.1  mrg /* Called when new clone is inserted to callgraph late.  */
   3476  1.1  mrg 
   3477  1.1  mrg void
   3478  1.1  mrg modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
   3479  1.1  mrg 				 modref_summary_lto *src_data,
   3480  1.1  mrg 				 modref_summary_lto *dst_data)
   3481  1.1  mrg {
   3482  1.1  mrg   /* Be sure that no further cloning happens after ipa-modref.  If it does
   3483  1.1  mrg      we will need to update signatures for possible param changes.  */
   3484  1.1  mrg   gcc_checking_assert (!((modref_summaries_lto *)summaries_lto)->propagated);
   3485  1.1  mrg   dst_data->stores = modref_records_lto::create_ggc ();
   3486  1.1  mrg   dst_data->stores->copy_from (src_data->stores);
   3487  1.1  mrg   dst_data->loads = modref_records_lto::create_ggc ();
   3488  1.1  mrg   dst_data->loads->copy_from (src_data->loads);
   3489  1.1  mrg   dst_data->kills.reserve_exact (src_data->kills.length ());
   3490  1.1  mrg   dst_data->kills.splice (src_data->kills);
   3491  1.1  mrg   dst_data->writes_errno = src_data->writes_errno;
   3492  1.1  mrg   dst_data->side_effects = src_data->side_effects;
   3493  1.1  mrg   dst_data->nondeterministic = src_data->nondeterministic;
   3494  1.1  mrg   dst_data->calls_interposable = src_data->calls_interposable;
   3495  1.1  mrg   if (src_data->arg_flags.length ())
   3496  1.1  mrg     dst_data->arg_flags = src_data->arg_flags.copy ();
   3497  1.1  mrg   dst_data->retslot_flags = src_data->retslot_flags;
   3498  1.1  mrg   dst_data->static_chain_flags = src_data->static_chain_flags;
   3499  1.1  mrg }
   3500  1.1  mrg 
   3501  1.1  mrg namespace
   3502  1.1  mrg {
/* Definition of the modref pass on GIMPLE.  */
const pass_data pass_data_modref = {
  GIMPLE_PASS,			/* type */
  "modref",			/* name */
  OPTGROUP_IPA,			/* optinfo_flags */
  TV_TREE_MODREF,		/* tv_id */
  (PROP_cfg | PROP_ssa),	/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  0,				/* todo_flags_finish */
};
   3515  1.1  mrg 
/* The local (GIMPLE) modref pass; registered with pass_data_modref above
   and gated on -fipa-modref.  */
class pass_modref : public gimple_opt_pass
{
  public:
    pass_modref (gcc::context *ctxt)
	: gimple_opt_pass (pass_data_modref, ctxt) {}

    /* opt_pass methods: */
    /* The pass is cloned so it can be scheduled multiple times in the
       pass pipeline (early and late).  */
    opt_pass *clone ()
    {
      return new pass_modref (m_ctxt);
    }
    virtual bool gate (function *)
    {
      return flag_ipa_modref;
    }
    virtual unsigned int execute (function *);
};
   3533  1.1  mrg 
/* Encode TT to the output block OB using the summary streaming API.

   Wire format (mirrored by read_modref_records): every_base flag, number
   of bases; then per base its tree, every_ref flag and ref count; per ref
   its tree, every_access flag and access count; then the accesses.  Any
   change here must be matched in the reader below.  */

static void
write_modref_records (modref_records_lto *tt, struct output_block *ob)
{
  streamer_write_uhwi (ob, tt->every_base);
  streamer_write_uhwi (ob, vec_safe_length (tt->bases));
  for (auto base_node : tt->bases)
    {
      /* LTO summaries key bases by type trees rather than alias sets.  */
      stream_write_tree (ob, base_node->base, true);

      streamer_write_uhwi (ob, base_node->every_ref);
      streamer_write_uhwi (ob, vec_safe_length (base_node->refs));

      for (auto ref_node : base_node->refs)
	{
	  stream_write_tree (ob, ref_node->ref, true);
	  streamer_write_uhwi (ob, ref_node->every_access);
	  streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));

	  for (auto access_node : ref_node->accesses)
	    access_node.stream_out (ob);
	}
    }
}
   3559  1.1  mrg 
/* Read a modref_tree from the input block IB using the data from DATA_IN.
   This assumes that the tree was encoded using write_modref_tree.
   Either nolto_ret or lto_ret is initialized by the tree depending whether
   LTO streaming is expected or not.  */

static void
read_modref_records (tree decl,
		     lto_input_block *ib, struct data_in *data_in,
		     modref_records **nolto_ret,
		     modref_records_lto **lto_ret)
{
  /* Size limits are read from DECL so per-function --param settings of
     this unit apply when rebuilding the tables.  */
  size_t max_bases = opt_for_fn (decl, param_modref_max_bases);
  size_t max_refs = opt_for_fn (decl, param_modref_max_refs);
  size_t max_accesses = opt_for_fn (decl, param_modref_max_accesses);

  if (lto_ret)
    *lto_ret = modref_records_lto::create_ggc ();
  if (nolto_ret)
    *nolto_ret = modref_records::create_ggc ();
  gcc_checking_assert (lto_ret || nolto_ret);

  size_t every_base = streamer_read_uhwi (ib);
  size_t nbase = streamer_read_uhwi (ib);

  /* The writer streams no individual bases when the tree is collapsed.  */
  gcc_assert (!every_base || nbase == 0);
  if (every_base)
    {
      if (nolto_ret)
	(*nolto_ret)->collapse ();
      if (lto_ret)
	(*lto_ret)->collapse ();
    }
  for (size_t i = 0; i < nbase; i++)
    {
      tree base_tree = stream_read_tree (ib, data_in);
      modref_base_node <alias_set_type> *nolto_base_node = NULL;
      modref_base_node <tree> *lto_base_node = NULL;

      /* At stream in time we have LTO alias info.  Check if we streamed in
	 something obviously unnecessary.  Do not glob types by alias sets;
	 it is not 100% clear that ltrans types will get merged same way.
	 Types may get refined based on ODR type conflicts.  */
      if (base_tree && !get_alias_set (base_tree))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "Streamed in alias set 0 type ");
	      print_generic_expr (dump_file, base_tree);
	      fprintf (dump_file, "\n");
	    }
	  base_tree = NULL;
	}

      /* The non-LTO table is keyed by alias sets (0 = unknown base),
	 the LTO table by the type tree itself.  */
      if (nolto_ret)
	nolto_base_node = (*nolto_ret)->insert_base (base_tree
						     ? get_alias_set (base_tree)
						     : 0, 0, INT_MAX);
      if (lto_ret)
	lto_base_node = (*lto_ret)->insert_base (base_tree, 0, max_bases);
      size_t every_ref = streamer_read_uhwi (ib);
      size_t nref = streamer_read_uhwi (ib);

      gcc_assert (!every_ref || nref == 0);
      if (every_ref)
	{
	  if (nolto_base_node)
	    nolto_base_node->collapse ();
	  if (lto_base_node)
	    lto_base_node->collapse ();
	}
      for (size_t j = 0; j < nref; j++)
	{
	  tree ref_tree = stream_read_tree (ib, data_in);

	  /* Same alias-set-0 sanity check as for bases above.  */
	  if (ref_tree && !get_alias_set (ref_tree))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Streamed in alias set 0 type ");
		  print_generic_expr (dump_file, ref_tree);
		  fprintf (dump_file, "\n");
		}
	      ref_tree = NULL;
	    }

	  modref_ref_node <alias_set_type> *nolto_ref_node = NULL;
	  modref_ref_node <tree> *lto_ref_node = NULL;

	  if (nolto_base_node)
	    nolto_ref_node
	      = nolto_base_node->insert_ref (ref_tree
					     ? get_alias_set (ref_tree) : 0,
					     max_refs);
	  if (lto_base_node)
	    lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);

	  size_t every_access = streamer_read_uhwi (ib);
	  size_t naccesses = streamer_read_uhwi (ib);

	  if (nolto_ref_node && every_access)
	    nolto_ref_node->collapse ();
	  if (lto_ref_node && every_access)
	    lto_ref_node->collapse ();

	  for (size_t k = 0; k < naccesses; k++)
	    {
	      /* The access must be read unconditionally to keep the stream
		 position in sync even when the ref node is collapsed.  */
	      modref_access_node a = modref_access_node::stream_in (ib);
	      if (nolto_ref_node)
		nolto_ref_node->insert_access (a, max_accesses, false);
	      if (lto_ref_node)
		lto_ref_node->insert_access (a, max_accesses, false);
	    }
	}
    }
  if (lto_ret)
    (*lto_ret)->cleanup ();
  if (nolto_ret)
    (*nolto_ret)->cleanup ();
}
   3679  1.1  mrg 
   3680  1.1  mrg /* Write ESUM to BP.  */
   3681  1.1  mrg 
   3682  1.1  mrg static void
   3683  1.1  mrg modref_write_escape_summary (struct bitpack_d *bp, escape_summary *esum)
   3684  1.1  mrg {
   3685  1.1  mrg   if (!esum)
   3686  1.1  mrg     {
   3687  1.1  mrg       bp_pack_var_len_unsigned (bp, 0);
   3688  1.1  mrg       return;
   3689  1.1  mrg     }
   3690  1.1  mrg   bp_pack_var_len_unsigned (bp, esum->esc.length ());
   3691  1.1  mrg   unsigned int i;
   3692  1.1  mrg   escape_entry *ee;
   3693  1.1  mrg   FOR_EACH_VEC_ELT (esum->esc, i, ee)
   3694  1.1  mrg     {
   3695  1.1  mrg       bp_pack_var_len_int (bp, ee->parm_index);
   3696  1.1  mrg       bp_pack_var_len_unsigned (bp, ee->arg);
   3697  1.1  mrg       bp_pack_var_len_unsigned (bp, ee->min_flags);
   3698  1.1  mrg       bp_pack_value (bp, ee->direct, 1);
   3699  1.1  mrg     }
   3700  1.1  mrg }
   3701  1.1  mrg 
   3702  1.1  mrg /* Read escape summary for E from BP.  */
   3703  1.1  mrg 
   3704  1.1  mrg static void
   3705  1.1  mrg modref_read_escape_summary (struct bitpack_d *bp, cgraph_edge *e)
   3706  1.1  mrg {
   3707  1.1  mrg   unsigned int n = bp_unpack_var_len_unsigned (bp);
   3708  1.1  mrg   if (!n)
   3709  1.1  mrg     return;
   3710  1.1  mrg   escape_summary *esum = escape_summaries->get_create (e);
   3711  1.1  mrg   esum->esc.reserve_exact (n);
   3712  1.1  mrg   for (unsigned int i = 0; i < n; i++)
   3713  1.1  mrg     {
   3714  1.1  mrg       escape_entry ee;
   3715  1.1  mrg       ee.parm_index = bp_unpack_var_len_int (bp);
   3716  1.1  mrg       ee.arg = bp_unpack_var_len_unsigned (bp);
   3717  1.1  mrg       ee.min_flags = bp_unpack_var_len_unsigned (bp);
   3718  1.1  mrg       ee.direct = bp_unpack_value (bp, 1);
   3719  1.1  mrg       esum->esc.quick_push (ee);
   3720  1.1  mrg     }
   3721  1.1  mrg }
   3722  1.1  mrg 
   3723  1.1  mrg /* Callback for write_summary.  */
   3724  1.1  mrg 
   3725  1.1  mrg static void
   3726  1.1  mrg modref_write ()
   3727  1.1  mrg {
   3728  1.1  mrg   struct output_block *ob = create_output_block (LTO_section_ipa_modref);
   3729  1.1  mrg   lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
   3730  1.1  mrg   unsigned int count = 0;
   3731  1.1  mrg   int i;
   3732  1.1  mrg 
   3733  1.1  mrg   if (!summaries_lto)
   3734  1.1  mrg     {
   3735  1.1  mrg       streamer_write_uhwi (ob, 0);
   3736  1.1  mrg       streamer_write_char_stream (ob->main_stream, 0);
   3737  1.1  mrg       produce_asm (ob, NULL);
   3738  1.1  mrg       destroy_output_block (ob);
   3739  1.1  mrg       return;
   3740  1.1  mrg     }
   3741  1.1  mrg 
   3742  1.1  mrg   for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
   3743  1.1  mrg     {
   3744  1.1  mrg       symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
   3745  1.1  mrg       cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
   3746  1.1  mrg       modref_summary_lto *r;
   3747  1.1  mrg 
   3748  1.1  mrg       if (cnode && cnode->definition && !cnode->alias
   3749  1.1  mrg 	  && (r = summaries_lto->get (cnode))
   3750  1.1  mrg 	  && r->useful_p (flags_from_decl_or_type (cnode->decl)))
   3751  1.1  mrg 	count++;
   3752  1.1  mrg     }
   3753  1.1  mrg   streamer_write_uhwi (ob, count);
   3754  1.1  mrg 
   3755  1.1  mrg   for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
   3756  1.1  mrg     {
   3757  1.1  mrg       symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
   3758  1.1  mrg       cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
   3759  1.1  mrg 
   3760  1.1  mrg       if (cnode && cnode->definition && !cnode->alias)
   3761  1.1  mrg 	{
   3762  1.1  mrg 	  modref_summary_lto *r = summaries_lto->get (cnode);
   3763  1.1  mrg 
   3764  1.1  mrg 	  if (!r || !r->useful_p (flags_from_decl_or_type (cnode->decl)))
   3765  1.1  mrg 	    continue;
   3766  1.1  mrg 
   3767  1.1  mrg 	  streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));
   3768  1.1  mrg 
   3769  1.1  mrg 	  streamer_write_uhwi (ob, r->arg_flags.length ());
   3770  1.1  mrg 	  for (unsigned int i = 0; i < r->arg_flags.length (); i++)
   3771  1.1  mrg 	    streamer_write_uhwi (ob, r->arg_flags[i]);
   3772  1.1  mrg 	  streamer_write_uhwi (ob, r->retslot_flags);
   3773  1.1  mrg 	  streamer_write_uhwi (ob, r->static_chain_flags);
   3774  1.1  mrg 
   3775  1.1  mrg 	  write_modref_records (r->loads, ob);
   3776  1.1  mrg 	  write_modref_records (r->stores, ob);
   3777  1.1  mrg 	  streamer_write_uhwi (ob, r->kills.length ());
   3778  1.1  mrg 	  for (auto kill : r->kills)
   3779  1.1  mrg 	    kill.stream_out (ob);
   3780  1.1  mrg 
   3781  1.1  mrg 	  struct bitpack_d bp = bitpack_create (ob->main_stream);
   3782  1.1  mrg 	  bp_pack_value (&bp, r->writes_errno, 1);
   3783  1.1  mrg 	  bp_pack_value (&bp, r->side_effects, 1);
   3784  1.1  mrg 	  bp_pack_value (&bp, r->nondeterministic, 1);
   3785  1.1  mrg 	  bp_pack_value (&bp, r->calls_interposable, 1);
   3786  1.1  mrg 	  if (!flag_wpa)
   3787  1.1  mrg 	    {
   3788  1.1  mrg 	      for (cgraph_edge *e = cnode->indirect_calls;
   3789  1.1  mrg 		   e; e = e->next_callee)
   3790  1.1  mrg 		{
   3791  1.1  mrg 		  class fnspec_summary *sum = fnspec_summaries->get (e);
   3792  1.1  mrg 		  bp_pack_value (&bp, sum != NULL, 1);
   3793  1.1  mrg 		  if (sum)
   3794  1.1  mrg 		    bp_pack_string (ob, &bp, sum->fnspec, true);
   3795  1.1  mrg 		  class escape_summary *esum = escape_summaries->get (e);
   3796  1.1  mrg 		  modref_write_escape_summary (&bp,esum);
   3797  1.1  mrg 		}
   3798  1.1  mrg 	      for (cgraph_edge *e = cnode->callees; e; e = e->next_callee)
   3799  1.1  mrg 		{
   3800  1.1  mrg 		  class fnspec_summary *sum = fnspec_summaries->get (e);
   3801  1.1  mrg 		  bp_pack_value (&bp, sum != NULL, 1);
   3802  1.1  mrg 		  if (sum)
   3803  1.1  mrg 		    bp_pack_string (ob, &bp, sum->fnspec, true);
   3804  1.1  mrg 		  class escape_summary *esum = escape_summaries->get (e);
   3805  1.1  mrg 		  modref_write_escape_summary (&bp,esum);
   3806  1.1  mrg 		}
   3807  1.1  mrg 	    }
   3808  1.1  mrg 	  streamer_write_bitpack (&bp);
   3809  1.1  mrg 	}
   3810  1.1  mrg     }
   3811  1.1  mrg   streamer_write_char_stream (ob->main_stream, 0);
   3812  1.1  mrg   produce_asm (ob, NULL);
   3813  1.1  mrg   destroy_output_block (ob);
   3814  1.1  mrg }
   3815  1.1  mrg 
/* Read one LTO_section_ipa_modref section (DATA of length LEN) belonging
   to FILE_DATA and populate the summary tables created by modref_read.
   The layout must mirror modref_write exactly.  */
static void
read_section (struct lto_file_decl_data *file_data, const char *data,
	      size_t len)
{
  const struct lto_function_header *header
    = (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int f_count;

  lto_input_block ib ((const char *) data + main_offset, header->main_size,
		      file_data->mode_table);

  data_in
    = lto_data_in_create (file_data, (const char *) data + string_offset,
			  header->string_size, vNULL);
  f_count = streamer_read_uhwi (&ib);
  for (i = 0; i < f_count; i++)
    {
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      unsigned int index = streamer_read_uhwi (&ib);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));

      /* Depending on which tables modref_read allocated, fill in the
	 non-LTO summary, the LTO summary, or both.  In ltrans the
	 optimization summary takes the place of the non-LTO one.  */
      modref_summary *modref_sum = summaries
				   ? summaries->get_create (node) : NULL;
      modref_summary_lto *modref_sum_lto = summaries_lto
					   ? summaries_lto->get_create (node)
					   : NULL;
      if (optimization_summaries)
	modref_sum = optimization_summaries->get_create (node);

      /* Boolean properties default to false; the bitpack near the end of
	 the record may set them.  */
      if (modref_sum)
	{
	  modref_sum->writes_errno = false;
	  modref_sum->side_effects = false;
	  modref_sum->nondeterministic = false;
	  modref_sum->calls_interposable = false;
	}
      if (modref_sum_lto)
	{
	  modref_sum_lto->writes_errno = false;
	  modref_sum_lto->side_effects = false;
	  modref_sum_lto->nondeterministic = false;
	  modref_sum_lto->calls_interposable = false;
	}

      /* Freshly created summaries must not have records yet; they are
	 filled by read_modref_records below.  */
      gcc_assert (!modref_sum || (!modref_sum->loads
				  && !modref_sum->stores));
      gcc_assert (!modref_sum_lto || (!modref_sum_lto->loads
				      && !modref_sum_lto->stores));
      /* Argument EAF flags.  */
      unsigned int args = streamer_read_uhwi (&ib);
      if (args && modref_sum)
	modref_sum->arg_flags.reserve_exact (args);
      if (args && modref_sum_lto)
	modref_sum_lto->arg_flags.reserve_exact (args);
      for (unsigned int i = 0; i < args; i++)
	{
	  eaf_flags_t flags = streamer_read_uhwi (&ib);
	  if (modref_sum)
	    modref_sum->arg_flags.quick_push (flags);
	  if (modref_sum_lto)
	    modref_sum_lto->arg_flags.quick_push (flags);
	}
      /* Return slot EAF flags.  */
      eaf_flags_t flags = streamer_read_uhwi (&ib);
      if (modref_sum)
	modref_sum->retslot_flags = flags;
      if (modref_sum_lto)
	modref_sum_lto->retslot_flags = flags;

      /* Static chain EAF flags.  */
      flags = streamer_read_uhwi (&ib);
      if (modref_sum)
	modref_sum->static_chain_flags = flags;
      if (modref_sum_lto)
	modref_sum_lto->static_chain_flags = flags;

      /* Load, store and kill records.  */
      read_modref_records (node->decl, &ib, data_in,
			   modref_sum ? &modref_sum->loads : NULL,
			   modref_sum_lto ? &modref_sum_lto->loads : NULL);
      read_modref_records (node->decl, &ib, data_in,
			   modref_sum ? &modref_sum->stores : NULL,
			   modref_sum_lto ? &modref_sum_lto->stores : NULL);
      int j = streamer_read_uhwi (&ib);
      if (j && modref_sum)
	modref_sum->kills.reserve_exact (j);
      if (j && modref_sum_lto)
	modref_sum_lto->kills.reserve_exact (j);
      for (int k = 0; k < j; k++)
	{
	  modref_access_node a = modref_access_node::stream_in (&ib);

	  if (modref_sum)
	    modref_sum->kills.quick_push (a);
	  if (modref_sum_lto)
	    modref_sum_lto->kills.quick_push (a);
	}
      /* Bitpacked boolean properties, in the order streamed by
	 modref_write.  */
      struct bitpack_d bp = streamer_read_bitpack (&ib);
      if (bp_unpack_value (&bp, 1))
	{
	  if (modref_sum)
	    modref_sum->writes_errno = true;
	  if (modref_sum_lto)
	    modref_sum_lto->writes_errno = true;
	}
      if (bp_unpack_value (&bp, 1))
	{
	  if (modref_sum)
	    modref_sum->side_effects = true;
	  if (modref_sum_lto)
	    modref_sum_lto->side_effects = true;
	}
      if (bp_unpack_value (&bp, 1))
	{
	  if (modref_sum)
	    modref_sum->nondeterministic = true;
	  if (modref_sum_lto)
	    modref_sum_lto->nondeterministic = true;
	}
      if (bp_unpack_value (&bp, 1))
	{
	  if (modref_sum)
	    modref_sum->calls_interposable = true;
	  if (modref_sum_lto)
	    modref_sum_lto->calls_interposable = true;
	}
      /* Per-edge fnspec/escape summaries are streamed only when the writer
	 ran with !flag_wpa; the reader side condition is !flag_ltrans
	 (edge order: indirect calls first, then direct callees).  */
      if (!flag_ltrans)
	{
	  for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
	    {
	      if (bp_unpack_value (&bp, 1))
		{
		  class fnspec_summary *sum = fnspec_summaries->get_create (e);
		  sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
		}
	      modref_read_escape_summary (&bp, e);
	    }
	  for (cgraph_edge *e = node->callees; e; e = e->next_callee)
	    {
	      if (bp_unpack_value (&bp, 1))
		{
		  class fnspec_summary *sum = fnspec_summaries->get_create (e);
		  sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
		}
	      modref_read_escape_summary (&bp, e);
	    }
	}
      /* In ltrans modref_sum was created from optimization_summaries
	 above (see modref_read), so it is non-NULL here.  */
      if (flag_ltrans)
	modref_sum->finalize (node->decl);
      if (dump_file)
	{
	  fprintf (dump_file, "Read modref for %s\n",
		   node->dump_name ());
	  if (modref_sum)
	    modref_sum->dump (dump_file);
	  if (modref_sum_lto)
	    modref_sum_lto->dump (dump_file);
	  dump_modref_edge_summaries (dump_file, node, 4);
	}
    }

  lto_free_section_data (file_data, LTO_section_ipa_modref, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
   3986  1.1  mrg 
/* Callback for read_summary.  Allocate the summary tables appropriate for
   the current LTO stage, then read the modref section of every input
   file.  */

static void
modref_read (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  gcc_checking_assert (!optimization_summaries && !summaries && !summaries_lto);
  if (flag_ltrans)
    /* ltrans: summaries are consumed for optimization only.  */
    optimization_summaries = modref_summaries::create_ggc (symtab);
  else
    {
      /* WPA and LTO incremental link keep the LTO representation for
	 propagation and re-streaming.  */
      if (flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	summaries_lto = modref_summaries_lto::create_ggc (symtab);
      /* Outside WPA (or for fat incremental link objects) also build the
	 non-LTO summary.  */
      if (!flag_wpa
	  || (flag_incremental_link == INCREMENTAL_LINK_LTO
	      && flag_fat_lto_objects))
	summaries = modref_summaries::create_ggc (symtab);
      /* Edge summaries used by read_section.  */
      if (!fnspec_summaries)
	fnspec_summaries = new fnspec_summaries_t (symtab);
      if (!escape_summaries)
	escape_summaries = new escape_summaries_t (symtab);
    }

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_summary_section_data (file_data,
						       LTO_section_ipa_modref,
						       &len);
      if (data)
	read_section (file_data, data, len);
      else
	/* Fatal error here.  We do not want to support compiling ltrans units
	   with different version of compiler or different flags than the WPA
	   unit, so this should never happen.  */
	fatal_error (input_location,
		     "IPA modref summary is missing in input file");
    }
}
   4029  1.1  mrg 
/* Recompute arg_flags for param adjustments in INFO.  ARG_FLAGS is indexed
   by parameter position; after cloning, parameters may have been removed or
   reordered, so translate flags from original indexes to the new ones and
   drop flags of parameters that no longer exist.  */

static void
remap_arg_flags (auto_vec <eaf_flags_t> &arg_flags, clone_info *info)
{
  auto_vec<eaf_flags_t> old = arg_flags.copy ();
  int max = -1;
  size_t i;
  ipa_adjusted_param *p;

  arg_flags.release ();

  /* First pass: find the highest new index that will receive a nonzero
     flag so the vector can be grown once to the right size.  */
  FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
    {
      int o = info->param_adjustments->get_original_index (i);
      if (o >= 0 && (int)old.length () > o && old[o])
	max = i;
    }
  if (max >= 0)
    arg_flags.safe_grow_cleared (max + 1, true);
  /* Second pass: copy each surviving flag from its old position to the
     new one.  */
  FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
    {
      int o = info->param_adjustments->get_original_index (i);
      if (o >= 0 && (int)old.length () > o && old[o])
	arg_flags[i] = old[o];
    }
}
   4057  1.1  mrg 
   4058  1.1  mrg /* Update kills according to the parm map MAP.  */
   4059  1.1  mrg 
   4060  1.1  mrg static void
   4061  1.1  mrg remap_kills (vec <modref_access_node> &kills, const vec <int> &map)
   4062  1.1  mrg {
   4063  1.1  mrg   for (size_t i = 0; i < kills.length ();)
   4064  1.1  mrg     if (kills[i].parm_index >= 0)
   4065  1.1  mrg       {
   4066  1.1  mrg 	if (kills[i].parm_index < (int)map.length ()
   4067  1.1  mrg 	    && map[kills[i].parm_index] != MODREF_UNKNOWN_PARM)
   4068  1.1  mrg 	  {
   4069  1.1  mrg 	    kills[i].parm_index = map[kills[i].parm_index];
   4070  1.1  mrg 	    i++;
   4071  1.1  mrg 	  }
   4072  1.1  mrg 	else
   4073  1.1  mrg 	  kills.unordered_remove (i);
   4074  1.1  mrg       }
   4075  1.1  mrg     else
   4076  1.1  mrg       i++;
   4077  1.1  mrg }
   4078  1.1  mrg 
/* If signature changed, update the summary.  NODE may have been cloned with
   parameters removed or reordered; translate every parameter index recorded
   in its modref summaries accordingly.  */

static void
update_signature (struct cgraph_node *node)
{
  clone_info *info = clone_info::get (node);
  if (!info || !info->param_adjustments)
    return;

  modref_summary *r = optimization_summaries
		      ? optimization_summaries->get (node) : NULL;
  modref_summary_lto *r_lto = summaries_lto
			      ? summaries_lto->get (node) : NULL;
  if (!r && !r_lto)
    return;
  if (dump_file)
    {
      fprintf (dump_file, "Updating summary for %s from:\n",
	       node->dump_name ());
      if (r)
	r->dump (dump_file);
      if (r_lto)
	r_lto->dump (dump_file);
    }

  size_t i, max = 0;
  ipa_adjusted_param *p;

  /* Find the highest original parameter index referenced so the
     translation map can be sized.  */
  FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
    {
      int idx = info->param_adjustments->get_original_index (i);
      if (idx > (int)max)
	max = idx;
    }

  auto_vec <int, 32> map;

  /* Build MAP: original parameter index -> new index, with
     MODREF_UNKNOWN_PARM for parameters that did not survive.  */
  map.reserve (max + 1);
  for (i = 0; i <= max; i++)
    map.quick_push (MODREF_UNKNOWN_PARM);
  FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
    {
      int idx = info->param_adjustments->get_original_index (i);
      if (idx >= 0)
	map[idx] = i;
    }
  if (r)
    {
      r->loads->remap_params (&map);
      r->stores->remap_params (&map);
      remap_kills (r->kills, map);
      if (r->arg_flags.length ())
	remap_arg_flags (r->arg_flags, info);
    }
  if (r_lto)
    {
      r_lto->loads->remap_params (&map);
      r_lto->stores->remap_params (&map);
      remap_kills (r_lto->kills, map);
      if (r_lto->arg_flags.length ())
	remap_arg_flags (r_lto->arg_flags, info);
    }
  if (dump_file)
    {
      fprintf (dump_file, "to:\n");
      if (r)
	r->dump (dump_file);
      if (r_lto)
	r_lto->dump (dump_file);
    }
  /* Re-finalize the non-LTO summary after remapping; note that the LTO
     summary is not finalized here.  */
  if (r)
    r->finalize (node->decl);
  return;
}
   4153  1.1  mrg 
/* Definition of the modref IPA pass.  Static metadata consumed by the
   pass manager.  */
const pass_data pass_data_ipa_modref =
{
  IPA_PASS,           /* type */
  "modref",       /* name */
  OPTGROUP_IPA,       /* optinfo_flags */
  TV_IPA_MODREF, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};
   4167  1.1  mrg 
/* IPA (whole-program) variant of the modref pass.  Summary generation,
   streaming and reading are done through the modref_* callbacks below;
   there is no per-function transform phase.  */

class pass_ipa_modref : public ipa_opt_pass_d
{
public:
  pass_ipa_modref (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
		      modref_generate, /* generate_summary */
		      modref_write,    /* write_summary */
		      modref_read,     /* read_summary */
		      modref_write,    /* write_optimization_summary */
		      modref_read,     /* read_optimization_summary */
		      NULL,            /* stmt_fixup */
		      0,               /* function_transform_todo_flags_start */
		      NULL,	       /* function_transform */
		      NULL)            /* variable_transform */
  {}

  /* opt_pass methods: */
  opt_pass *clone () { return new pass_ipa_modref (m_ctxt); }
  /* The pass runs unconditionally.  */
  virtual bool gate (function *)
  {
    return true;
  }
  virtual unsigned int execute (function *);

};
   4193  1.1  mrg 
   4194  1.1  mrg }
   4195  1.1  mrg 
   4196  1.1  mrg unsigned int pass_modref::execute (function *)
   4197  1.1  mrg {
   4198  1.1  mrg   if (analyze_function (false))
   4199  1.1  mrg     return execute_fixup_cfg ();
   4200  1.1  mrg   return 0;
   4201  1.1  mrg }
   4202  1.1  mrg 
/* Factory creating an instance of the tree-level modref pass.  */
gimple_opt_pass *
make_pass_modref (gcc::context *ctxt)
{
  return new pass_modref (ctxt);
}
   4208  1.1  mrg 
/* Factory creating an instance of the IPA modref pass.  */
ipa_opt_pass_d *
make_pass_ipa_modref (gcc::context *ctxt)
{
  return new pass_ipa_modref (ctxt);
}
   4214  1.1  mrg 
   4215  1.1  mrg namespace {
   4216  1.1  mrg 
/* Skip edges to targets that have no modref summary computed (the comment
   previously referenced ipa_pure_const; the check below is on modref
   summaries).  Ignore not available symbols.  */

static bool
ignore_edge (struct cgraph_edge *e)
{
  /* We merge summaries of inline clones into summaries of functions they
     are inlined to.  For that reason the complete function bodies must
     act as unit.  */
  if (!e->inline_failed)
    return false;
  enum availability avail;
  cgraph_node *callee = e->callee->ultimate_alias_target
			  (&avail, e->caller);

  /* Interposable targets can be replaced at link time, so nothing can be
     assumed; likewise when neither summary kind exists for the callee.  */
  return (avail <= AVAIL_INTERPOSABLE
	  || ((!optimization_summaries || !optimization_summaries->get (callee))
	      && (!summaries_lto || !summaries_lto->get (callee))));
}
   4236  1.1  mrg 
/* Compute parm_map for CALLEE_EDGE.  For each actual argument of the call
   record either the caller parameter it is derived from (possibly with a
   known byte offset), MODREF_LOCAL_MEMORY_PARM when the argument points to
   local or readonly memory, or -1 when nothing is known.  Return true on
   success; false when no IPA information is available for the edge.  */

static bool
compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
{
  class ipa_edge_args *args;
  if (ipa_node_params_sum
      && !callee_edge->call_stmt_cannot_inline_p
      && (args = ipa_edge_args_sum->get (callee_edge)) != NULL)
    {
      int i, count = ipa_get_cs_argument_count (args);
      class ipa_node_params *caller_parms_info, *callee_pi;
      class ipa_call_summary *es
	     = ipa_call_summaries->get (callee_edge);
      cgraph_node *callee
	 = callee_edge->callee->ultimate_alias_target
			      (NULL, callee_edge->caller);

      /* Parameter info lives on the node a clone was inlined into.  */
      caller_parms_info
	= ipa_node_params_sum->get (callee_edge->caller->inlined_to
				    ? callee_edge->caller->inlined_to
				    : callee_edge->caller);
      callee_pi = ipa_node_params_sum->get (callee);

      (*parm_map).safe_grow_cleared (count, true);

      for (i = 0; i < count; i++)
	{
	  /* The call summary may already know the argument is harmless.  */
	  if (es && es->param[i].points_to_local_or_readonly_memory)
	    {
	      (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
	      continue;
	    }

	  struct ipa_jump_func *jf
	     = ipa_get_ith_jump_func (args, i);
	  /* Try to fold the argument to a constant known to point to
	     local or readonly memory.  */
	  if (jf && callee_pi)
	    {
	      tree cst = ipa_value_from_jfunc (caller_parms_info,
					       jf,
					       ipa_get_type
						 (callee_pi, i));
	      if (cst && points_to_local_or_readonly_memory_p (cst))
		{
		  (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
		  continue;
		}
	    }
	  /* Pass-through: the argument is a caller parameter, possibly
	     with a compile-time pointer adjustment.  */
	  if (jf && jf->type == IPA_JF_PASS_THROUGH)
	    {
	      (*parm_map)[i].parm_index
		= ipa_get_jf_pass_through_formal_id (jf);
	      if (ipa_get_jf_pass_through_operation (jf) == NOP_EXPR)
		{
		  (*parm_map)[i].parm_offset_known = true;
		  (*parm_map)[i].parm_offset = 0;
		}
	      else if (ipa_get_jf_pass_through_operation (jf)
		       == POINTER_PLUS_EXPR
		       && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf),
					  &(*parm_map)[i].parm_offset))
		(*parm_map)[i].parm_offset_known = true;
	      else
		(*parm_map)[i].parm_offset_known = false;
	      continue;
	    }
	  /* Ancestor: caller parameter plus a known bit offset, which must
	     be byte aligned to be representable as parm_offset.  */
	  if (jf && jf->type == IPA_JF_ANCESTOR)
	    {
	      (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
	      (*parm_map)[i].parm_offset_known = true;
	      gcc_checking_assert
		(!(ipa_get_jf_ancestor_offset (jf) & (BITS_PER_UNIT - 1)));
	      (*parm_map)[i].parm_offset
		 = ipa_get_jf_ancestor_offset (jf) >> LOG2_BITS_PER_UNIT;
	    }
	  else
	    (*parm_map)[i].parm_index = -1;
	}
      if (dump_file)
	{
	  fprintf (dump_file, "  Parm map: ");
	  for (i = 0; i < count; i++)
	    fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
	  fprintf (dump_file, "\n");
	}
      return true;
    }
  return false;
}
   4326  1.1  mrg 
/* Map used to translate escape infos across a call graph edge.  */

struct escape_map
{
  /* Parameter index after translation.  */
  int parm_index;
  /* True when the value escapes directly; when false, flags must be
     adjusted via deref_flags (see update_escape_summary_1).  */
  bool direct;
};
   4334  1.1  mrg 
/* Update escape map for E.  Translate every escape entry of E's summary
   through MAP (indexed by old parameter number); entries with no
   translation are dropped, and indirect translations weaken the flags via
   deref_flags.  IGNORE_STORES is forwarded to deref_flags.  */

static void
update_escape_summary_1 (cgraph_edge *e,
			 vec <vec <escape_map>> &map,
			 bool ignore_stores)
{
  escape_summary *sum = escape_summaries->get (e);
  if (!sum)
    return;
  /* Rebuild the entry list from a copy of the old one.  */
  auto_vec <escape_entry> old = sum->esc.copy ();
  sum->esc.release ();

  unsigned int i;
  escape_entry *ee;
  FOR_EACH_VEC_ELT (old, i, ee)
    {
      unsigned int j;
      struct escape_map *em;
      /* TODO: We do not have jump functions for return slots, so we
	 never propagate them to outer function.  */
      if (ee->parm_index >= (int)map.length ()
	  || ee->parm_index < 0)
	continue;
      /* One old parameter may translate to several new ones; emit an
	 entry for each.  */
      FOR_EACH_VEC_ELT (map[ee->parm_index], j, em)
	{
	  int min_flags = ee->min_flags;
	  /* A direct escape reached through an indirect mapping becomes
	     an indirect one; weaken the flags accordingly.  */
	  if (ee->direct && !em->direct)
	    min_flags = deref_flags (min_flags, ignore_stores);
	  struct escape_entry entry = {em->parm_index, ee->arg,
				       min_flags,
				       ee->direct & em->direct};
	  sum->esc.safe_push (entry);
	}
    }
  /* Drop summaries that became empty.  */
  if (!sum->esc.length ())
    escape_summaries->remove (e);
}
   4373  1.1  mrg 
   4374  1.1  mrg /* Update escape map for NODE.  */
   4375  1.1  mrg 
   4376  1.1  mrg static void
   4377  1.1  mrg update_escape_summary (cgraph_node *node,
   4378  1.1  mrg 		       vec <vec <escape_map>> &map,
   4379  1.1  mrg 		       bool ignore_stores)
   4380  1.1  mrg {
   4381  1.1  mrg   if (!escape_summaries)
   4382  1.1  mrg     return;
   4383  1.1  mrg   for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
   4384  1.1  mrg     update_escape_summary_1 (e, map, ignore_stores);
   4385  1.1  mrg   for (cgraph_edge *e = node->callees; e; e = e->next_callee)
   4386  1.1  mrg     {
   4387  1.1  mrg       if (!e->inline_failed)
   4388  1.1  mrg 	update_escape_summary (e->callee, map, ignore_stores);
   4389  1.1  mrg       else
   4390  1.1  mrg 	update_escape_summary_1 (e, map, ignore_stores);
   4391  1.1  mrg     }
   4392  1.1  mrg }
   4393  1.1  mrg 
   4394  1.1  mrg /* Get parameter type from DECL.  This is only safe for special cases
   4395  1.1  mrg    like builtins we create fnspec for because the type match is checked
   4396  1.1  mrg    at fnspec creation time.  */
   4397  1.1  mrg 
   4398  1.1  mrg static tree
   4399  1.1  mrg get_parm_type (tree decl, unsigned int i)
   4400  1.1  mrg {
   4401  1.1  mrg   tree t = TYPE_ARG_TYPES (TREE_TYPE (decl));
   4402  1.1  mrg 
   4403  1.1  mrg   for (unsigned int p = 0; p < i; p++)
   4404  1.1  mrg     t = TREE_CHAIN (t);
   4405  1.1  mrg   return TREE_VALUE (t);
   4406  1.1  mrg }
   4407  1.1  mrg 
/* Return access mode for argument I of call E with FNSPEC.  MAP describes
   how the argument maps to the caller's parameters.  When the fnspec
   constrains the access size, encode that bound into the returned access;
   otherwise the access has unknown extent.  */

static modref_access_node
get_access_for_fnspec (cgraph_edge *e, attr_fnspec &fnspec,
		       unsigned int i, modref_parm_map &map)
{
  tree size = NULL_TREE;
  unsigned int size_arg;

  if (!fnspec.arg_specified_p (i))
    ;
  /* The access size may be given by another argument of the call...  */
  else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
    {
      cgraph_node *node = e->caller->inlined_to
			  ? e->caller->inlined_to : e->caller;
      ipa_node_params *caller_parms_info = ipa_node_params_sum->get (node);
      ipa_edge_args *args = ipa_edge_args_sum->get (e);
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, size_arg);

      if (jf)
	size = ipa_value_from_jfunc (caller_parms_info, jf,
				     get_parm_type (e->callee->decl, size_arg));
    }
  /* ...or by the size of the pointed-to type.  */
  else if (fnspec.arg_access_size_given_by_type_p (i))
    size = TYPE_SIZE_UNIT (get_parm_type (e->callee->decl, i));
  modref_access_node a = {0, -1, -1,
			  map.parm_offset, map.parm_index,
			  map.parm_offset_known, 0};
  poly_int64 size_hwi;
  /* Record the bound only when it is a compile-time constant that does
     not overflow when converted from bytes to bits.  */
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0,
			    HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      a.size = -1;
      a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
    }
  return a;
}
   4447  1.1  mrg 
   4448  1.1  mrg  /* Collapse loads and return true if something changed.  */
   4449  1.1  mrg static bool
   4450  1.1  mrg collapse_loads (modref_summary *cur_summary,
   4451  1.1  mrg 		modref_summary_lto *cur_summary_lto)
   4452  1.1  mrg {
   4453  1.1  mrg   bool changed = false;
   4454  1.1  mrg 
   4455  1.1  mrg   if (cur_summary && !cur_summary->loads->every_base)
   4456  1.1  mrg     {
   4457  1.1  mrg       cur_summary->loads->collapse ();
   4458  1.1  mrg       changed = true;
   4459  1.1  mrg     }
   4460  1.1  mrg   if (cur_summary_lto
   4461  1.1  mrg       && !cur_summary_lto->loads->every_base)
   4462  1.1  mrg     {
   4463  1.1  mrg       cur_summary_lto->loads->collapse ();
   4464  1.1  mrg       changed = true;
   4465  1.1  mrg     }
   4466  1.1  mrg   return changed;
   4467  1.1  mrg }
   4468  1.1  mrg 
/* Collapse stores and return true if something changed.  (Comment fixed:
   it previously said "loads".)  */

static bool
collapse_stores (modref_summary *cur_summary,
		modref_summary_lto *cur_summary_lto)
{
  bool changed = false;

  if (cur_summary && !cur_summary->stores->every_base)
    {
      cur_summary->stores->collapse ();
      changed = true;
    }
  if (cur_summary_lto
      && !cur_summary_lto->stores->every_base)
    {
      cur_summary_lto->stores->collapse ();
      changed = true;
    }
  return changed;
}
   4490  1.1  mrg 
/* Call E in NODE with ECF_FLAGS has no summary; update MODREF_SUMMARY and
   CUR_SUMMARY_LTO accordingly.  Return true if something changed.  */

static bool
propagate_unknown_call (cgraph_node *node,
			cgraph_edge *e, int ecf_flags,
			modref_summary *cur_summary,
			modref_summary_lto *cur_summary_lto,
			bool nontrivial_scc)
{
  bool changed = false;
  class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
  auto_vec <modref_parm_map, 32> parm_map;
  bool looping;

  /* Builtins that are safe for const functions need no load/store
     propagation; only possible looping is recorded as a side effect.  */
  if (e->callee
      && builtin_safe_for_const_function_p (&looping, e->callee->decl))
    {
      if (looping && cur_summary && !cur_summary->side_effects)
	{
	  cur_summary->side_effects = true;
	  changed = true;
	}
      if (looping && cur_summary_lto && !cur_summary_lto->side_effects)
	{
	  cur_summary_lto->side_effects = true;
	  changed = true;
	}
      return changed;
    }

  /* Unless the callee is non-looping const/pure and we are in a trivial
     SCC, record side effects and (where not ignorable) nondeterminism.  */
  if (!(ecf_flags & (ECF_CONST | ECF_PURE))
      || (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
      || nontrivial_scc)
    {
      if (cur_summary && !cur_summary->side_effects)
	{
	  cur_summary->side_effects = true;
	  changed = true;
	}
      if (cur_summary_lto && !cur_summary_lto->side_effects)
	{
	  cur_summary_lto->side_effects = true;
	  changed = true;
	}
      if (cur_summary && !cur_summary->nondeterministic
	  && !ignore_nondeterminism_p (node->decl, ecf_flags))
	{
	  cur_summary->nondeterministic = true;
	  changed = true;
	}
      if (cur_summary_lto && !cur_summary_lto->nondeterministic
	  && !ignore_nondeterminism_p (node->decl, ecf_flags))
	{
	  cur_summary_lto->nondeterministic = true;
	  changed = true;
	}
    }
  /* Const and novops calls do not access memory we track.  */
  if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
    return changed;

  /* When the call has a fnspec and its arguments can be mapped to our
     parameters, model its loads and stores precisely.  */
  if (fnspec_sum
      && compute_parm_map (e, &parm_map))
    {
      attr_fnspec fnspec (fnspec_sum->fnspec);

      gcc_checking_assert (fnspec.known_p ());
      if (fnspec.global_memory_read_p ())
	collapse_loads (cur_summary, cur_summary_lto);
      else
	{
	  /* Walk pointer arguments; each one the fnspec allows to be read
	     contributes a load access.  */
	  tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
	  for (unsigned i = 0; i < parm_map.length () && t;
	       i++, t = TREE_CHAIN (t))
	    if (!POINTER_TYPE_P (TREE_VALUE (t)))
	      ;
	  else if (!fnspec.arg_specified_p (i)
		   || fnspec.arg_maybe_read_p (i))
	    {
	      modref_parm_map map = parm_map[i];
	      if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
		continue;
	      if (map.parm_index == MODREF_UNKNOWN_PARM)
		{
		  /* Argument tracks to nothing known; give up on loads.  */
		  collapse_loads (cur_summary, cur_summary_lto);
		  break;
		}
	      if (cur_summary)
		changed |= cur_summary->loads->insert
		  (node->decl, 0, 0,
		   get_access_for_fnspec (e, fnspec, i, map), false);
	      if (cur_summary_lto)
		changed |= cur_summary_lto->loads->insert
		  (node->decl, 0, 0,
		   get_access_for_fnspec (e, fnspec, i, map), false);
	    }
	}
      if (ignore_stores_p (node->decl, ecf_flags))
	;
      else if (fnspec.global_memory_written_p ())
	collapse_stores (cur_summary, cur_summary_lto);
      else
	{
	  /* Same walk for stores.  */
	  tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
	  for (unsigned i = 0; i < parm_map.length () && t;
	       i++, t = TREE_CHAIN (t))
	    if (!POINTER_TYPE_P (TREE_VALUE (t)))
	      ;
	  else if (!fnspec.arg_specified_p (i)
		   || fnspec.arg_maybe_written_p (i))
	    {
	      modref_parm_map map = parm_map[i];
	      if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
		continue;
	      if (map.parm_index == MODREF_UNKNOWN_PARM)
		{
		  collapse_stores (cur_summary, cur_summary_lto);
		  break;
		}
	      if (cur_summary)
		changed |= cur_summary->stores->insert
		  (node->decl, 0, 0,
		   get_access_for_fnspec (e, fnspec, i, map), false);
	      if (cur_summary_lto)
		changed |= cur_summary_lto->stores->insert
		  (node->decl, 0, 0,
		   get_access_for_fnspec (e, fnspec, i, map), false);
	    }
	}
      /* errno writes only matter when -fmath-errno is in effect.  */
      if (fnspec.errno_maybe_written_p () && flag_errno_math)
	{
	  if (cur_summary && !cur_summary->writes_errno)
	    {
	      cur_summary->writes_errno = true;
	      changed = true;
	    }
	  if (cur_summary_lto && !cur_summary_lto->writes_errno)
	    {
	      cur_summary_lto->writes_errno = true;
	      changed = true;
	    }
	}
      return changed;
    }
  /* No fnspec available: assume the call may read anything and, unless
     stores are ignorable, write anything.  */
  if (dump_file)
    fprintf (dump_file, "      collapsing loads\n");
  changed |= collapse_loads (cur_summary, cur_summary_lto);
  if (!ignore_stores_p (node->decl, ecf_flags))
    {
      if (dump_file)
	fprintf (dump_file, "      collapsing stores\n");
      changed |= collapse_stores (cur_summary, cur_summary_lto);
    }
  return changed;
}
   4646  1.1  mrg 
   4647  1.1  mrg /* Maybe remove summaries of NODE pointed to by CUR_SUMMARY_PTR
   4648  1.1  mrg    and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS.  */
   4649  1.1  mrg 
   4650  1.1  mrg static void
   4651  1.1  mrg remove_useless_summaries (cgraph_node *node,
   4652  1.1  mrg 			  modref_summary **cur_summary_ptr,
   4653  1.1  mrg 			  modref_summary_lto **cur_summary_lto_ptr,
   4654  1.1  mrg 			  int ecf_flags)
   4655  1.1  mrg {
   4656  1.1  mrg   if (*cur_summary_ptr && !(*cur_summary_ptr)->useful_p (ecf_flags, false))
   4657  1.1  mrg     {
   4658  1.1  mrg       optimization_summaries->remove (node);
   4659  1.1  mrg       *cur_summary_ptr = NULL;
   4660  1.1  mrg     }
   4661  1.1  mrg   if (*cur_summary_lto_ptr
   4662  1.1  mrg       && !(*cur_summary_lto_ptr)->useful_p (ecf_flags, false))
   4663  1.1  mrg     {
   4664  1.1  mrg       summaries_lto->remove (node);
   4665  1.1  mrg       *cur_summary_lto_ptr = NULL;
   4666  1.1  mrg     }
   4667  1.1  mrg }
   4668  1.1  mrg 
   4669  1.1  mrg /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
   4670  1.1  mrg    and propagate loads/stores.  */
   4671  1.1  mrg 
   4672  1.1  mrg static bool
   4673  1.1  mrg modref_propagate_in_scc (cgraph_node *component_node)
   4674  1.1  mrg {
   4675  1.1  mrg   bool changed = true;
   4676  1.1  mrg   bool first = true;
   4677  1.1  mrg   int iteration = 0;
   4678  1.1  mrg 
   4679  1.1  mrg   while (changed)
   4680  1.1  mrg     {
   4681  1.1  mrg       bool nontrivial_scc
   4682  1.1  mrg 		 = ((struct ipa_dfs_info *) component_node->aux)->next_cycle;
   4683  1.1  mrg       changed = false;
   4684  1.1  mrg       for (struct cgraph_node *cur = component_node; cur;
   4685  1.1  mrg 	   cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
   4686  1.1  mrg 	{
   4687  1.1  mrg 	  cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
   4688  1.1  mrg 	  modref_summary *cur_summary = optimization_summaries
   4689  1.1  mrg 					? optimization_summaries->get (node)
   4690  1.1  mrg 					: NULL;
   4691  1.1  mrg 	  modref_summary_lto *cur_summary_lto = summaries_lto
   4692  1.1  mrg 						? summaries_lto->get (node)
   4693  1.1  mrg 						: NULL;
   4694  1.1  mrg 
   4695  1.1  mrg 	  if (!cur_summary && !cur_summary_lto)
   4696  1.1  mrg 	    continue;
   4697  1.1  mrg 
   4698  1.1  mrg 	  int cur_ecf_flags = flags_from_decl_or_type (node->decl);
   4699  1.1  mrg 
   4700  1.1  mrg 	  if (dump_file)
   4701  1.1  mrg 	    fprintf (dump_file, "  Processing %s%s%s\n",
   4702  1.1  mrg 		     cur->dump_name (),
   4703  1.1  mrg 		     TREE_READONLY (cur->decl) ? " (const)" : "",
   4704  1.1  mrg 		     DECL_PURE_P (cur->decl) ? " (pure)" : "");
   4705  1.1  mrg 
   4706  1.1  mrg 	  for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
   4707  1.1  mrg 	    {
   4708  1.1  mrg 	      if (dump_file)
   4709  1.1  mrg 		fprintf (dump_file, "    Indirect call\n");
   4710  1.1  mrg 	      if (propagate_unknown_call
   4711  1.1  mrg 			   (node, e, e->indirect_info->ecf_flags,
   4712  1.1  mrg 			    cur_summary, cur_summary_lto,
   4713  1.1  mrg 			    nontrivial_scc))
   4714  1.1  mrg 		{
   4715  1.1  mrg 		  changed = true;
   4716  1.1  mrg 		  remove_useless_summaries (node, &cur_summary,
   4717  1.1  mrg 					    &cur_summary_lto,
   4718  1.1  mrg 					    cur_ecf_flags);
   4719  1.1  mrg 		  if (!cur_summary && !cur_summary_lto)
   4720  1.1  mrg 		    break;
   4721  1.1  mrg 		}
   4722  1.1  mrg 	    }
   4723  1.1  mrg 
   4724  1.1  mrg 	  if (!cur_summary && !cur_summary_lto)
   4725  1.1  mrg 	    continue;
   4726  1.1  mrg 
   4727  1.1  mrg 	  for (cgraph_edge *callee_edge = cur->callees; callee_edge;
   4728  1.1  mrg 	       callee_edge = callee_edge->next_callee)
   4729  1.1  mrg 	    {
   4730  1.1  mrg 	      int flags = flags_from_decl_or_type (callee_edge->callee->decl);
   4731  1.1  mrg 	      modref_summary *callee_summary = NULL;
   4732  1.1  mrg 	      modref_summary_lto *callee_summary_lto = NULL;
   4733  1.1  mrg 	      struct cgraph_node *callee;
   4734  1.1  mrg 
   4735  1.1  mrg 	      if (!callee_edge->inline_failed
   4736  1.1  mrg 		 || ((flags & ECF_CONST)
   4737  1.1  mrg 		     && !(flags & ECF_LOOPING_CONST_OR_PURE)))
   4738  1.1  mrg 		continue;
   4739  1.1  mrg 
   4740  1.1  mrg 	      /* Get the callee and its summary.  */
   4741  1.1  mrg 	      enum availability avail;
   4742  1.1  mrg 	      callee = callee_edge->callee->ultimate_alias_target
   4743  1.1  mrg 			 (&avail, cur);
   4744  1.1  mrg 
   4745  1.1  mrg 	      /* It is not necessary to re-process calls outside of the
   4746  1.1  mrg 		 SCC component.  */
   4747  1.1  mrg 	      if (iteration > 0
   4748  1.1  mrg 		  && (!callee->aux
   4749  1.1  mrg 		      || ((struct ipa_dfs_info *)cur->aux)->scc_no
   4750  1.1  mrg 			  != ((struct ipa_dfs_info *)callee->aux)->scc_no))
   4751  1.1  mrg 		continue;
   4752  1.1  mrg 
   4753  1.1  mrg 	      if (dump_file)
   4754  1.1  mrg 		fprintf (dump_file, "    Call to %s\n",
   4755  1.1  mrg 			 callee_edge->callee->dump_name ());
   4756  1.1  mrg 
   4757  1.1  mrg 	      bool ignore_stores = ignore_stores_p (cur->decl, flags);
   4758  1.1  mrg 
   4759  1.1  mrg 	      if (avail <= AVAIL_INTERPOSABLE)
   4760  1.1  mrg 		{
   4761  1.1  mrg 		  if (dump_file)
   4762  1.1  mrg 		    fprintf (dump_file, "      Call target interposable"
   4763  1.1  mrg 			     " or not available\n");
   4764  1.1  mrg 		  changed |= propagate_unknown_call
   4765  1.1  mrg 			       (node, callee_edge, flags,
   4766  1.1  mrg 				cur_summary, cur_summary_lto,
   4767  1.1  mrg 				nontrivial_scc);
   4768  1.1  mrg 		  if (!cur_summary && !cur_summary_lto)
   4769  1.1  mrg 		    break;
   4770  1.1  mrg 		  continue;
   4771  1.1  mrg 		}
   4772  1.1  mrg 
   4773  1.1  mrg 	      /* We don't know anything about CALLEE, hence we cannot tell
   4774  1.1  mrg 		 anything about the entire component.  */
   4775  1.1  mrg 
   4776  1.1  mrg 	      if (cur_summary
   4777  1.1  mrg 		  && !(callee_summary = optimization_summaries->get (callee)))
   4778  1.1  mrg 		{
   4779  1.1  mrg 		  if (dump_file)
   4780  1.1  mrg 		    fprintf (dump_file, "      No call target summary\n");
   4781  1.1  mrg 		  changed |= propagate_unknown_call
   4782  1.1  mrg 			       (node, callee_edge, flags,
   4783  1.1  mrg 				cur_summary, NULL,
   4784  1.1  mrg 				nontrivial_scc);
   4785  1.1  mrg 		}
   4786  1.1  mrg 	      if (cur_summary_lto
   4787  1.1  mrg 		  && !(callee_summary_lto = summaries_lto->get (callee)))
   4788  1.1  mrg 		{
   4789  1.1  mrg 		  if (dump_file)
   4790  1.1  mrg 		    fprintf (dump_file, "      No call target summary\n");
   4791  1.1  mrg 		  changed |= propagate_unknown_call
   4792  1.1  mrg 			       (node, callee_edge, flags,
   4793  1.1  mrg 				NULL, cur_summary_lto,
   4794  1.1  mrg 				nontrivial_scc);
   4795  1.1  mrg 		}
   4796  1.1  mrg 
   4797  1.1  mrg 	      if (callee_summary && !cur_summary->side_effects
   4798  1.1  mrg 		  && (callee_summary->side_effects
   4799  1.1  mrg 		      || callee_edge->recursive_p ()))
   4800  1.1  mrg 		{
   4801  1.1  mrg 		  cur_summary->side_effects = true;
   4802  1.1  mrg 		  changed = true;
   4803  1.1  mrg 		}
   4804  1.1  mrg 	      if (callee_summary_lto && !cur_summary_lto->side_effects
   4805  1.1  mrg 		  && (callee_summary_lto->side_effects
   4806  1.1  mrg 		      || callee_edge->recursive_p ()))
   4807  1.1  mrg 		{
   4808  1.1  mrg 		  cur_summary_lto->side_effects = true;
   4809  1.1  mrg 		  changed = true;
   4810  1.1  mrg 		}
   4811  1.1  mrg 	      if (callee_summary && !cur_summary->nondeterministic
   4812  1.1  mrg 		  && callee_summary->nondeterministic
   4813  1.1  mrg 		  && !ignore_nondeterminism_p (cur->decl, flags))
   4814  1.1  mrg 		{
   4815  1.1  mrg 		  cur_summary->nondeterministic = true;
   4816  1.1  mrg 		  changed = true;
   4817  1.1  mrg 		}
   4818  1.1  mrg 	      if (callee_summary_lto && !cur_summary_lto->nondeterministic
   4819  1.1  mrg 		  && callee_summary_lto->nondeterministic
   4820  1.1  mrg 		  && !ignore_nondeterminism_p (cur->decl, flags))
   4821  1.1  mrg 		{
   4822  1.1  mrg 		  cur_summary_lto->nondeterministic = true;
   4823  1.1  mrg 		  changed = true;
   4824  1.1  mrg 		}
   4825  1.1  mrg 	      if (flags & (ECF_CONST | ECF_NOVOPS))
   4826  1.1  mrg 		continue;
   4827  1.1  mrg 
   4828  1.1  mrg 	      /* We can not safely optimize based on summary of callee if it
   4829  1.1  mrg 		 does not always bind to current def: it is possible that
   4830  1.1  mrg 		 memory load was optimized out earlier which may not happen in
   4831  1.1  mrg 		 the interposed variant.  */
   4832  1.1  mrg 	      if (!callee_edge->binds_to_current_def_p ())
   4833  1.1  mrg 		{
   4834  1.1  mrg 		  if (cur_summary && !cur_summary->calls_interposable)
   4835  1.1  mrg 		    {
   4836  1.1  mrg 		      cur_summary->calls_interposable = true;
   4837  1.1  mrg 		      changed = true;
   4838  1.1  mrg 		    }
   4839  1.1  mrg 		  if (cur_summary_lto && !cur_summary_lto->calls_interposable)
   4840  1.1  mrg 		    {
   4841  1.1  mrg 		      cur_summary_lto->calls_interposable = true;
   4842  1.1  mrg 		      changed = true;
   4843  1.1  mrg 		    }
   4844  1.1  mrg 		  if (dump_file)
   4845  1.1  mrg 		    fprintf (dump_file, "      May not bind local;"
   4846  1.1  mrg 			     " collapsing loads\n");
   4847  1.1  mrg 		}
   4848  1.1  mrg 
   4849  1.1  mrg 
   4850  1.1  mrg 	      auto_vec <modref_parm_map, 32> parm_map;
   4851  1.1  mrg 	      modref_parm_map chain_map;
   4852  1.1  mrg 	      /* TODO: Once we get jump functions for static chains we could
   4853  1.1  mrg 		 compute this.  */
   4854  1.1  mrg 	      chain_map.parm_index = MODREF_UNKNOWN_PARM;
   4855  1.1  mrg 
   4856  1.1  mrg 	      compute_parm_map (callee_edge, &parm_map);
   4857  1.1  mrg 
   4858  1.1  mrg 	      /* Merge in callee's information.  */
   4859  1.1  mrg 	      if (callee_summary)
   4860  1.1  mrg 		{
   4861  1.1  mrg 		  changed |= cur_summary->loads->merge
   4862  1.1  mrg 				  (node->decl, callee_summary->loads,
   4863  1.1  mrg 				   &parm_map, &chain_map, !first);
   4864  1.1  mrg 		  if (!ignore_stores)
   4865  1.1  mrg 		    {
   4866  1.1  mrg 		      changed |= cur_summary->stores->merge
   4867  1.1  mrg 				      (node->decl, callee_summary->stores,
   4868  1.1  mrg 				       &parm_map, &chain_map, !first);
   4869  1.1  mrg 		      if (!cur_summary->writes_errno
   4870  1.1  mrg 			  && callee_summary->writes_errno)
   4871  1.1  mrg 			{
   4872  1.1  mrg 			  cur_summary->writes_errno = true;
   4873  1.1  mrg 			  changed = true;
   4874  1.1  mrg 			}
   4875  1.1  mrg 		    }
   4876  1.1  mrg 		}
   4877  1.1  mrg 	      if (callee_summary_lto)
   4878  1.1  mrg 		{
   4879  1.1  mrg 		  changed |= cur_summary_lto->loads->merge
   4880  1.1  mrg 				  (node->decl, callee_summary_lto->loads,
   4881  1.1  mrg 				   &parm_map, &chain_map, !first);
   4882  1.1  mrg 		  if (!ignore_stores)
   4883  1.1  mrg 		    {
   4884  1.1  mrg 		      changed |= cur_summary_lto->stores->merge
   4885  1.1  mrg 				      (node->decl, callee_summary_lto->stores,
   4886  1.1  mrg 				       &parm_map, &chain_map, !first);
   4887  1.1  mrg 		      if (!cur_summary_lto->writes_errno
   4888  1.1  mrg 			  && callee_summary_lto->writes_errno)
   4889  1.1  mrg 			{
   4890  1.1  mrg 			  cur_summary_lto->writes_errno = true;
   4891  1.1  mrg 			  changed = true;
   4892  1.1  mrg 			}
   4893  1.1  mrg 		    }
   4894  1.1  mrg 		}
   4895  1.1  mrg 	      if (changed)
   4896  1.1  mrg 		remove_useless_summaries (node, &cur_summary,
   4897  1.1  mrg 					  &cur_summary_lto,
   4898  1.1  mrg 					  cur_ecf_flags);
   4899  1.1  mrg 	      if (!cur_summary && !cur_summary_lto)
   4900  1.1  mrg 		break;
   4901  1.1  mrg 	      if (dump_file && changed)
   4902  1.1  mrg 		{
   4903  1.1  mrg 		  if (cur_summary)
   4904  1.1  mrg 		    cur_summary->dump (dump_file);
   4905  1.1  mrg 		  if (cur_summary_lto)
   4906  1.1  mrg 		    cur_summary_lto->dump (dump_file);
   4907  1.1  mrg 		  dump_modref_edge_summaries (dump_file, node, 4);
   4908  1.1  mrg 		}
   4909  1.1  mrg 	    }
   4910  1.1  mrg 	}
   4911  1.1  mrg       iteration++;
   4912  1.1  mrg       first = false;
   4913  1.1  mrg     }
   4914  1.1  mrg   if (dump_file)
   4915  1.1  mrg     fprintf (dump_file,
   4916  1.1  mrg 	     "Propagation finished in %i iterations\n", iteration);
   4917  1.1  mrg   bool pureconst = false;
   4918  1.1  mrg   for (struct cgraph_node *cur = component_node; cur;
   4919  1.1  mrg        cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
   4920  1.1  mrg     if (!cur->inlined_to && opt_for_fn (cur->decl, flag_ipa_pure_const))
   4921  1.1  mrg       {
   4922  1.1  mrg 	modref_summary *summary = optimization_summaries
   4923  1.1  mrg 				  ? optimization_summaries->get (cur)
   4924  1.1  mrg 				  : NULL;
   4925  1.1  mrg 	modref_summary_lto *summary_lto = summaries_lto
   4926  1.1  mrg 					  ? summaries_lto->get (cur)
   4927  1.1  mrg 					  : NULL;
   4928  1.1  mrg 	if (summary && !summary->stores->every_base && !summary->stores->bases
   4929  1.1  mrg 	    && !summary->nondeterministic)
   4930  1.1  mrg 	  {
   4931  1.1  mrg 	    if (!summary->loads->every_base && !summary->loads->bases
   4932  1.1  mrg 		&& !summary->calls_interposable)
   4933  1.1  mrg 	      pureconst |= ipa_make_function_const
   4934  1.1  mrg 		     (cur, summary->side_effects, false);
   4935  1.1  mrg 	    else
   4936  1.1  mrg 	      pureconst |= ipa_make_function_pure
   4937  1.1  mrg 		     (cur, summary->side_effects, false);
   4938  1.1  mrg 	  }
   4939  1.1  mrg 	if (summary_lto && !summary_lto->stores->every_base
   4940  1.1  mrg 	    && !summary_lto->stores->bases && !summary_lto->nondeterministic)
   4941  1.1  mrg 	  {
   4942  1.1  mrg 	    if (!summary_lto->loads->every_base && !summary_lto->loads->bases
   4943  1.1  mrg 		&& !summary_lto->calls_interposable)
   4944  1.1  mrg 	      pureconst |= ipa_make_function_const
   4945  1.1  mrg 		     (cur, summary_lto->side_effects, false);
   4946  1.1  mrg 	    else
   4947  1.1  mrg 	      pureconst |= ipa_make_function_pure
   4948  1.1  mrg 		     (cur, summary_lto->side_effects, false);
   4949  1.1  mrg 	  }
   4950  1.1  mrg      }
   4951  1.1  mrg   return pureconst;
   4952  1.1  mrg }
   4953  1.1  mrg 
   4954  1.1  mrg /* Dump results of propagation in SCC rooted in COMPONENT_NODE.  */
   4955  1.1  mrg 
   4956  1.1  mrg static void
   4957  1.1  mrg modref_propagate_dump_scc (cgraph_node *component_node)
   4958  1.1  mrg {
   4959  1.1  mrg   for (struct cgraph_node *cur = component_node; cur;
   4960  1.1  mrg        cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
   4961  1.1  mrg     if (!cur->inlined_to)
   4962  1.1  mrg       {
   4963  1.1  mrg 	modref_summary *cur_summary = optimization_summaries
   4964  1.1  mrg 				      ? optimization_summaries->get (cur)
   4965  1.1  mrg 				      : NULL;
   4966  1.1  mrg 	modref_summary_lto *cur_summary_lto = summaries_lto
   4967  1.1  mrg 					      ? summaries_lto->get (cur)
   4968  1.1  mrg 					      : NULL;
   4969  1.1  mrg 
   4970  1.1  mrg 	fprintf (dump_file, "Propagated modref for %s%s%s\n",
   4971  1.1  mrg 		 cur->dump_name (),
   4972  1.1  mrg 		 TREE_READONLY (cur->decl) ? " (const)" : "",
   4973  1.1  mrg 		 DECL_PURE_P (cur->decl) ? " (pure)" : "");
   4974  1.1  mrg 	if (optimization_summaries)
   4975  1.1  mrg 	  {
   4976  1.1  mrg 	    if (cur_summary)
   4977  1.1  mrg 	      cur_summary->dump (dump_file);
   4978  1.1  mrg 	    else
   4979  1.1  mrg 	      fprintf (dump_file, "  Not tracked\n");
   4980  1.1  mrg 	  }
   4981  1.1  mrg 	if (summaries_lto)
   4982  1.1  mrg 	  {
   4983  1.1  mrg 	    if (cur_summary_lto)
   4984  1.1  mrg 	      cur_summary_lto->dump (dump_file);
   4985  1.1  mrg 	    else
   4986  1.1  mrg 	      fprintf (dump_file, "  Not tracked (lto)\n");
   4987  1.1  mrg 	  }
   4988  1.1  mrg       }
   4989  1.1  mrg }
   4990  1.1  mrg 
/* Determine EAF flags known for call E with CALLEE_ECF_FLAGS and ARG.  */
   4992  1.1  mrg 
   4993  1.1  mrg int
   4994  1.1  mrg implicit_eaf_flags_for_edge_and_arg (cgraph_edge *e, int callee_ecf_flags,
   4995  1.1  mrg 				     bool ignore_stores, int arg)
   4996  1.1  mrg {
   4997  1.1  mrg   /* Returning the value is already accounted to at local propagation.  */
   4998  1.1  mrg   int implicit_flags = EAF_NOT_RETURNED_DIRECTLY
   4999  1.1  mrg 		       | EAF_NOT_RETURNED_INDIRECTLY;
   5000  1.1  mrg   if (ignore_stores)
   5001  1.1  mrg      implicit_flags |= ignore_stores_eaf_flags;
   5002  1.1  mrg   if (callee_ecf_flags & ECF_PURE)
   5003  1.1  mrg     implicit_flags |= implicit_pure_eaf_flags;
   5004  1.1  mrg   if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
   5005  1.1  mrg     implicit_flags |= implicit_const_eaf_flags;
   5006  1.1  mrg   class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
   5007  1.1  mrg   if (fnspec_sum)
   5008  1.1  mrg     {
   5009  1.1  mrg       attr_fnspec fnspec (fnspec_sum->fnspec);
   5010  1.1  mrg       implicit_flags |= fnspec.arg_eaf_flags (arg);
   5011  1.1  mrg     }
   5012  1.1  mrg   return implicit_flags;
   5013  1.1  mrg }
   5014  1.1  mrg 
   5015  1.1  mrg /* Process escapes in SUM and merge SUMMARY to CUR_SUMMARY
   5016  1.1  mrg    and SUMMARY_LTO to CUR_SUMMARY_LTO.
   5017  1.1  mrg    Return true if something changed.  */
   5018  1.1  mrg 
   5019  1.1  mrg static bool
   5020  1.1  mrg modref_merge_call_site_flags (escape_summary *sum,
   5021  1.1  mrg 			      modref_summary *cur_summary,
   5022  1.1  mrg 			      modref_summary_lto *cur_summary_lto,
   5023  1.1  mrg 			      modref_summary *summary,
   5024  1.1  mrg 			      modref_summary_lto *summary_lto,
   5025  1.1  mrg 			      tree caller,
   5026  1.1  mrg 			      cgraph_edge *e,
   5027  1.1  mrg 			      int caller_ecf_flags,
   5028  1.1  mrg 			      int callee_ecf_flags,
   5029  1.1  mrg 			      bool binds_to_current_def)
   5030  1.1  mrg {
   5031  1.1  mrg   escape_entry *ee;
   5032  1.1  mrg   unsigned int i;
   5033  1.1  mrg   bool changed = false;
   5034  1.1  mrg   bool ignore_stores = ignore_stores_p (caller, callee_ecf_flags);
   5035  1.1  mrg 
   5036  1.1  mrg   /* Return early if we have no useful info to propagate.  */
   5037  1.1  mrg   if ((!cur_summary
   5038  1.1  mrg        || (!cur_summary->arg_flags.length ()
   5039  1.1  mrg 	   && !cur_summary->static_chain_flags
   5040  1.1  mrg 	   && !cur_summary->retslot_flags))
   5041  1.1  mrg       && (!cur_summary_lto
   5042  1.1  mrg 	  || (!cur_summary_lto->arg_flags.length ()
   5043  1.1  mrg 	      && !cur_summary_lto->static_chain_flags
   5044  1.1  mrg 	      && !cur_summary_lto->retslot_flags)))
   5045  1.1  mrg     return false;
   5046  1.1  mrg 
   5047  1.1  mrg   FOR_EACH_VEC_ELT (sum->esc, i, ee)
   5048  1.1  mrg     {
   5049  1.1  mrg       int flags = 0;
   5050  1.1  mrg       int flags_lto = 0;
   5051  1.1  mrg       int implicit_flags = implicit_eaf_flags_for_edge_and_arg
   5052  1.1  mrg 				(e, callee_ecf_flags, ignore_stores, ee->arg);
   5053  1.1  mrg 
   5054  1.1  mrg       if (summary && ee->arg < summary->arg_flags.length ())
   5055  1.1  mrg 	flags = summary->arg_flags[ee->arg];
   5056  1.1  mrg       if (summary_lto
   5057  1.1  mrg 	  && ee->arg < summary_lto->arg_flags.length ())
   5058  1.1  mrg 	flags_lto = summary_lto->arg_flags[ee->arg];
   5059  1.1  mrg       if (!ee->direct)
   5060  1.1  mrg 	{
   5061  1.1  mrg 	  flags = deref_flags (flags, ignore_stores);
   5062  1.1  mrg 	  flags_lto = deref_flags (flags_lto, ignore_stores);
   5063  1.1  mrg 	}
   5064  1.1  mrg       if (ignore_stores)
   5065  1.1  mrg 	 implicit_flags |= ignore_stores_eaf_flags;
   5066  1.1  mrg       if (callee_ecf_flags & ECF_PURE)
   5067  1.1  mrg 	implicit_flags |= implicit_pure_eaf_flags;
   5068  1.1  mrg       if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
   5069  1.1  mrg 	implicit_flags |= implicit_const_eaf_flags;
   5070  1.1  mrg       class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
   5071  1.1  mrg       if (fnspec_sum)
   5072  1.1  mrg 	{
   5073  1.1  mrg 	  attr_fnspec fnspec (fnspec_sum->fnspec);
   5074  1.1  mrg 	  implicit_flags |= fnspec.arg_eaf_flags (ee->arg);
   5075  1.1  mrg 	}
   5076  1.1  mrg       if (!ee->direct)
   5077  1.1  mrg 	implicit_flags = deref_flags (implicit_flags, ignore_stores);
   5078  1.1  mrg       flags |= implicit_flags;
   5079  1.1  mrg       flags_lto |= implicit_flags;
   5080  1.1  mrg       if (!binds_to_current_def && (flags || flags_lto))
   5081  1.1  mrg 	{
   5082  1.1  mrg 	  flags = interposable_eaf_flags (flags, implicit_flags);
   5083  1.1  mrg 	  flags_lto = interposable_eaf_flags (flags_lto, implicit_flags);
   5084  1.1  mrg 	}
   5085  1.1  mrg       if (!(flags & EAF_UNUSED)
   5086  1.1  mrg 	  && cur_summary && ee->parm_index < (int)cur_summary->arg_flags.length ())
   5087  1.1  mrg 	{
   5088  1.1  mrg 	  eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
   5089  1.1  mrg 			   ? cur_summary->retslot_flags
   5090  1.1  mrg 			   : ee->parm_index == MODREF_STATIC_CHAIN_PARM
   5091  1.1  mrg 			   ? cur_summary->static_chain_flags
   5092  1.1  mrg 			   : cur_summary->arg_flags[ee->parm_index];
   5093  1.1  mrg 	  if ((f & flags) != f)
   5094  1.1  mrg 	    {
   5095  1.1  mrg 	      f = remove_useless_eaf_flags
   5096  1.1  mrg 			 (f & flags, caller_ecf_flags,
   5097  1.1  mrg 			  VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
   5098  1.1  mrg 	      changed = true;
   5099  1.1  mrg 	    }
   5100  1.1  mrg 	}
   5101  1.1  mrg       if (!(flags_lto & EAF_UNUSED)
   5102  1.1  mrg 	  && cur_summary_lto
   5103  1.1  mrg 	  && ee->parm_index < (int)cur_summary_lto->arg_flags.length ())
   5104  1.1  mrg 	{
   5105  1.1  mrg 	  eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
   5106  1.1  mrg 			   ? cur_summary_lto->retslot_flags
   5107  1.1  mrg 			   : ee->parm_index == MODREF_STATIC_CHAIN_PARM
   5108  1.1  mrg 			   ? cur_summary_lto->static_chain_flags
   5109  1.1  mrg 			   : cur_summary_lto->arg_flags[ee->parm_index];
   5110  1.1  mrg 	  if ((f & flags_lto) != f)
   5111  1.1  mrg 	    {
   5112  1.1  mrg 	      f = remove_useless_eaf_flags
   5113  1.1  mrg 			 (f & flags_lto, caller_ecf_flags,
   5114  1.1  mrg 			  VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
   5115  1.1  mrg 	      changed = true;
   5116  1.1  mrg 	    }
   5117  1.1  mrg 	}
   5118  1.1  mrg     }
   5119  1.1  mrg   return changed;
   5120  1.1  mrg }
   5121  1.1  mrg 
   5122  1.1  mrg /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
   5123  1.1  mrg    and propagate arg flags.  */
   5124  1.1  mrg 
static void
modref_propagate_flags_in_scc (cgraph_node *component_node)
{
  /* Iterate to a fixed point: keep re-walking the SCC cycle list until no
     summary changes, then the argument flags are fully propagated.  */
  bool changed = true;
  int iteration = 0;

  while (changed)
    {
      changed = false;
      for (struct cgraph_node *cur = component_node; cur;
	   cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
	{
	  /* Summaries of inline clones live on the node they were
	     inlined into.  */
	  cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
	  modref_summary *cur_summary = optimization_summaries
					? optimization_summaries->get (node)
					: NULL;
	  modref_summary_lto *cur_summary_lto = summaries_lto
						? summaries_lto->get (node)
						: NULL;

	  /* Nothing tracked for this node; nothing to refine.  */
	  if (!cur_summary && !cur_summary_lto)
	    continue;
	  int caller_ecf_flags = flags_from_decl_or_type (cur->decl);

	  if (dump_file)
	    fprintf (dump_file, "  Processing %s%s%s\n",
		     cur->dump_name (),
		     TREE_READONLY (cur->decl) ? " (const)" : "",
		     DECL_PURE_P (cur->decl) ? " (pure)" : "");

	  /* First merge flags from indirect calls; no callee summary is
	     available, so only ECF flags and fnspec info apply.  */
	  for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
	    {
	      escape_summary *sum = escape_summaries->get (e);

	      /* Const calls that are not looping cannot affect the
		 escaping arguments.  */
	      if (!sum || ((e->indirect_info->ecf_flags & ECF_CONST)
		  && !(e->indirect_info->ecf_flags & ECF_LOOPING_CONST_OR_PURE)))
		continue;

	      changed |= modref_merge_call_site_flags
				(sum, cur_summary, cur_summary_lto,
				 NULL, NULL,
				 node->decl,
				 e,
				 caller_ecf_flags,
				 e->indirect_info->ecf_flags,
				 false);
	    }

	  /* NOTE(review): merging does not drop the summaries above, so
	     this re-check looks purely defensive — confirm.  */
	  if (!cur_summary && !cur_summary_lto)
	    continue;

	  /* Now merge flags from direct calls using callee summaries
	     where available.  */
	  for (cgraph_edge *callee_edge = cur->callees; callee_edge;
	       callee_edge = callee_edge->next_callee)
	    {
	      int ecf_flags = flags_from_decl_or_type
				 (callee_edge->callee->decl);
	      modref_summary *callee_summary = NULL;
	      modref_summary_lto *callee_summary_lto = NULL;
	      struct cgraph_node *callee;

	      /* Non-looping const callees cannot change argument flags.  */
	      if ((ecf_flags & ECF_CONST)
		  && !(ecf_flags & ECF_LOOPING_CONST_OR_PURE))
		continue;

	      /* Get the callee and its summary.  */
	      enum availability avail;
	      callee = callee_edge->callee->ultimate_alias_target
			 (&avail, cur);

	      /* It is not necessary to re-process calls outside of the
		 SCC component.  */
	      if (iteration > 0
		  && (!callee->aux
		      || ((struct ipa_dfs_info *)cur->aux)->scc_no
			  != ((struct ipa_dfs_info *)callee->aux)->scc_no))
		continue;

	      escape_summary *sum = escape_summaries->get (callee_edge);
	      if (!sum)
		continue;

	      if (dump_file)
		fprintf (dump_file, "    Call to %s\n",
			 callee_edge->callee->dump_name ());

	      /* Use the callee summary only when the body we analyzed is
		 the one that will actually run; interposable or
		 uninlinable targets fall back to implicit flags only.  */
	      if (avail <= AVAIL_INTERPOSABLE
		  || callee_edge->call_stmt_cannot_inline_p)
		;
	      else
		{
		  if (cur_summary)
		    callee_summary = optimization_summaries->get (callee);
		  if (cur_summary_lto)
		    callee_summary_lto = summaries_lto->get (callee);
		}
	      changed |= modref_merge_call_site_flags
				(sum, cur_summary, cur_summary_lto,
				 callee_summary, callee_summary_lto,
				 node->decl,
				 callee_edge,
				 caller_ecf_flags,
				 ecf_flags,
				 callee->binds_to_current_def_p ());
	      if (dump_file && changed)
		{
		  if (cur_summary)
		    cur_summary->dump (dump_file);
		  if (cur_summary_lto)
		    cur_summary_lto->dump (dump_file);
		}
	    }
	}
      iteration++;
    }
  if (dump_file)
    fprintf (dump_file,
	     "Propagation of flags finished in %i iterations\n", iteration);
}
   5243  1.1  mrg 
   5244  1.1  mrg }  /* ANON namespace.  */
   5245  1.1  mrg 
   5246  1.1  mrg /* Call EDGE was inlined; merge summary from callee to the caller.  */
   5247  1.1  mrg 
   5248  1.1  mrg void
   5249  1.1  mrg ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
   5250  1.1  mrg {
   5251  1.1  mrg   if (!summaries && !summaries_lto)
   5252  1.1  mrg     return;
   5253  1.1  mrg 
   5254  1.1  mrg   struct cgraph_node *to = (edge->caller->inlined_to
   5255  1.1  mrg 			    ? edge->caller->inlined_to : edge->caller);
   5256  1.1  mrg   class modref_summary *to_info = summaries ? summaries->get (to) : NULL;
   5257  1.1  mrg   class modref_summary_lto *to_info_lto = summaries_lto
   5258  1.1  mrg 					  ? summaries_lto->get (to) : NULL;
   5259  1.1  mrg 
   5260  1.1  mrg   if (!to_info && !to_info_lto)
   5261  1.1  mrg     {
   5262  1.1  mrg       if (summaries)
   5263  1.1  mrg 	summaries->remove (edge->callee);
   5264  1.1  mrg       if (summaries_lto)
   5265  1.1  mrg 	summaries_lto->remove (edge->callee);
   5266  1.1  mrg       remove_modref_edge_summaries (edge->callee);
   5267  1.1  mrg       return;
   5268  1.1  mrg     }
   5269  1.1  mrg 
   5270  1.1  mrg   class modref_summary *callee_info = summaries ? summaries->get (edge->callee)
   5271  1.1  mrg 				      : NULL;
   5272  1.1  mrg   class modref_summary_lto *callee_info_lto
   5273  1.1  mrg 		 = summaries_lto ? summaries_lto->get (edge->callee) : NULL;
   5274  1.1  mrg   int flags = flags_from_decl_or_type (edge->callee->decl);
   5275  1.1  mrg   /* Combine in outer flags.  */
   5276  1.1  mrg   cgraph_node *n;
   5277  1.1  mrg   for (n = edge->caller; n->inlined_to; n = n->callers->caller)
   5278  1.1  mrg     flags |= flags_from_decl_or_type (n->decl);
   5279  1.1  mrg   flags |= flags_from_decl_or_type (n->decl);
   5280  1.1  mrg   bool ignore_stores = ignore_stores_p (edge->caller->decl, flags);
   5281  1.1  mrg 
   5282  1.1  mrg   if (!callee_info && to_info)
   5283  1.1  mrg     {
   5284  1.1  mrg       if (!(flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS)))
   5285  1.1  mrg 	to_info->loads->collapse ();
   5286  1.1  mrg       if (!ignore_stores)
   5287  1.1  mrg 	to_info->stores->collapse ();
   5288  1.1  mrg     }
   5289  1.1  mrg   if (!callee_info_lto && to_info_lto)
   5290  1.1  mrg     {
   5291  1.1  mrg       if (!(flags & (ECF_CONST | ECF_NOVOPS)))
   5292  1.1  mrg 	to_info_lto->loads->collapse ();
   5293  1.1  mrg       if (!ignore_stores)
   5294  1.1  mrg 	to_info_lto->stores->collapse ();
   5295  1.1  mrg     }
   5296  1.1  mrg   /* Merge side effects and non-determinism.
   5297  1.1  mrg      PURE/CONST flags makes functions deterministic and if there is
   5298  1.1  mrg      no LOOPING_CONST_OR_PURE they also have no side effects.  */
   5299  1.1  mrg   if (!(flags & (ECF_CONST | ECF_PURE))
   5300  1.1  mrg       || (flags & ECF_LOOPING_CONST_OR_PURE))
   5301  1.1  mrg     {
   5302  1.1  mrg       if (to_info)
   5303  1.1  mrg 	{
   5304  1.1  mrg 	  if (!callee_info || callee_info->side_effects)
   5305  1.1  mrg 	    to_info->side_effects = true;
   5306  1.1  mrg 	  if ((!callee_info || callee_info->nondeterministic)
   5307  1.1  mrg 	      && !ignore_nondeterminism_p (edge->caller->decl, flags))
   5308  1.1  mrg 	    to_info->nondeterministic = true;
   5309  1.1  mrg 	}
   5310  1.1  mrg       if (to_info_lto)
   5311  1.1  mrg 	{
   5312  1.1  mrg 	  if (!callee_info_lto || callee_info_lto->side_effects)
   5313  1.1  mrg 	    to_info_lto->side_effects = true;
   5314  1.1  mrg 	  if ((!callee_info_lto || callee_info_lto->nondeterministic)
   5315  1.1  mrg 	      && !ignore_nondeterminism_p (edge->caller->decl, flags))
   5316  1.1  mrg 	    to_info_lto->nondeterministic = true;
   5317  1.1  mrg 	}
   5318  1.1  mrg      }
   5319  1.1  mrg   if (callee_info || callee_info_lto)
   5320  1.1  mrg     {
   5321  1.1  mrg       auto_vec <modref_parm_map, 32> parm_map;
   5322  1.1  mrg       modref_parm_map chain_map;
   5323  1.1  mrg       /* TODO: Once we get jump functions for static chains we could
   5324  1.1  mrg 	 compute parm_index.  */
   5325  1.1  mrg 
   5326  1.1  mrg       compute_parm_map (edge, &parm_map);
   5327  1.1  mrg 
   5328  1.1  mrg       if (!ignore_stores)
   5329  1.1  mrg 	{
   5330  1.1  mrg 	  if (to_info && callee_info)
   5331  1.1  mrg 	    to_info->stores->merge (to->decl, callee_info->stores, &parm_map,
   5332  1.1  mrg 				    &chain_map, false);
   5333  1.1  mrg 	  if (to_info_lto && callee_info_lto)
   5334  1.1  mrg 	    to_info_lto->stores->merge (to->decl, callee_info_lto->stores,
   5335  1.1  mrg 					&parm_map, &chain_map, false);
   5336  1.1  mrg 	}
   5337  1.1  mrg       if (!(flags & (ECF_CONST | ECF_NOVOPS)))
   5338  1.1  mrg 	{
   5339  1.1  mrg 	  if (to_info && callee_info)
   5340  1.1  mrg 	    to_info->loads->merge (to->decl, callee_info->loads, &parm_map,
   5341  1.1  mrg 				   &chain_map, false);
   5342  1.1  mrg 	  if (to_info_lto && callee_info_lto)
   5343  1.1  mrg 	    to_info_lto->loads->merge (to->decl, callee_info_lto->loads,
   5344  1.1  mrg 				       &parm_map, &chain_map, false);
   5345  1.1  mrg 	}
   5346  1.1  mrg     }
   5347  1.1  mrg 
   5348  1.1  mrg   /* Now merge escape summaries.
   5349  1.1  mrg      For every escape to the callee we need to merge callee flags
   5350  1.1  mrg      and remap callee's escapes.  */
   5351  1.1  mrg   class escape_summary *sum = escape_summaries->get (edge);
   5352  1.1  mrg   int max_escape = -1;
   5353  1.1  mrg   escape_entry *ee;
   5354  1.1  mrg   unsigned int i;
   5355  1.1  mrg 
   5356  1.1  mrg   if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
   5357  1.1  mrg     FOR_EACH_VEC_ELT (sum->esc, i, ee)
   5358  1.1  mrg       if ((int)ee->arg > max_escape)
   5359  1.1  mrg 	max_escape = ee->arg;
   5360  1.1  mrg 
   5361  1.1  mrg   auto_vec <vec <struct escape_map>, 32> emap (max_escape + 1);
   5362  1.1  mrg   emap.safe_grow (max_escape + 1, true);
   5363  1.1  mrg   for (i = 0; (int)i < max_escape + 1; i++)
   5364  1.1  mrg     emap[i] = vNULL;
   5365  1.1  mrg 
   5366  1.1  mrg   if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
   5367  1.1  mrg     FOR_EACH_VEC_ELT (sum->esc, i, ee)
   5368  1.1  mrg       {
   5369  1.1  mrg 	bool needed = false;
   5370  1.1  mrg 	int implicit_flags = implicit_eaf_flags_for_edge_and_arg
   5371  1.1  mrg 				(edge, flags, ignore_stores,
   5372  1.1  mrg 				 ee->arg);
   5373  1.1  mrg 	if (!ee->direct)
   5374  1.1  mrg 	  implicit_flags = deref_flags (implicit_flags, ignore_stores);
   5375  1.1  mrg 	if (to_info && (int)to_info->arg_flags.length () > ee->parm_index)
   5376  1.1  mrg 	  {
   5377  1.1  mrg 	    int flags = callee_info
   5378  1.1  mrg 			&& callee_info->arg_flags.length () > ee->arg
   5379  1.1  mrg 			? callee_info->arg_flags[ee->arg] : 0;
   5380  1.1  mrg 	    if (!ee->direct)
   5381  1.1  mrg 	      flags = deref_flags (flags, ignore_stores);
   5382  1.1  mrg 	    flags |= ee->min_flags | implicit_flags;
   5383  1.1  mrg 	    eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
   5384  1.1  mrg 			     ? to_info->retslot_flags
   5385  1.1  mrg 			     : ee->parm_index == MODREF_STATIC_CHAIN_PARM
   5386  1.1  mrg 			     ? to_info->static_chain_flags
   5387  1.1  mrg 			     : to_info->arg_flags[ee->parm_index];
   5388  1.1  mrg 	    f &= flags;
   5389  1.1  mrg 	    if (f)
   5390  1.1  mrg 	      needed = true;
   5391  1.1  mrg 	  }
   5392  1.1  mrg 	if (to_info_lto
   5393  1.1  mrg 	    && (int)to_info_lto->arg_flags.length () > ee->parm_index)
   5394  1.1  mrg 	  {
   5395  1.1  mrg 	    int flags = callee_info_lto
   5396  1.1  mrg 			&& callee_info_lto->arg_flags.length () > ee->arg
   5397  1.1  mrg 			? callee_info_lto->arg_flags[ee->arg] : 0;
   5398  1.1  mrg 	    if (!ee->direct)
   5399  1.1  mrg 	      flags = deref_flags (flags, ignore_stores);
   5400  1.1  mrg 	    flags |= ee->min_flags | implicit_flags;
   5401  1.1  mrg 	    eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
   5402  1.1  mrg 			     ? to_info_lto->retslot_flags
   5403  1.1  mrg 			     : ee->parm_index == MODREF_STATIC_CHAIN_PARM
   5404  1.1  mrg 			     ? to_info_lto->static_chain_flags
   5405  1.1  mrg 			     : to_info_lto->arg_flags[ee->parm_index];
   5406  1.1  mrg 	    f &= flags;
   5407  1.1  mrg 	    if (f)
   5408  1.1  mrg 	      needed = true;
   5409  1.1  mrg 	  }
   5410  1.1  mrg 	struct escape_map entry = {ee->parm_index, ee->direct};
   5411  1.1  mrg 	if (needed)
   5412  1.1  mrg 	  emap[ee->arg].safe_push (entry);
   5413  1.1  mrg       }
   5414  1.1  mrg   update_escape_summary (edge->callee, emap, ignore_stores);
   5415  1.1  mrg   for (i = 0; (int)i < max_escape + 1; i++)
   5416  1.1  mrg     emap[i].release ();
   5417  1.1  mrg   if (sum)
   5418  1.1  mrg     escape_summaries->remove (edge);
   5419  1.1  mrg 
   5420  1.1  mrg   if (summaries)
   5421  1.1  mrg     {
   5422  1.1  mrg       if (to_info && !to_info->useful_p (flags))
   5423  1.1  mrg 	{
   5424  1.1  mrg 	  if (dump_file)
   5425  1.1  mrg 	    fprintf (dump_file, "Removed mod-ref summary for %s\n",
   5426  1.1  mrg 		     to->dump_name ());
   5427  1.1  mrg 	  summaries->remove (to);
   5428  1.1  mrg 	  to_info = NULL;
   5429  1.1  mrg 	}
   5430  1.1  mrg       else if (to_info && dump_file)
   5431  1.1  mrg 	{
   5432  1.1  mrg 	  if (dump_file)
   5433  1.1  mrg 	    fprintf (dump_file, "Updated mod-ref summary for %s\n",
   5434  1.1  mrg 		     to->dump_name ());
   5435  1.1  mrg 	  to_info->dump (dump_file);
   5436  1.1  mrg 	}
   5437  1.1  mrg       if (callee_info)
   5438  1.1  mrg 	summaries->remove (edge->callee);
   5439  1.1  mrg     }
   5440  1.1  mrg   if (summaries_lto)
   5441  1.1  mrg     {
   5442  1.1  mrg       if (to_info_lto && !to_info_lto->useful_p (flags))
   5443  1.1  mrg 	{
   5444  1.1  mrg 	  if (dump_file)
   5445  1.1  mrg 	    fprintf (dump_file, "Removed mod-ref summary for %s\n",
   5446  1.1  mrg 		     to->dump_name ());
   5447  1.1  mrg 	  summaries_lto->remove (to);
   5448  1.1  mrg 	  to_info_lto = NULL;
   5449  1.1  mrg 	}
   5450  1.1  mrg       else if (to_info_lto && dump_file)
   5451  1.1  mrg 	{
   5452  1.1  mrg 	  if (dump_file)
   5453  1.1  mrg 	    fprintf (dump_file, "Updated mod-ref summary for %s\n",
   5454  1.1  mrg 		     to->dump_name ());
   5455  1.1  mrg 	  to_info_lto->dump (dump_file);
   5456  1.1  mrg 	}
   5457  1.1  mrg       if (callee_info_lto)
   5458  1.1  mrg 	summaries_lto->remove (edge->callee);
   5459  1.1  mrg     }
   5460  1.1  mrg   if (!to_info && !to_info_lto)
   5461  1.1  mrg     remove_modref_edge_summaries (to);
   5462  1.1  mrg   return;
   5463  1.1  mrg }
   5464  1.1  mrg 
   5465  1.1  mrg /* Run the IPA pass.  This will take a function's summaries and calls and
   5466  1.1  mrg    construct new summaries which represent a transitive closure.  So that
   5467  1.1  mrg    summary of an analyzed function contains information about the loads and
   5468  1.1  mrg    stores that the function or any function that it calls does.  */
   5469  1.1  mrg 
   5470  1.1  mrg unsigned int
   5471  1.1  mrg pass_ipa_modref::execute (function *)
   5472  1.1  mrg {
   5473  1.1  mrg   if (!summaries && !summaries_lto)
   5474  1.1  mrg     return 0;
   5475  1.1  mrg   bool pureconst = false;
   5476  1.1  mrg 
   5477  1.1  mrg   if (optimization_summaries)
   5478  1.1  mrg     ggc_delete (optimization_summaries);
   5479  1.1  mrg   optimization_summaries = summaries;
   5480  1.1  mrg   summaries = NULL;
   5481  1.1  mrg 
   5482  1.1  mrg   struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
   5483  1.1  mrg 					 symtab->cgraph_count);
   5484  1.1  mrg   int order_pos;
   5485  1.1  mrg   order_pos = ipa_reduced_postorder (order, true, ignore_edge);
   5486  1.1  mrg   int i;
   5487  1.1  mrg 
   5488  1.1  mrg   /* Iterate over all strongly connected components in post-order.  */
   5489  1.1  mrg   for (i = 0; i < order_pos; i++)
   5490  1.1  mrg     {
   5491  1.1  mrg       /* Get the component's representative.  That's just any node in the
   5492  1.1  mrg 	 component from which we can traverse the entire component.  */
   5493  1.1  mrg       struct cgraph_node *component_node = order[i];
   5494  1.1  mrg 
   5495  1.1  mrg       if (dump_file)
   5496  1.1  mrg 	fprintf (dump_file, "\n\nStart of SCC component\n");
   5497  1.1  mrg 
   5498  1.1  mrg       pureconst |= modref_propagate_in_scc (component_node);
   5499  1.1  mrg       modref_propagate_flags_in_scc (component_node);
   5500  1.1  mrg       if (optimization_summaries)
   5501  1.1  mrg 	for (struct cgraph_node *cur = component_node; cur;
   5502  1.1  mrg 	     cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
   5503  1.1  mrg 	  if (modref_summary *sum = optimization_summaries->get (cur))
   5504  1.1  mrg 	    sum->finalize (cur->decl);
   5505  1.1  mrg       if (dump_file)
   5506  1.1  mrg 	modref_propagate_dump_scc (component_node);
   5507  1.1  mrg     }
   5508  1.1  mrg   cgraph_node *node;
   5509  1.1  mrg   FOR_EACH_FUNCTION (node)
   5510  1.1  mrg     update_signature (node);
   5511  1.1  mrg   if (summaries_lto)
   5512  1.1  mrg     ((modref_summaries_lto *)summaries_lto)->propagated = true;
   5513  1.1  mrg   ipa_free_postorder_info ();
   5514  1.1  mrg   free (order);
   5515  1.1  mrg   delete fnspec_summaries;
   5516  1.1  mrg   fnspec_summaries = NULL;
   5517  1.1  mrg   delete escape_summaries;
   5518  1.1  mrg   escape_summaries = NULL;
   5519  1.1  mrg 
   5520  1.1  mrg   /* If we possibly made constructors const/pure we may need to remove
   5521  1.1  mrg      them.  */
   5522  1.1  mrg   return pureconst ? TODO_remove_functions : 0;
   5523  1.1  mrg }
   5524  1.1  mrg 
   5525  1.1  mrg /* Summaries must stay alive until end of compilation.  */
   5526  1.1  mrg 
   5527  1.1  mrg void
   5528  1.1  mrg ipa_modref_cc_finalize ()
   5529  1.1  mrg {
   5530  1.1  mrg   if (optimization_summaries)
   5531  1.1  mrg     ggc_delete (optimization_summaries);
   5532  1.1  mrg   optimization_summaries = NULL;
   5533  1.1  mrg   if (summaries_lto)
   5534  1.1  mrg     ggc_delete (summaries_lto);
   5535  1.1  mrg   summaries_lto = NULL;
   5536  1.1  mrg   if (fnspec_summaries)
   5537  1.1  mrg     delete fnspec_summaries;
   5538  1.1  mrg   fnspec_summaries = NULL;
   5539  1.1  mrg   if (escape_summaries)
   5540  1.1  mrg     delete escape_summaries;
   5541  1.1  mrg   escape_summaries = NULL;
   5542  1.1  mrg }
   5543  1.1  mrg 
   5544  1.1  mrg #include "gt-ipa-modref.h"
   5545