Home | History | Annotate | Line # | Download | only in gcc
ipa-polymorphic-call.cc revision 1.1.1.1
      1 /* Analysis of polymorphic call context.
      2    Copyright (C) 2013-2022 Free Software Foundation, Inc.
      3    Contributed by Jan Hubicka
      4 
      5 This file is part of GCC.
      6 
      7 GCC is free software; you can redistribute it and/or modify it under
      8 the terms of the GNU General Public License as published by the Free
      9 Software Foundation; either version 3, or (at your option) any later
     10 version.
     11 
     12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
     14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     15 for more details.
     16 
     17 You should have received a copy of the GNU General Public License
     18 along with GCC; see the file COPYING3.  If not see
     19 <http://www.gnu.org/licenses/>.  */
     20 
     21 #include "config.h"
     22 #include "system.h"
     23 #include "coretypes.h"
     24 #include "backend.h"
     25 #include "rtl.h"
     26 #include "tree.h"
     27 #include "gimple.h"
     28 #include "tree-pass.h"
     29 #include "tree-ssa-operands.h"
     30 #include "streamer-hooks.h"
     31 #include "cgraph.h"
     32 #include "data-streamer.h"
     33 #include "diagnostic.h"
     34 #include "alias.h"
     35 #include "fold-const.h"
     36 #include "calls.h"
     37 #include "ipa-utils.h"
     38 #include "tree-dfa.h"
     39 #include "gimple-pretty-print.h"
     40 #include "tree-into-ssa.h"
     41 #include "alloc-pool.h"
     42 #include "symbol-summary.h"
     43 #include "symtab-thunks.h"
     44 
     45 /* Return true when TYPE contains an polymorphic type and thus is interesting
     46    for devirtualization machinery.  */
     47 
     48 static bool contains_type_p (tree, HOST_WIDE_INT, tree,
     49 			     bool consider_placement_new = true,
     50 			     bool consider_bases = true);
     51 
     52 bool
     53 contains_polymorphic_type_p (const_tree type)
     54 {
     55   type = TYPE_MAIN_VARIANT (type);
     56 
     57   if (RECORD_OR_UNION_TYPE_P (type))
     58     {
     59       if (TYPE_BINFO (type)
     60           && polymorphic_type_binfo_p (TYPE_BINFO (type)))
     61 	return true;
     62       for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
     63 	if (TREE_CODE (fld) == FIELD_DECL
     64 	    && !DECL_ARTIFICIAL (fld)
     65 	    && contains_polymorphic_type_p (TREE_TYPE (fld)))
     66 	  return true;
     67       return false;
     68     }
     69   if (TREE_CODE (type) == ARRAY_TYPE)
     70     return contains_polymorphic_type_p (TREE_TYPE (type));
     71   return false;
     72 }
     73 
     74 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE
     75    at position CUR_OFFSET within TYPE.
     76 
     77    POD can be changed to an instance of a polymorphic type by
     78    placement new.  Here we play safe and assume that any
     79    non-polymorphic type is POD.  */
     80 bool
     81 possible_placement_new (tree type, tree expected_type,
     82 			HOST_WIDE_INT cur_offset)
     83 {
     84   if (cur_offset < 0)
     85     return true;
     86   return ((TREE_CODE (type) != RECORD_TYPE
     87 	   || !TYPE_BINFO (type)
     88 	   || cur_offset >= POINTER_SIZE
     89 	   || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
     90 	  && (!TYPE_SIZE (type)
     91 	      || !tree_fits_shwi_p (TYPE_SIZE (type))
     92 	      || (cur_offset
     93 		  + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
     94 		     : POINTER_SIZE)
     95 		  <= tree_to_uhwi (TYPE_SIZE (type)))));
     96 }
     97 
     98 /* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
     99    is contained at THIS->OFFSET.  Walk the memory representation of
    100    THIS->OUTER_TYPE and find the outermost class type that match
    101    OTR_TYPE or contain OTR_TYPE as a base.  Update THIS
    102    to represent it.
    103 
    104    If OTR_TYPE is NULL, just find outermost polymorphic type with
    105    virtual table present at position OFFSET.
    106 
    107    For example when THIS represents type
    108    class A
    109      {
    110        int a;
    111        class B b;
    112      }
    113    and we look for type at offset sizeof(int), we end up with B and offset 0.
    114    If the same is produced by multiple inheritance, we end up with A and offset
    115    sizeof(int).
    116 
    117    If we cannot find corresponding class, give up by setting
    118    THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
    119    Return true when lookup was successful.
    120 
    121    When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
    122    valid only via allocation of new polymorphic type inside by means
    123    of placement new.
    124 
    125    When CONSIDER_BASES is false, only look for actual fields, not base types
    126    of TYPE.  */
    127 
bool
ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
						       bool consider_placement_new,
						       bool consider_bases)
{
  /* TYPE/CUR_OFFSET track the sub-object currently being examined;
     they start at the (possibly speculative, on the second pass)
     outer type and offset of the context.  */
  tree type = outer_type;
  HOST_WIDE_INT cur_offset = offset;
  bool speculative = false;
  bool size_unknown = false;
  unsigned HOST_WIDE_INT otr_type_size = POINTER_SIZE;

  /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set.  */
  if (!outer_type)
    {
      clear_outer_type (otr_type);
      type = otr_type;
      cur_offset = 0;
    }
 /* See if OFFSET points inside OUTER_TYPE.  If it does not, we know
    that the context is either invalid, or the instance type must be
    derived from OUTER_TYPE.

    Because the instance type may contain field whose type is of OUTER_TYPE,
    we cannot derive any effective information about it.

    TODO: In the case we know all derived types, we can definitely do better
    here.  */
  else if (TYPE_SIZE (outer_type)
	   && tree_fits_shwi_p (TYPE_SIZE (outer_type))
	   && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
	   && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
   {
     bool der = maybe_derived_type; /* clear_outer_type will reset it.  */
     bool dyn = dynamic;
     clear_outer_type (otr_type);
     type = otr_type;
     cur_offset = 0;

     /* If derived type is not allowed, we know that the context is invalid.
	For dynamic types, we really do not have information about
	size of the memory location.  It is possible that completely
	different type is stored after outer_type.  */
     if (!der && !dyn)
       {
	 clear_speculation ();
	 invalid = true;
	 return false;
       }
   }

  /* Prefer the precise size of OTR_TYPE over the POINTER_SIZE default
     when it is a compile-time constant.  */
  if (otr_type && TYPE_SIZE (otr_type)
      && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
    otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));

  if (!type || offset < 0)
    goto no_useful_type_info;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).

     This loop is performed twice; first time for outer_type and second time
     for speculative_outer_type.  The second run has SPECULATIVE set.  */
  while (true)
    {
      unsigned HOST_WIDE_INT pos, size;
      tree fld;

      /* If we do not know size of TYPE, we need to be more conservative
         about accepting cases where we cannot find EXPECTED_TYPE.
	 Generally the types that do matter here are of constant size.
	 Size_unknown case should be very rare.  */
      if (TYPE_SIZE (type)
	  && tree_fits_shwi_p (TYPE_SIZE (type))
	  && tree_to_shwi (TYPE_SIZE (type)) >= 0)
	size_unknown = false;
      else
	size_unknown = true;

      /* On a match, just return what we found.  */
      if ((otr_type
	   && types_odr_comparable (type, otr_type)
	   && types_same_for_odr (type, otr_type))
	  || (!otr_type
	      && TREE_CODE (type) == RECORD_TYPE
	      && TYPE_BINFO (type)
	      && polymorphic_type_binfo_p (TYPE_BINFO (type))))
	{
	  if (speculative)
	    {
	      /* If we did not match the offset, just give up on speculation.  */
	      if (cur_offset != 0
		  /* Also check if speculation did not end up being same as
		     non-speculation.  */
		  || (types_must_be_same_for_odr (speculative_outer_type,
						  outer_type)
		      && (maybe_derived_type
			  == speculative_maybe_derived_type)))
		clear_speculation ();
	      return true;
	    }
	  else
	    {
	      /* If type is known to be final, do not worry about derived
		 types.  Testing it here may help us to avoid speculation.  */
	      if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
		  && (!in_lto_p || odr_type_p (outer_type))
		  && type_with_linkage_p (outer_type)
		  && type_known_to_have_no_derivations_p (outer_type))
		maybe_derived_type = false;

	      /* Type cannot contain itself on an non-zero offset.  In that case
		 just give up.  Still accept the case where size is now known.
		 Either the second copy may appear past the end of type or within
		 the non-POD buffer located inside the variably sized type
		 itself.  */
	      if (cur_offset != 0)
		goto no_useful_type_info;
	      /* If we determined type precisely or we have no clue on
 		 speculation, we are done.  */
	      if (!maybe_derived_type || !speculative_outer_type
		  || !speculation_consistent_p (speculative_outer_type,
					        speculative_offset,
					        speculative_maybe_derived_type,
						otr_type))
		{
		  clear_speculation ();
	          return true;
		}
	      /* Otherwise look into speculation now.  */
	      else
		{
		  /* Restart the walk, this time from the speculative
		     outer type/offset.  */
		  speculative = true;
		  type = speculative_outer_type;
		  cur_offset = speculative_offset;
		  continue;
		}
	    }
	}

      /* Walk fields and find corresponding on at OFFSET.  */
      if (TREE_CODE (type) == RECORD_TYPE)
	{
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      if (TREE_CODE (fld) != FIELD_DECL
		  || TREE_TYPE (fld) == error_mark_node)
		continue;

	      pos = int_bit_position (fld);
	      if (pos > (unsigned HOST_WIDE_INT)cur_offset)
		continue;

	      /* Do not consider vptr itself.  Not even for placement new.  */
	      if (!pos && DECL_ARTIFICIAL (fld)
		  && POINTER_TYPE_P (TREE_TYPE (fld))
		  && TYPE_BINFO (type)
		  && polymorphic_type_binfo_p (TYPE_BINFO (type)))
		continue;

	      if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
		goto no_useful_type_info;
	      size = tree_to_uhwi (DECL_SIZE (fld));

	      /* We can always skip types smaller than pointer size:
		 those cannot contain a virtual table pointer.

		 Disqualifying fields that are too small to fit OTR_TYPE
		 saves work needed to walk them for no benefit.
		 Because of the way the bases are packed into a class, the
		 field's size may be smaller than type size, so it needs
		 to be done with a care.  */

	      if (pos <= (unsigned HOST_WIDE_INT)cur_offset
		  && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
				     + POINTER_SIZE
		  && (!otr_type
		      || !TYPE_SIZE (TREE_TYPE (fld))
		      || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
		      || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
			  >= cur_offset + otr_type_size))
		break;
	    }

	  if (!fld)
	    goto no_useful_type_info;

	  /* Descend into the field found and continue the walk there.  */
	  type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
	  cur_offset -= pos;
	  /* DECL_ARTIFICIAL represents a basetype.  */
	  if (!DECL_ARTIFICIAL (fld))
	    {
	      if (!speculative)
		{
		  outer_type = type;
		  offset = cur_offset;
		  /* As soon as we see an field containing the type,
		     we know we are not looking for derivations.  */
		  maybe_derived_type = false;
		}
	      else
		{
		  speculative_outer_type = type;
		  speculative_offset = cur_offset;
		  speculative_maybe_derived_type = false;
		}
	    }
	  else if (!consider_bases)
	    goto no_useful_type_info;
	}
      else if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));

	  /* Give up if we don't know array field size.
	     Also give up on non-polymorphic types as they are used
	     as buffers for placement new.  */
	  if (!TYPE_SIZE (subtype)
	      || !tree_fits_shwi_p (TYPE_SIZE (subtype))
	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
	      || !contains_polymorphic_type_p (subtype))
	    goto no_useful_type_info;

	  /* Reduce the offset modulo the element size to land inside
	     one array element.  */
	  HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));

	  /* We may see buffer for placement new.  In this case the expected type
	     can be bigger than the subtype.  */
	  if (TYPE_SIZE (subtype)
	      && (cur_offset + otr_type_size
		  > tree_to_uhwi (TYPE_SIZE (subtype))))
	    goto no_useful_type_info;

	  cur_offset = new_offset;
	  type = TYPE_MAIN_VARIANT (subtype);
	  if (!speculative)
	    {
	      outer_type = type;
	      offset = cur_offset;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      speculative_outer_type = type;
	      speculative_offset = cur_offset;
	      speculative_maybe_derived_type = false;
	    }
	}
      /* Give up on anything else.  */
      else
	{
	  /* Shared failure path; also entered by goto from the checks
	     above when the walk cannot make further progress.  */
no_useful_type_info:
	  if (maybe_derived_type && !speculative
	      && TREE_CODE (outer_type) == RECORD_TYPE
	      && TREE_CODE (otr_type) == RECORD_TYPE
	      && TYPE_BINFO (otr_type)
	      && !offset
	      && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
	    {
	      clear_outer_type (otr_type);
	      if (!speculative_outer_type
		  || !speculation_consistent_p (speculative_outer_type,
						speculative_offset,
					        speculative_maybe_derived_type,
						otr_type))
		clear_speculation ();
	      if (speculative_outer_type)
		{
		  speculative = true;
		  type = speculative_outer_type;
		  cur_offset = speculative_offset;
		}
	      else
		return true;
	    }
	  /* We found no way to embed EXPECTED_TYPE in TYPE.
	     We still permit two special cases - placement new and
	     the case of variadic types containing themselves.  */
	  if (!speculative
	      && consider_placement_new
	      && (size_unknown || !type || maybe_derived_type
		  || possible_placement_new (type, otr_type, cur_offset)))
	    {
	      /* In these weird cases we want to accept the context.
		 In non-speculative run we have no useful outer_type info
		 (TODO: we may eventually want to record upper bound on the
		  type size that can be used to prune the walk),
		 but we still want to consider speculation that may
		 give useful info.  */
	      if (!speculative)
		{
		  clear_outer_type (otr_type);
		  if (!speculative_outer_type
		      || !speculation_consistent_p (speculative_outer_type,
						    speculative_offset,
						    speculative_maybe_derived_type,
						    otr_type))
		    clear_speculation ();
		  if (speculative_outer_type)
		    {
		      speculative = true;
		      type = speculative_outer_type;
		      cur_offset = speculative_offset;
		    }
		  else
		    return true;
		}
	      else
		{
		  clear_speculation ();
	          return true;
		}
	    }
	  else
	    {
	      clear_speculation ();
	      if (speculative)
		return true;
	      clear_outer_type (otr_type);
	      invalid = true;
	      return false;
	    }
	}
    }
}
    451 
    452 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
    453    CONSIDER_PLACEMENT_NEW makes function to accept cases where OTR_TYPE can
    454    be built within OUTER_TYPE by means of placement new.  CONSIDER_BASES makes
    455    function to accept cases where OTR_TYPE appears as base of OUTER_TYPE or as
    456    base of one of fields of OUTER_TYPE.  */
    457 
    458 static bool
    459 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
    460 		 tree otr_type,
    461 		 bool consider_placement_new,
    462 		 bool consider_bases)
    463 {
    464   ipa_polymorphic_call_context context;
    465 
    466   /* Check that type is within range.  */
    467   if (offset < 0)
    468     return false;
    469 
    470   /* PR ipa/71207
    471      As OUTER_TYPE can be a type which has a diamond virtual inheritance,
    472      it's not necessary that INNER_TYPE will fit within OUTER_TYPE with
    473      a given offset.  It can happen that INNER_TYPE also contains a base object,
    474      however it would point to the same instance in the OUTER_TYPE.  */
    475 
    476   context.offset = offset;
    477   context.outer_type = TYPE_MAIN_VARIANT (outer_type);
    478   context.maybe_derived_type = false;
    479   context.dynamic = false;
    480   return context.restrict_to_inner_class (otr_type, consider_placement_new,
    481 					  consider_bases);
    482 }
    483 
    484 
    485 /* Return a FUNCTION_DECL if FN represent a constructor or destructor.
    486    If CHECK_CLONES is true, also check for clones of ctor/dtors.  */
    487 
    488 tree
    489 polymorphic_ctor_dtor_p (tree fn, bool check_clones)
    490 {
    491   if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
    492       || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
    493     {
    494       if (!check_clones)
    495 	return NULL_TREE;
    496 
    497       /* Watch for clones where we constant propagated the first
    498 	 argument (pointer to the instance).  */
    499       fn = DECL_ABSTRACT_ORIGIN (fn);
    500       if (!fn
    501 	  || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
    502 	  || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
    503 	return NULL_TREE;
    504     }
    505 
    506   if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
    507     return NULL_TREE;
    508 
    509   return fn;
    510 }
    511 
    512 /* Return a FUNCTION_DECL if BLOCK represents a constructor or destructor.
    513    If CHECK_CLONES is true, also check for clones of ctor/dtors.  */
    514 
    515 tree
    516 inlined_polymorphic_ctor_dtor_block_p (tree block, bool check_clones)
    517 {
    518   tree fn = block_ultimate_origin (block);
    519   if (fn == NULL || TREE_CODE (fn) != FUNCTION_DECL)
    520     return NULL_TREE;
    521 
    522   return polymorphic_ctor_dtor_p (fn, check_clones);
    523 }
    524 
    525 
    526 /* We know that the instance is stored in variable or parameter
    527    (not dynamically allocated) and we want to disprove the fact
    528    that it may be in construction at invocation of CALL.
    529 
    530    BASE represents memory location where instance is stored.
    531    If BASE is NULL, it is assumed to be global memory.
    532    OUTER_TYPE is known type of the instance or NULL if not
    533    known.
    534 
    535    For the variable to be in construction we actually need to
    536    be in constructor of corresponding global variable or
    537    the inline stack of CALL must contain the constructor.
    538    Check this condition.  This check works safely only before
    539    IPA passes, because inline stacks may become out of date
    540    later.  */
    541 
    542 bool
    543 decl_maybe_in_construction_p (tree base, tree outer_type,
    544 			      gimple *call, tree function)
    545 {
    546   if (outer_type)
    547     outer_type = TYPE_MAIN_VARIANT (outer_type);
    548   gcc_assert (!base || DECL_P (base));
    549 
    550   /* After inlining the code unification optimizations may invalidate
    551      inline stacks.  Also we need to give up on global variables after
    552      IPA, because addresses of these may have been propagated to their
    553      constructors.  */
    554   if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    555     return true;
    556 
    557   /* Pure functions cannot do any changes on the dynamic type;
    558      that require writing to memory.  */
    559   if ((!base || !auto_var_in_fn_p (base, function))
    560       && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    561     return false;
    562 
    563   bool check_clones = !base || is_global_var (base);
    564   for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
    565        block = BLOCK_SUPERCONTEXT (block))
    566     if (tree fn = inlined_polymorphic_ctor_dtor_block_p (block, check_clones))
    567       {
    568 	tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
    569 
    570 	if (!outer_type || !types_odr_comparable (type, outer_type))
    571 	  {
    572 	    if (TREE_CODE (type) == RECORD_TYPE
    573 		&& TYPE_BINFO (type)
    574 		&& polymorphic_type_binfo_p (TYPE_BINFO (type)))
    575 	      return true;
    576 	  }
    577  	else if (types_same_for_odr (type, outer_type))
    578 	  return true;
    579       }
    580 
    581   if (!base || (VAR_P (base) && is_global_var (base)))
    582     {
    583       if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
    584 	  || (!DECL_CXX_CONSTRUCTOR_P (function)
    585 	      && !DECL_CXX_DESTRUCTOR_P (function)))
    586 	{
    587 	  if (!DECL_ABSTRACT_ORIGIN (function))
    588 	    return false;
    589 	  /* Watch for clones where we constant propagated the first
    590 	     argument (pointer to the instance).  */
    591 	  function = DECL_ABSTRACT_ORIGIN (function);
    592 	  if (!function
    593 	      || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
    594 	      || (!DECL_CXX_CONSTRUCTOR_P (function)
    595 		  && !DECL_CXX_DESTRUCTOR_P (function)))
    596 	    return false;
    597 	}
    598       tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (function));
    599       if (!outer_type || !types_odr_comparable (type, outer_type))
    600 	{
    601 	  if (TREE_CODE (type) == RECORD_TYPE
    602 	      && TYPE_BINFO (type)
    603 	      && polymorphic_type_binfo_p (TYPE_BINFO (type)))
    604 	    return true;
    605 	}
    606       else if (types_same_for_odr (type, outer_type))
    607 	return true;
    608     }
    609   return false;
    610 }
    611 
    612 /* Dump human readable context to F.  If NEWLINE is true, it will be terminated
    613    by a newline.  */
    614 
    615 void
    616 ipa_polymorphic_call_context::dump (FILE *f, bool newline) const
    617 {
    618   fprintf (f, "    ");
    619   if (invalid)
    620     fprintf (f, "Call is known to be undefined");
    621   else
    622     {
    623       if (useless_p ())
    624 	fprintf (f, "nothing known");
    625       if (outer_type || offset)
    626 	{
    627 	  fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
    628 	  print_generic_expr (f, outer_type, TDF_SLIM);
    629 	  if (maybe_derived_type)
    630 	    fprintf (f, " (or a derived type)");
    631 	  if (maybe_in_construction)
    632 	    fprintf (f, " (maybe in construction)");
    633 	  fprintf (f, " offset " HOST_WIDE_INT_PRINT_DEC,
    634 		   offset);
    635 	}
    636       if (speculative_outer_type)
    637 	{
    638 	  if (outer_type || offset)
    639 	    fprintf (f, " ");
    640 	  fprintf (f, "Speculative outer type:");
    641 	  print_generic_expr (f, speculative_outer_type, TDF_SLIM);
    642 	  if (speculative_maybe_derived_type)
    643 	    fprintf (f, " (or a derived type)");
    644 	  fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC,
    645 		   speculative_offset);
    646 	}
    647     }
    648   if (newline)
    649     fprintf(f, "\n");
    650 }
    651 
    652 /* Print context to stderr.  */
    653 
void
ipa_polymorphic_call_context::debug () const
{
  /* Dump to stderr using dump's default NEWLINE argument.  */
  dump (stderr);
}
    659 
    660 /* Stream out the context to OB.  */
    661 
void
ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);

  /* Pack the boolean flags first; the last three bits are presence
     markers telling stream_in which optional payloads follow.  */
  bp_pack_value (&bp, invalid, 1);
  bp_pack_value (&bp, maybe_in_construction, 1);
  bp_pack_value (&bp, maybe_derived_type, 1);
  bp_pack_value (&bp, speculative_maybe_derived_type, 1);
  bp_pack_value (&bp, dynamic, 1);
  bp_pack_value (&bp, outer_type != NULL, 1);
  bp_pack_value (&bp, offset != 0, 1);
  bp_pack_value (&bp, speculative_outer_type != NULL, 1);
  streamer_write_bitpack (&bp);

  /* Optional payloads, emitted in exactly the order stream_in
     reads them back.  */
  if (outer_type != NULL)
    stream_write_tree (ob, outer_type, true);
  if (offset)
    streamer_write_hwi (ob, offset);
  if (speculative_outer_type != NULL)
    {
      stream_write_tree (ob, speculative_outer_type, true);
      streamer_write_hwi (ob, speculative_offset);
    }
  else
    /* A speculative offset without a speculative type would be
       silently dropped; assert it is not present.  */
    gcc_assert (!speculative_offset);
}
    689 
    690 /* Stream in the context from IB and DATA_IN.  */
    691 
void
ipa_polymorphic_call_context::stream_in (class lto_input_block *ib,
					 class data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);

  /* Unpack the flags in the same order stream_out packed them.  */
  invalid = bp_unpack_value (&bp, 1);
  maybe_in_construction = bp_unpack_value (&bp, 1);
  maybe_derived_type = bp_unpack_value (&bp, 1);
  speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
  dynamic = bp_unpack_value (&bp, 1);
  /* Presence bits for the optional payloads below.  */
  bool outer_type_p = bp_unpack_value (&bp, 1);
  bool offset_p = bp_unpack_value (&bp, 1);
  bool speculative_outer_type_p = bp_unpack_value (&bp, 1);

  /* Read each payload only when its presence bit was set; otherwise
     reset the field to its neutral value.  */
  if (outer_type_p)
    outer_type = stream_read_tree (ib, data_in);
  else
    outer_type = NULL;
  if (offset_p)
    offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    offset = 0;
  if (speculative_outer_type_p)
    {
      speculative_outer_type = stream_read_tree (ib, data_in);
      speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
    }
  else
    {
      speculative_outer_type = NULL;
      speculative_offset = 0;
    }
}
    726 
    727 /* Produce polymorphic call context for call method of instance
    728    that is located within BASE (that is assumed to be a decl) at offset OFF. */
    729 
void
ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
{
  gcc_assert (DECL_P (base));
  clear_speculation ();

  if (!contains_polymorphic_type_p (TREE_TYPE (base)))
    {
      /* BASE cannot hold any polymorphic object; keep only the offset.
	 NOTE(review): OFF is stored after clear_outer_type — presumably
	 because clear_outer_type resets the offset field; confirm in the
	 class declaration.  */
      clear_outer_type ();
      offset = off;
      return;
    }
  outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
  offset = off;
  /* Make very conservative assumption that all objects
     may be in construction.

     It is up to caller to revisit this via
     get_dynamic_type or decl_maybe_in_construction_p.  */
  maybe_in_construction = true;
  maybe_derived_type = false;
  dynamic = false;
}
    753 
    754 /* CST is an invariant (address of decl), try to get meaningful
    755    polymorphic call context for polymorphic call of method
    756    if instance of OTR_TYPE that is located at offset OFF of this invariant.
    757    Return FALSE if nothing meaningful can be found.  */
    758 
bool
ipa_polymorphic_call_context::set_by_invariant (tree cst,
						tree otr_type,
						HOST_WIDE_INT off)
{
  poly_int64 offset2, size, max_size;
  bool reverse;
  tree base;

  invalid = false;
  /* NOTE(review): this discards the caller-supplied OFF, so set_by_decl
     below is always called with offset 0 and the contains_type_p check
     uses offset 0 as well.  Looks intentional (matches upstream) but
     worth confirming against history.  */
  off = 0;
  clear_outer_type (otr_type);

  if (TREE_CODE (cst) != ADDR_EXPR)
    return false;

  /* Strip the ADDR_EXPR and find the underlying decl; give up unless
     the reference covers the object with a known constant size.  */
  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset2, &size, &max_size, &reverse);
  if (!DECL_P (base) || !known_size_p (max_size) || maybe_ne (max_size, size))
    return false;

  /* Only type inconsistent programs can have otr_type that is
     not part of outer type.  */
  if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
    return false;

  set_by_decl (base, off);
  return true;
}
    788 
    789 /* See if OP is SSA name initialized as a copy or by single assignment.
    790    If so, walk the SSA graph up.  Because simple PHI conditional is considered
    791    copy, GLOBAL_VISITED may be used to avoid infinite loop walking the SSA
    792    graph.  */
    793 
static tree
walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
{
  /* Local visited set, used only when the caller did not supply
     GLOBAL_VISITED; freed at DONE.  */
  hash_set <tree> *visited = NULL;
  STRIP_NOPS (op);
  /* Keep walking while OP is an SSA name defined by a simple copy
     or a PHI node that can be treated as one.  */
  while (TREE_CODE (op) == SSA_NAME
	 && !SSA_NAME_IS_DEFAULT_DEF (op)
	 /* We might be called via fold_stmt during cfgcleanup where
	    SSA form need not be up-to-date.  */
	 && !name_registered_for_update_p (op)
	 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
	     || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
    {
      /* Record OP in whichever visited set is in use; a repeat means
	 we closed a cycle, so stop here.  */
      if (global_visited)
	{
	  if (!*global_visited)
	    *global_visited = new hash_set<tree>;
	  if ((*global_visited)->add (op))
	    goto done;
	}
      else
	{
	  if (!visited)
	    visited = new hash_set<tree>;
	  if (visited->add (op))
	    goto done;
	}
      /* Special case
	 if (ptr == 0)
	   ptr = 0;
	 else
	   ptr = ptr.foo;
	 This pattern is implicitly produced for casts to non-primary
	 bases.  When doing context analysis, we do not really care
	 about the case pointer is NULL, because the call will be
	 undefined anyway.  */
      if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
	{
	  gimple *phi = SSA_NAME_DEF_STMT (op);

	  if (gimple_phi_num_args (phi) > 2)
	    goto done;
	  if (gimple_phi_num_args (phi) == 1)
	    op = gimple_phi_arg_def (phi, 0);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
	    op = gimple_phi_arg_def (phi, 1);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
	    op = gimple_phi_arg_def (phi, 0);
	  else
	    goto done;
	}
      else
	{
	  /* Loads are not copies; stop at them.  */
	  if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
	    goto done;
	  op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
	}
      STRIP_NOPS (op);
    }
done:
  if (visited)
    delete (visited);
  return op;
}
    858 
/* Create polymorphic call context from IP invariant CST.
   This is typically &global_var.
   OTR_TYPE specify type of polymorphic call or NULL if unknown, OFF
   is offset of call.  */

ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
							    tree otr_type,
							    HOST_WIDE_INT off)
{
  /* Start from a clean context; set_by_invariant resets the outer type
     and, when CST is the address of a declaration containing OTR_TYPE
     at OFF, records that decl as the outer object.  */
  clear_speculation ();
  set_by_invariant (cst, otr_type, off);
}
    871 
/* Build context for pointer REF contained in FNDECL at statement STMT.
   If INSTANCE is non-NULL, return pointer to the object described by
   the context or DECL where context is contained in.  */

ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
							    tree ref,
							    gimple *stmt,
							    tree *instance)
{
  tree otr_type = NULL;
  tree base_pointer;
  hash_set <tree> *visited = NULL;

  /* For virtual calls REF is an OBJ_TYPE_REF; start the walk from the
     object pointer and remember the class of the call.  Otherwise REF
     is the pointer itself.  */
  if (TREE_CODE (ref) == OBJ_TYPE_REF)
    {
      otr_type = obj_type_ref_class (ref);
      base_pointer = OBJ_TYPE_REF_OBJECT (ref);
    }
  else
    base_pointer = ref;

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  clear_speculation ();
  clear_outer_type (otr_type);
  invalid = false;

  /* Walk SSA for outer object.  */
  while (true)
    {
      base_pointer = walk_ssa_copies (base_pointer, &visited);
      if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT offset2, size;
	  bool reverse;
	  tree base
	    = get_ref_base_and_extent_hwi (TREE_OPERAND (base_pointer, 0),
					   &offset2, &size, &reverse);
	  if (!base)
	    break;

	  /* Whatever we learned about the base type is at least usable
	     speculatively.  */
	  combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
				    offset + offset2,
				    true,
				    NULL /* Do not change outer type.  */);

	  /* If this is a varying address, punt.  */
	  if (TREE_CODE (base) == MEM_REF || DECL_P (base))
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      offset_int mem_offset;
	      if (TREE_CODE (base) == MEM_REF
		  && mem_ref_offset (base).is_constant (&mem_offset))
		{
		  offset_int o = mem_offset * BITS_PER_UNIT;
		  o += offset;
		  o += offset2;
		  /* Punt on accumulated offsets that do not fit a
		     HOST_WIDE_INT.  */
		  if (!wi::fits_shwi_p (o))
		    break;
		  base_pointer = TREE_OPERAND (base, 0);
		  offset = o.to_shwi ();
		  outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  if (visited)
		    delete (visited);
		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (otr_type
		      && !contains_type_p (TREE_TYPE (base),
					   offset + offset2, otr_type))
		    {
		      invalid = true;
		      if (instance)
			*instance = base_pointer;
		      return;
		    }
		  set_by_decl (base, offset + offset2);
		  if (outer_type && maybe_in_construction && stmt)
		    maybe_in_construction
		     = decl_maybe_in_construction_p (base,
						     outer_type,
						     stmt,
						     fndecl);
		  if (instance)
		    *instance = base;
		  return;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && TREE_CODE (TREE_OPERAND (base_pointer, 1)) == INTEGER_CST)
	{
	  /* Constant pointer adjustment: accumulate the byte offset as a
	     bit offset and continue from the base of the addition.  */
	  offset_int o
	    = offset_int::from (wi::to_wide (TREE_OPERAND (base_pointer, 1)),
				SIGNED);
	  o *= BITS_PER_UNIT;
	  o += offset;
	  if (!wi::fits_shwi_p (o))
	    break;
	  offset = o.to_shwi ();
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }

  if (visited)
    delete (visited);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  cgraph_node *node = cgraph_node::get (current_function_decl);
	  gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
		      || TREE_CODE (outer_type) == UNION_TYPE);

	  /* Handle the case we inlined into a thunk.  In this case
	     thunk has THIS pointer of type bar, but it really receives
	     address to its base type foo which sits in bar at
	     0-thunk.fixed_offset.  It starts with code that adds
	     thunk.fixed_offset to the pointer to compensate for this.

	     Because we walked all the way to the beginning of thunk, we now
	     see pointer &bar-thunk.fixed_offset and need to compensate
	     for it.  */
	  thunk_info *info = thunk_info::get (node);
	  if (info && info->fixed_offset)
	    offset -= info->fixed_offset * BITS_PER_UNIT;

	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if ((otr_type
	       && !contains_type_p (outer_type, offset,
				    otr_type))
	      || !contains_polymorphic_type_p (outer_type)
	      /* If we compile thunk with virtual offset, the THIS pointer
		 is adjusted by unknown value.  We can't thus use outer info
		 at all.  */
	      || (info && info->virtual_offset_p))
	    {
	      outer_type = NULL;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }

	  dynamic = true;

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      maybe_in_construction = true;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      maybe_derived_type = true;
	      maybe_in_construction = false;
	    }
	  if (instance)
	    {
	      thunk_info *info = thunk_info::get (node);
	      /* If method is expanded thunk, we need to apply thunk offset
		 to instance pointer.  */
	      if (info && (info->virtual_offset_p || info->fixed_offset))
		*instance = NULL;
	      else
	        *instance = base_pointer;
	    }
	  return;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (otr_type && !contains_type_p (outer_type, offset,
					    otr_type))
	    {
	      invalid = true;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  /* Non-polymorphic types have no interest for us.  */
	  else if (!otr_type && !contains_polymorphic_type_p (outer_type))
	    {
	      outer_type = NULL;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  maybe_derived_type = false;
	  maybe_in_construction = false;
	  if (instance)
	    *instance = base_pointer;
	  return;
	}
    }

  tree base_type = TREE_TYPE (base_pointer);

  /* A default-definition SSA name that is neither a PARM_DECL nor a
     RESULT_DECL is an uninitialized value; nothing can be derived.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && !(TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL
	   || TREE_CODE (SSA_NAME_VAR (base_pointer)) == RESULT_DECL))
    {
      invalid = true;
      if (instance)
	*instance = base_pointer;
      return;
    }
  /* Prefer the type of the RHS of a single assignment defining the
     pointer, which may be more precise than the SSA name's type.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_DEF_STMT (base_pointer)
      && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
    base_type = TREE_TYPE (gimple_assign_rhs1
			    (SSA_NAME_DEF_STMT (base_pointer)));

  if (base_type && POINTER_TYPE_P (base_type))
    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
			      offset,
			      true, NULL /* Do not change type here */);
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to an constructor call prior our reference.
     We do not make this type of flow sensitive analysis yet.  */
  if (instance)
    *instance = base_pointer;
  return;
}
   1126 
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree instance;
  /* The reference to virtual table pointer used.  */
  tree vtbl_ptr_ref;
  /* Type of the polymorphic call; used to restrict recorded types to
     those that can contain it.  */
  tree otr_type;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Offset of KNOWN_CURRENT_TYPE within the instance, in bits.  */
  HOST_WIDE_INT known_current_offset;

  /* Set to nonzero if we possibly missed some dynamic type changes and we
     should consider the set to be speculative.  */
  unsigned speculative;

  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
  /* Set to true when a vtbl pointer store could not be analyzed.  */
  bool seen_unanalyzed_store;
};
   1157 
   1158 /* Return true if STMT is not call and can modify a virtual method table pointer.
   1159    We take advantage of fact that vtable stores must appear within constructor
   1160    and destructor functions.  */
   1161 
   1162 static bool
   1163 noncall_stmt_may_be_vtbl_ptr_store (gimple *stmt)
   1164 {
   1165   if (is_gimple_assign (stmt))
   1166     {
   1167       tree lhs = gimple_assign_lhs (stmt);
   1168 
   1169       if (gimple_clobber_p (stmt))
   1170 	return false;
   1171       if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
   1172 	{
   1173 	  if (flag_strict_aliasing
   1174 	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
   1175 	    return false;
   1176 
   1177 	  if (TREE_CODE (lhs) == COMPONENT_REF
   1178 	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
   1179 	    return false;
   1180 	  /* In the future we might want to use get_ref_base_and_extent to find
   1181 	     if there is a field corresponding to the offset and if so, proceed
   1182 	     almost like if it was a component ref.  */
   1183 	}
   1184     }
   1185 
   1186   /* Code unification may mess with inline stacks.  */
   1187   if (cfun->after_inlining)
   1188     return true;
   1189 
   1190   /* Walk the inline stack and watch out for ctors/dtors.
   1191      TODO: Maybe we can require the store to appear in toplevel
   1192      block of CTOR/DTOR.  */
   1193   for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
   1194        block = BLOCK_SUPERCONTEXT (block))
   1195     if (BLOCK_ABSTRACT_ORIGIN (block)
   1196 	&& TREE_CODE (block_ultimate_origin (block)) == FUNCTION_DECL)
   1197       return inlined_polymorphic_ctor_dtor_block_p (block, false);
   1198   return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
   1199 	  && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
   1200 	      || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
   1201 }
   1202 
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE if type changes
   in unknown way or ERROR_MARK_NODE if type is unchanged.
   TCI describes the tracked instance and vtbl pointer reference; on
   success *TYPE_OFFSET is set to the bit offset of the subobject whose
   vtable was stored.  */

static tree
extr_type_from_vtbl_ptr_store (gimple *stmt, struct type_change_info *tci,
			       HOST_WIDE_INT *type_offset)
{
  poly_int64 offset, size, max_size;
  tree lhs, rhs, base;
  bool reverse;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  /* Virtual table pointer stores are COMPONENT_REFs of a DECL_VIRTUAL_P
     field.  */
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
     {
	if (dump_file)
	  fprintf (dump_file, "  LHS is not virtual table.\n");
	return NULL_TREE;
     }

  /* Quick check: the store writes exactly the vtbl pointer reference we
     track.  Otherwise fall back to base+offset matching.  */
  if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
    ;
  else
    {
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      if (DECL_P (tci->instance))
	{
	  if (base != tci->instance)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    base:");
		  print_generic_expr (dump_file, base, TDF_SLIM);
		  fprintf (dump_file, " does not match instance:");
		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      return NULL_TREE;
	    }
	}
      else if (TREE_CODE (base) == MEM_REF)
	{
	  if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    base mem ref:");
		  print_generic_expr (dump_file, base, TDF_SLIM);
		  fprintf (dump_file, " does not match instance:");
		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      return NULL_TREE;
	    }
	  /* Fold a constant MEM_REF offset into OFFSET (converted to
	     bits); punt on non-representable offsets.  */
	  if (!integer_zerop (TREE_OPERAND (base, 1)))
	    {
	      if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "    base mem ref:");
		      print_generic_expr (dump_file, base, TDF_SLIM);
		      fprintf (dump_file, " has non-representable offset:");
		      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		      fprintf (dump_file, "\n");
		    }
		  return NULL_TREE;
		}
	      else
	        offset += tree_to_shwi (TREE_OPERAND (base, 1)) * BITS_PER_UNIT;
	    }
	}
      else if (!operand_equal_p (tci->instance, base, 0)
	       || tci->offset)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    base:");
	      print_generic_expr (dump_file, base, TDF_SLIM);
	      fprintf (dump_file, " does not match instance:");
	      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
	      fprintf (dump_file, " with offset %i\n", (int)tci->offset);
	    }
	  return tci->offset > POINTER_SIZE ? error_mark_node : NULL_TREE;
	}
      /* The store must cover exactly a pointer at the tracked offset.  */
      if (maybe_ne (offset, tci->offset)
	  || maybe_ne (size, POINTER_SIZE)
	  || maybe_ne (max_size, POINTER_SIZE))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    wrong offset ");
	      print_dec (offset, dump_file);
	      fprintf (dump_file, "!=%i or size ", (int) tci->offset);
	      print_dec (size, dump_file);
	      fprintf (dump_file, "\n");
	    }
	  /* A store known not to overlap the tracked pointer leaves the
	     type unchanged (error_mark_node); otherwise unknown.  */
	  return (known_le (offset + POINTER_SIZE, tci->offset)
		  || (known_size_p (max_size)
		      && known_gt (tci->offset + POINTER_SIZE,
				   offset + max_size))
		  ? error_mark_node : NULL);
	}
    }

  tree vtable;
  unsigned HOST_WIDE_INT offset2;

  /* Decode the stored value into the vtable it points into.  */
  if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
    {
      if (dump_file)
	fprintf (dump_file, "    Failed to lookup binfo\n");
      return NULL;
    }

  tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
					       offset2, vtable);
  if (!binfo)
    {
      if (dump_file)
	fprintf (dump_file, "    Construction vtable used\n");
      /* FIXME: We should support construction contexts.  */
      return NULL;
    }

  *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
  return DECL_CONTEXT (vtable);
}
   1337 
/* Record dynamic type change of TCI to TYPE at bit offset OFFSET.  */

static void
record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
{
  if (dump_file)
    {
      if (type)
	{
          fprintf (dump_file, "  Recording type: ");
	  print_generic_expr (dump_file, type, TDF_SLIM);
          fprintf (dump_file, " at offset %i\n", (int)offset);
	}
     else
       fprintf (dump_file, "  Recording unknown type\n");
    }

  /* If we found a constructor of type that is not polymorphic or
     that may contain the type in question as a field (not as base),
     restrict to the inner class first to make type matching below
     happier.  */
  if (type
      && (offset
          || (TREE_CODE (type) != RECORD_TYPE
	      || !TYPE_BINFO (type)
	      || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
    {
      ipa_polymorphic_call_context context;

      context.offset = offset;
      context.outer_type = type;
      context.maybe_in_construction = false;
      context.maybe_derived_type = false;
      context.dynamic = true;
      /* If we failed to find the inner type, we know that the call
	 would be undefined for type produced here.  */
      if (!context.restrict_to_inner_class (tci->otr_type))
	{
	  if (dump_file)
	    fprintf (dump_file, "  Ignoring; does not contain otr_type\n");
	  return;
	}
      /* Watch for case we reached a POD type and anticipate placement
	 new.  */
      if (!context.maybe_derived_type)
	{
          type = context.outer_type;
          offset = context.offset;
	}
    }
  /* Seeing two different (type, offset) pairs means we cannot pick a
     single dynamic type; flag that for the caller.  */
  if (tci->type_maybe_changed
      && (!types_same_for_odr (type, tci->known_current_type)
	  || offset != tci->known_current_offset))
    tci->multiple_types_encountered = true;
  tci->known_current_type = TYPE_MAIN_VARIANT (type);
  tci->known_current_offset = offset;
  tci->type_maybe_changed = true;
}
   1396 
   1397 
   1398 /* The maximum number of may-defs we visit when looking for a must-def
   1399    that changes the dynamic type in check_stmt_for_type_change.  Tuned
   1400    after the PR12392 testcase which unlimited spends 40% time within
   1401    these alias walks and 8% with the following limit.  */
   1402 
   1403 static inline bool
   1404 csftc_abort_walking_p (unsigned speculative)
   1405 {
   1406   unsigned max = param_max_speculative_devirt_maydefs;
   1407   return speculative > max ? true : false;
   1408 }
   1409 
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  Returning true terminates the walk.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;
  tree fn;

  /* If we already gave up, just terminate the rest of walk.  */
  if (tci->multiple_types_encountered)
    return true;

  if (is_gimple_call (stmt))
    {
      /* Const/pure calls cannot write memory and thus cannot change the
	 vtbl pointer.  */
      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
	return false;

      /* Check for a constructor call.  */
      if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
	  && DECL_CXX_CONSTRUCTOR_P (fn)
	  && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
	  && gimple_call_num_args (stmt))
      {
	tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
	tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
	HOST_WIDE_INT offset = 0;
	bool reverse;

	if (dump_file)
	  {
	    fprintf (dump_file, "  Checking constructor call: ");
	    print_gimple_stmt (dump_file, stmt, 0);
	  }

	/* See if THIS parameter seems like instance pointer.  */
	if (TREE_CODE (op) == ADDR_EXPR)
	  {
	    HOST_WIDE_INT size;
	    op = get_ref_base_and_extent_hwi (TREE_OPERAND (op, 0),
					      &offset, &size, &reverse);
	    /* Each unanalyzable case below counts as a speculative
	       may-def; abort when over budget.  */
	    if (!op)
	      {
                tci->speculative++;
	        return csftc_abort_walking_p (tci->speculative);
	      }
	    if (TREE_CODE (op) == MEM_REF)
	      {
		if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
		  {
                    tci->speculative++;
		    return csftc_abort_walking_p (tci->speculative);
		  }
		offset += tree_to_shwi (TREE_OPERAND (op, 1))
			  * BITS_PER_UNIT;
		op = TREE_OPERAND (op, 0);
	      }
	    else if (DECL_P (op))
	      ;
	    else
	      {
                tci->speculative++;
	        return csftc_abort_walking_p (tci->speculative);
	      }
	    op = walk_ssa_copies (op);
	  }
	if (operand_equal_p (op, tci->instance, 0)
	    && TYPE_SIZE (type)
	    && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	    && tree_fits_shwi_p (TYPE_SIZE (type))
	    && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset
	    /* Some inlined constructors may look as follows:
		  _3 = operator new (16);
		  MEM[(struct  &)_3] ={v} {CLOBBER};
		  MEM[(struct CompositeClass *)_3]._vptr.CompositeClass
		    = &MEM[(void *)&_ZTV14CompositeClass + 16B];
		  _7 = &MEM[(struct CompositeClass *)_3].object;
		  EmptyClass::EmptyClass (_7);

	       When determining dynamic type of _3 and because we stop at first
	       dynamic type found, we would stop on EmptyClass::EmptyClass (_7).
	       In this case the emptyclass is not even polymorphic and we miss
	       it is contained in an outer type that is polymorphic.  */

	    && (tci->offset == offset || contains_polymorphic_type_p (type)))
	  {
	    record_known_type (tci, type, tci->offset - offset);
	    return true;
	  }
      }
     /* Calls may possibly change dynamic type by placement new. Assume
        it will not happen, but make result speculative only.  */
     if (dump_file)
	{
          fprintf (dump_file, "  Function call may change dynamic type:");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
     tci->speculative++;
     return csftc_abort_walking_p (tci->speculative);
   }
  /* Check for inlined virtual table store.  */
  else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      HOST_WIDE_INT offset = 0;
      if (dump_file)
	{
	  fprintf (dump_file, "  Checking vtbl store: ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
      /* error_mark_node means the store provably leaves the type
	 unchanged; keep walking.  */
      if (type == error_mark_node)
	return false;
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (!type)
	{
	  if (dump_file)
	    fprintf (dump_file, "  Unanalyzed store may change type.\n");
	  tci->seen_unanalyzed_store = true;
	  tci->speculative++;
	}
      else
        record_known_type (tci, type, offset);
      return true;
    }
  else
    return false;
}
   1543 
   1544 /* THIS is polymorphic call context obtained from get_polymorphic_context.
   1545    OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
   1546    INSTANCE is pointer to the outer instance as returned by
   1547    get_polymorphic_context.  To avoid creation of temporary expressions,
   INSTANCE may also be a declaration, if get_polymorphic_context found the
   value to be in static storage.
   1550 
   1551    If the type of instance is not fully determined
   1552    (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
   1553    is set), try to walk memory writes and find the actual construction of the
   1554    instance.
   1555 
   1556    Return true if memory is unchanged from function entry.
   1557 
   1558    We do not include this analysis in the context analysis itself, because
   1559    it needs memory SSA to be fully built and the walk may be expensive.
   So it is not suitable for use within fold_stmt and similar uses.
   1561 
   AA_WALK_BUDGET_P, if not NULL, is how many statements we should allow
   1563    walk_aliased_vdefs to examine.  The value should be decremented by the
   1564    number of statements we examined or set to zero if exhausted.  */
   1565 
   1566 bool
   1567 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
   1568 						tree otr_object,
   1569 						tree otr_type,
   1570 						gimple *call,
   1571 						unsigned *aa_walk_budget_p)
   1572 {
   1573   struct type_change_info tci;
   1574   ao_ref ao;
   1575   bool function_entry_reached = false;
   1576   tree instance_ref = NULL;
   1577   gimple *stmt = call;
   1578   /* Remember OFFSET before it is modified by restrict_to_inner_class.
   1579      This is because we do not update INSTANCE when walking inwards.  */
   1580   HOST_WIDE_INT instance_offset = offset;
   1581   tree instance_outer_type = outer_type;
   1582 
   1583   if (!instance)
   1584     return false;
   1585 
   1586   if (otr_type)
   1587     otr_type = TYPE_MAIN_VARIANT (otr_type);
   1588 
   1589   /* Walk into inner type. This may clear maybe_derived_type and save us
   1590      from useless work.  It also makes later comparisons with static type
   1591      easier.  */
   1592   if (outer_type && otr_type)
   1593     {
   1594       if (!restrict_to_inner_class (otr_type))
   1595         return false;
   1596     }
   1597 
   1598   if (!maybe_in_construction && !maybe_derived_type)
   1599     return false;
   1600 
   1601   /* If we are in fact not looking at any object or the instance is
   1602      some placement new into a random load, give up straight away.  */
   1603   if (TREE_CODE (instance) == MEM_REF)
   1604     return false;
   1605 
   1606   /* We need to obtain reference to virtual table pointer.  It is better
   1607      to look it up in the code rather than build our own.  This require bit
   1608      of pattern matching, but we end up verifying that what we found is
   1609      correct.
   1610 
   1611      What we pattern match is:
   1612 
   1613        tmp = instance->_vptr.A;   // vtbl ptr load
   1614        tmp2 = tmp[otr_token];	  // vtable lookup
   1615        OBJ_TYPE_REF(tmp2;instance->0) (instance);
   1616 
   1617      We want to start alias oracle walk from vtbl pointer load,
   1618      but we may not be able to identify it, for example, when PRE moved the
   1619      load around.  */
   1620 
   1621   if (gimple_code (call) == GIMPLE_CALL)
   1622     {
   1623       tree ref = gimple_call_fn (call);
   1624       bool reverse;
   1625 
   1626       if (TREE_CODE (ref) == OBJ_TYPE_REF)
   1627 	{
   1628 	  ref = OBJ_TYPE_REF_EXPR (ref);
   1629 	  ref = walk_ssa_copies (ref);
   1630 
   1631 	  /* If call target is already known, no need to do the expensive
   1632  	     memory walk.  */
   1633 	  if (is_gimple_min_invariant (ref))
   1634 	    return false;
   1635 
   1636 	  /* Check if definition looks like vtable lookup.  */
   1637 	  if (TREE_CODE (ref) == SSA_NAME
   1638 	      && !SSA_NAME_IS_DEFAULT_DEF (ref)
   1639 	      && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
   1640 	      && TREE_CODE (gimple_assign_rhs1
   1641 			     (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
   1642 	    {
   1643 	      ref = get_base_address
   1644 		     (TREE_OPERAND (gimple_assign_rhs1
   1645 				     (SSA_NAME_DEF_STMT (ref)), 0));
   1646 	      ref = walk_ssa_copies (ref);
   1647 	      /* Find base address of the lookup and see if it looks like
   1648 		 vptr load.  */
   1649 	      if (TREE_CODE (ref) == SSA_NAME
   1650 		  && !SSA_NAME_IS_DEFAULT_DEF (ref)
   1651 		  && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
   1652 		{
   1653 		  HOST_WIDE_INT offset2, size;
   1654 		  tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
   1655 		  tree base_ref
   1656 		    = get_ref_base_and_extent_hwi (ref_exp, &offset2,
   1657 						   &size, &reverse);
   1658 
   1659 		  /* Finally verify that what we found looks like read from
   1660 		     OTR_OBJECT or from INSTANCE with offset OFFSET.  */
   1661 		  if (base_ref
   1662 		      && ((TREE_CODE (base_ref) == MEM_REF
   1663 		           && ((offset2 == instance_offset
   1664 		                && TREE_OPERAND (base_ref, 0) == instance)
   1665 			       || (!offset2
   1666 				   && TREE_OPERAND (base_ref, 0)
   1667 				      == otr_object)))
   1668 			  || (DECL_P (instance) && base_ref == instance
   1669 			      && offset2 == instance_offset)))
   1670 		    {
   1671 		      stmt = SSA_NAME_DEF_STMT (ref);
   1672 		      instance_ref = ref_exp;
   1673 		    }
   1674 		}
   1675 	    }
   1676 	}
   1677     }
   1678 
   1679   /* If we failed to look up the reference in code, build our own.  */
   1680   if (!instance_ref)
   1681     {
   1682       /* If the statement in question does not use memory, we can't tell
   1683 	 anything.  */
   1684       if (!gimple_vuse (stmt))
   1685 	return false;
   1686       ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
   1687     }
   1688   else
   1689   /* Otherwise use the real reference.  */
   1690     ao_ref_init (&ao, instance_ref);
   1691 
   1692   /* We look for vtbl pointer read.  */
   1693   ao.size = POINTER_SIZE;
   1694   ao.max_size = ao.size;
   1695   /* We are looking for stores to vptr pointer within the instance of
   1696      outer type.
   1697      TODO: The vptr pointer type is globally known, we probably should
   1698      keep it and do that even when otr_type is unknown.  */
   1699   if (otr_type)
   1700     {
   1701       ao.base_alias_set
   1702 	= get_alias_set (outer_type ? outer_type : otr_type);
   1703       ao.ref_alias_set
   1704         = get_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
   1705     }
   1706 
   1707   if (dump_file)
   1708     {
   1709       fprintf (dump_file, "Determining dynamic type for call: ");
   1710       print_gimple_stmt (dump_file, call, 0);
   1711       fprintf (dump_file, "  Starting walk at: ");
   1712       print_gimple_stmt (dump_file, stmt, 0);
   1713       fprintf (dump_file, "  instance pointer: ");
   1714       print_generic_expr (dump_file, otr_object, TDF_SLIM);
   1715       fprintf (dump_file, "  Outer instance pointer: ");
   1716       print_generic_expr (dump_file, instance, TDF_SLIM);
   1717       fprintf (dump_file, " offset: %i (bits)", (int)instance_offset);
   1718       fprintf (dump_file, " vtbl reference: ");
   1719       print_generic_expr (dump_file, instance_ref, TDF_SLIM);
   1720       fprintf (dump_file, "\n");
   1721     }
   1722 
   1723   tci.offset = instance_offset;
   1724   tci.instance = instance;
   1725   tci.vtbl_ptr_ref = instance_ref;
   1726   tci.known_current_type = NULL_TREE;
   1727   tci.known_current_offset = 0;
   1728   tci.otr_type = otr_type;
   1729   tci.type_maybe_changed = false;
   1730   tci.multiple_types_encountered = false;
   1731   tci.speculative = 0;
   1732   tci.seen_unanalyzed_store = false;
   1733 
   1734   unsigned aa_walk_budget = 0;
   1735   if (aa_walk_budget_p)
   1736     aa_walk_budget = *aa_walk_budget_p + 1;
   1737 
   1738   int walked
   1739    = walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
   1740 			 &tci, NULL, &function_entry_reached, aa_walk_budget);
   1741 
   1742   /* If we did not find any type changing statements, we may still drop
   1743      maybe_in_construction flag if the context already have outer type.
   1744 
   1745      Here we make special assumptions about both constructors and
   1746      destructors which are all the functions that are allowed to alter the
   1747      VMT pointers.  It assumes that destructors begin with assignment into
   1748      all VMT pointers and that constructors essentially look in the
   1749      following way:
   1750 
   1751      1) The very first thing they do is that they call constructors of
   1752      ancestor sub-objects that have them.
   1753 
   1754      2) Then VMT pointers of this and all its ancestors is set to new
   1755      values corresponding to the type corresponding to the constructor.
   1756 
   1757      3) Only afterwards, other stuff such as constructor of member
   1758      sub-objects and the code written by the user is run.  Only this may
   1759      include calling virtual functions, directly or indirectly.
   1760 
   1761      4) placement new cannot be used to change type of non-POD statically
   1762      allocated variables.
   1763 
   1764      There is no way to call a constructor of an ancestor sub-object in any
   1765      other way.
   1766 
   1767      This means that we do not have to care whether constructors get the
   1768      correct type information because they will always change it (in fact,
   1769      if we define the type to be given by the VMT pointer, it is undefined).
   1770 
   1771      The most important fact to derive from the above is that if, for some
   1772      statement in the section 3, we try to detect whether the dynamic type
   1773      has changed, we can safely ignore all calls as we examine the function
   1774      body backwards until we reach statements in section 2 because these
   1775      calls cannot be ancestor constructors or destructors (if the input is
   1776      not bogus) and so do not change the dynamic type (this holds true only
   1777      for automatically allocated objects but at the moment we devirtualize
   1778      only these).  We then must detect that statements in section 2 change
   1779      the dynamic type and can try to derive the new type.  That is enough
   1780      and we can stop, we will never see the calls into constructors of
   1781      sub-objects in this code.
   1782 
   1783      Therefore if the static outer type was found (outer_type)
   1784      we can safely ignore tci.speculative that is set on calls and give up
   1785      only if there was dynamic type store that may affect given variable
   1786      (seen_unanalyzed_store)  */
   1787 
   1788   if (walked < 0)
   1789     {
   1790       if (dump_file)
   1791 	fprintf (dump_file, "  AA walk budget exhausted.\n");
   1792       *aa_walk_budget_p = 0;
   1793       return false;
   1794     }
   1795   else if (aa_walk_budget_p)
   1796     *aa_walk_budget_p -= walked;
   1797 
   1798   if (!tci.type_maybe_changed
   1799       || (outer_type
   1800 	  && !dynamic
   1801 	  && !tci.seen_unanalyzed_store
   1802 	  && !tci.multiple_types_encountered
   1803 	  && ((offset == tci.offset
   1804 	       && types_same_for_odr (tci.known_current_type,
   1805 				      outer_type))
   1806 	       || (instance_offset == offset
   1807 		   && types_same_for_odr (tci.known_current_type,
   1808 					  instance_outer_type)))))
   1809     {
   1810       if (!outer_type || tci.seen_unanalyzed_store)
   1811 	return false;
   1812       if (maybe_in_construction)
   1813         maybe_in_construction = false;
   1814       if (dump_file)
   1815 	fprintf (dump_file, "  No dynamic type change found.\n");
   1816       return true;
   1817     }
   1818 
   1819   if (tci.known_current_type
   1820       && !function_entry_reached
   1821       && !tci.multiple_types_encountered)
   1822     {
   1823       if (!tci.speculative)
   1824 	{
   1825 	  outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
   1826 	  offset = tci.known_current_offset;
   1827 	  dynamic = true;
   1828 	  maybe_in_construction = false;
   1829 	  maybe_derived_type = false;
   1830 	  if (dump_file)
   1831 	    fprintf (dump_file, "  Determined dynamic type.\n");
   1832 	}
   1833       else if (!speculative_outer_type
   1834 	       || speculative_maybe_derived_type)
   1835 	{
   1836 	  speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
   1837 	  speculative_offset = tci.known_current_offset;
   1838 	  speculative_maybe_derived_type = false;
   1839 	  if (dump_file)
   1840 	    fprintf (dump_file, "  Determined speculative dynamic type.\n");
   1841 	}
   1842     }
   1843   else if (dump_file)
   1844     {
   1845       fprintf (dump_file, "  Found multiple types%s%s\n",
   1846 	       function_entry_reached ? " (function entry reached)" : "",
   1847 	       function_entry_reached ? " (multiple types encountered)" : "");
   1848     }
   1849 
   1850   return false;
   1851 }
   1852 
/* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and
   SPEC_MAYBE_DERIVED_TYPE seems consistent (and useful) with what we
   already have in the non-speculative context.
   OTR_TYPE, if non-NULL, is the type of the call the context is used with.
   Return true if the speculation is worth keeping.  */

bool
ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
							HOST_WIDE_INT spec_offset,
							bool spec_maybe_derived_type,
							tree otr_type) const
{
  /* Speculative devirtualization can be disabled by the user.  */
  if (!flag_devirtualize_speculatively)
    return false;

  /* Non-polymorphic types are useless for deriving likely polymorphic
     call targets.  */
  if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
    return false;

  /* If we know nothing, speculation is always good.  */
  if (!outer_type)
    return true;

  /* Speculation is only useful to avoid derived types.
     This is not 100% true for placement new, where the outer context may
     turn out to be useless, but ignore these for now.  */
  if (!maybe_derived_type)
    return false;

  /* If types agree, speculation is consistent, but it makes sense only
     when it says something new, i.e. when it removes the possibility of
     a derived type that the non-speculative part still allows.  */
  if (types_must_be_same_for_odr (spec_outer_type, outer_type))
    return maybe_derived_type && !spec_maybe_derived_type;

  /* If speculation does not contain the type in question, ignore it.  */
  if (otr_type
      && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
    return false;

  /* If outer type already contains speculation as a field,
     it is useless.  We already know from OUTER_TYPE
     SPEC_TYPE and that it is not in the construction.  */
  if (contains_type_p (outer_type, offset - spec_offset,
		       spec_outer_type, false, false))
    return false;

  /* If speculative outer type is not more specified than outer
     type, just give up.
     We can only decide this safely if we can compare types with OUTER_TYPE.
   */
  if ((!in_lto_p || odr_type_p (outer_type))
      && !contains_type_p (spec_outer_type,
			   spec_offset - offset,
			   outer_type, false))
    return false;
  return true;
}
   1908 
/* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET,
   NEW_MAYBE_DERIVED_TYPE.
   If OTR_TYPE is set, assume the context is used with OTR_TYPE.
   Return true if the speculative part of THIS was updated (including the
   case where conflicting speculation was cleared).  */

bool
ipa_polymorphic_call_context::combine_speculation_with
   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
    tree otr_type)
{
  /* Nothing to combine with.  */
  if (!new_outer_type)
    return false;

  /* restrict_to_inner_class may eliminate wrong speculation making our job
     easier.  */
  if (otr_type)
    restrict_to_inner_class (otr_type);

  /* Accept only speculation that is consistent with the non-speculative
     part of THIS and actually adds information.  */
  if (!speculation_consistent_p (new_outer_type, new_offset,
				 new_maybe_derived_type, otr_type))
    return false;

  /* New speculation is a win in case we have no speculation or new
     speculation does not consider derivations.  */
  if (!speculative_outer_type
      || (speculative_maybe_derived_type
	  && !new_maybe_derived_type))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;
      return true;
    }
  else if (types_must_be_same_for_odr (speculative_outer_type,
				       new_outer_type))
    {
      if (speculative_offset != new_offset)
	{
	  /* OK we have two contexts that seems valid but they disagree,
	     just give up.

	     This is not a lattice operation, so we may want to drop it later.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "Speculative outer types match, "
		     "offset mismatch -> invalid speculation\n");
	  clear_speculation ();
	  return true;
	}
      else
	{
	  /* Same type at the same offset; the only possible refinement is
	     dropping the maybe-derived flag.  */
	  if (speculative_maybe_derived_type && !new_maybe_derived_type)
	    {
	      speculative_maybe_derived_type = false;
	      return true;
	    }
	  else
	    return false;
	}
    }
  /* Choose type that contains the other.  This one either contains the outer
     as a field (thus giving exactly one target) or is deeper in the type
     hierarchy.  */
  else if (speculative_outer_type
	   && speculative_maybe_derived_type
	   && (new_offset > speculative_offset
	       || (new_offset == speculative_offset
		   && contains_type_p (new_outer_type,
				       0, speculative_outer_type, false))))
    {
      /* Tentatively install the new speculation; keep the old values so we
	 can roll back if restrict_to_inner_class invalidates it.  */
      tree old_outer_type = speculative_outer_type;
      HOST_WIDE_INT old_offset = speculative_offset;
      bool old_maybe_derived_type = speculative_maybe_derived_type;

      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;

      if (otr_type)
	restrict_to_inner_class (otr_type);

      /* If the speculation turned out to make no sense, revert to sensible
	 one.  */
      if (!speculative_outer_type)
	{
	  speculative_outer_type = old_outer_type;
	  speculative_offset = old_offset;
	  speculative_maybe_derived_type = old_maybe_derived_type;
	  return false;
	}
      /* Report a change only if restrict_to_inner_class left us with
	 something different from what we started with.  */
      return (old_offset != speculative_offset
	      || old_maybe_derived_type != speculative_maybe_derived_type
	      || types_must_be_same_for_odr (speculative_outer_type,
					     new_outer_type));
    }
  return false;
}
   2005 
/* Make speculation less specific so
   NEW_OUTER_TYPE, NEW_OFFSET, NEW_MAYBE_DERIVED_TYPE is also included.
   If OTR_TYPE is set, assume the context is used with OTR_TYPE.
   Return true if the speculative part of THIS was changed.  */

bool
ipa_polymorphic_call_context::meet_speculation_with
   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
    tree otr_type)
{
  /* Meeting with "no speculation" forces us to drop ours, too.  */
  if (!new_outer_type && speculative_outer_type)
    {
      clear_speculation ();
      return true;
    }

  /* restrict_to_inner_class may eliminate wrong speculation making our job
     easier.  */
  if (otr_type)
    restrict_to_inner_class (otr_type);

  /* If our own speculation is absent or inconsistent, there is nothing
     to weaken.  */
  if (!speculative_outer_type
      || !speculation_consistent_p (speculative_outer_type,
				    speculative_offset,
				    speculative_maybe_derived_type,
				    otr_type))
    return false;

  /* An inconsistent incoming speculation cannot be represented; drop ours
     to stay conservative.  */
  if (!speculation_consistent_p (new_outer_type, new_offset,
				 new_maybe_derived_type, otr_type))
    {
      clear_speculation ();
      return true;
    }

  else if (types_must_be_same_for_odr (speculative_outer_type,
				       new_outer_type))
    {
      /* Same type at different offsets cannot be merged; give up.  */
      if (speculative_offset != new_offset)
	{
	  clear_speculation ();
	  return true;
	}
      else
	{
	  /* Weaken by additionally allowing derived types if the incoming
	     speculation does.  */
	  if (!speculative_maybe_derived_type && new_maybe_derived_type)
	    {
	      speculative_maybe_derived_type = true;
	      return true;
	    }
	  else
	    return false;
	}
    }
  /* See if one type contains the other as a field (not base).  */
  else if (contains_type_p (new_outer_type, new_offset - speculative_offset,
			    speculative_outer_type, false, false))
    return false;
  else if (contains_type_p (speculative_outer_type,
			    speculative_offset - new_offset,
			    new_outer_type, false, false))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;
      return true;
    }
  /* See if our speculative type is a base of NEW_OUTER_TYPE
     (compare the analogous branch in combine_with).  */
  else if (contains_type_p (new_outer_type,
			    new_offset - speculative_offset,
			    speculative_outer_type, false, true))
    {
      if (!speculative_maybe_derived_type)
	{
	  speculative_maybe_derived_type = true;
	  return true;
	}
      return false;
    }
  /* See if NEW_OUTER_TYPE is a base of our speculative type; if so fall
     back to the base and allow derivations.  */
  else if (contains_type_p (speculative_outer_type,
			    speculative_offset - new_offset, new_outer_type, false, true))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = true;
      return true;
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Giving up on speculative meet\n");
      clear_speculation ();
      return true;
    }
}
   2101 
   2102 /* Assume that both THIS and a given context is valid and strengthen THIS
   2103    if possible.  Return true if any strengthening was made.
   2104    If actual type the context is being used in is known, OTR_TYPE should be
   2105    set accordingly. This improves quality of combined result.  */
   2106 
   2107 bool
   2108 ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
   2109 					    tree otr_type)
   2110 {
   2111   bool updated = false;
   2112 
   2113   if (ctx.useless_p () || invalid)
   2114     return false;
   2115 
   2116   /* Restricting context to inner type makes merging easier, however do not
   2117      do that unless we know how the context is used (OTR_TYPE is non-NULL)  */
   2118   if (otr_type && !invalid && !ctx.invalid)
   2119     {
   2120       restrict_to_inner_class (otr_type);
   2121       ctx.restrict_to_inner_class (otr_type);
   2122       if(invalid)
   2123         return false;
   2124     }
   2125 
   2126   if (dump_file && (dump_flags & TDF_DETAILS))
   2127     {
   2128       fprintf (dump_file, "Polymorphic call context combine:");
   2129       dump (dump_file);
   2130       fprintf (dump_file, "With context:                    ");
   2131       ctx.dump (dump_file);
   2132       if (otr_type)
   2133 	{
   2134           fprintf (dump_file, "To be used with type:            ");
   2135 	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
   2136           fprintf (dump_file, "\n");
   2137 	}
   2138     }
   2139 
   2140   /* If call is known to be invalid, we are done.  */
   2141   if (ctx.invalid)
   2142     {
   2143       if (dump_file && (dump_flags & TDF_DETAILS))
   2144         fprintf (dump_file, "-> Invalid context\n");
   2145       goto invalidate;
   2146     }
   2147 
   2148   if (!ctx.outer_type)
   2149     ;
   2150   else if (!outer_type)
   2151     {
   2152       outer_type = ctx.outer_type;
   2153       offset = ctx.offset;
   2154       dynamic = ctx.dynamic;
   2155       maybe_in_construction = ctx.maybe_in_construction;
   2156       maybe_derived_type = ctx.maybe_derived_type;
   2157       updated = true;
   2158     }
   2159   /* If types are known to be same, merging is quite easy.  */
   2160   else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
   2161     {
   2162       if (offset != ctx.offset
   2163 	  && TYPE_SIZE (outer_type)
   2164 	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
   2165 	{
   2166 	  if (dump_file && (dump_flags & TDF_DETAILS))
   2167 	    fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
   2168 	  clear_speculation ();
   2169 	  clear_outer_type ();
   2170 	  invalid = true;
   2171 	  return true;
   2172 	}
   2173       if (dump_file && (dump_flags & TDF_DETAILS))
   2174         fprintf (dump_file, "Outer types match, merging flags\n");
   2175       if (maybe_in_construction && !ctx.maybe_in_construction)
   2176 	{
   2177 	  updated = true;
   2178 	  maybe_in_construction = false;
   2179 	}
   2180       if (maybe_derived_type && !ctx.maybe_derived_type)
   2181 	{
   2182 	  updated = true;
   2183 	  maybe_derived_type = false;
   2184 	}
   2185       if (dynamic && !ctx.dynamic)
   2186 	{
   2187 	  updated = true;
   2188 	  dynamic = false;
   2189 	}
   2190     }
   2191   /* If we know the type precisely, there is not much to improve.  */
   2192   else if (!maybe_derived_type && !maybe_in_construction
   2193 	   && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
   2194     {
   2195       /* It may be easy to check if second context permits the first
   2196 	 and set INVALID otherwise.  This is not easy to do in general;
   2197 	 contains_type_p may return false negatives for non-comparable
   2198 	 types.
   2199 
   2200 	 If OTR_TYPE is known, we however can expect that
   2201 	 restrict_to_inner_class should have discovered the same base
   2202 	 type.  */
   2203       if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
   2204 	{
   2205 	  if (dump_file && (dump_flags & TDF_DETAILS))
   2206 	    fprintf (dump_file, "Contextes disagree -> invalid\n");
   2207 	  goto invalidate;
   2208 	}
   2209     }
   2210   /* See if one type contains the other as a field (not base).
   2211      In this case we want to choose the wider type, because it contains
   2212      more information.  */
   2213   else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
   2214 			    outer_type, false, false))
   2215     {
   2216       if (dump_file && (dump_flags & TDF_DETAILS))
   2217 	fprintf (dump_file, "Second type contain the first as a field\n");
   2218 
   2219       if (maybe_derived_type)
   2220 	{
   2221 	  outer_type = ctx.outer_type;
   2222 	  maybe_derived_type = ctx.maybe_derived_type;
   2223 	  offset = ctx.offset;
   2224 	  dynamic = ctx.dynamic;
   2225 	  updated = true;
   2226 	}
   2227 
   2228       /* If we do not know how the context is being used, we cannot
   2229 	 clear MAYBE_IN_CONSTRUCTION because it may be offseted
   2230 	 to other component of OUTER_TYPE later and we know nothing
   2231 	 about it.  */
   2232       if (otr_type && maybe_in_construction
   2233 	  && !ctx.maybe_in_construction)
   2234 	{
   2235           maybe_in_construction = false;
   2236 	  updated = true;
   2237 	}
   2238     }
   2239   else if (contains_type_p (outer_type, offset - ctx.offset,
   2240 			    ctx.outer_type, false, false))
   2241     {
   2242       if (dump_file && (dump_flags & TDF_DETAILS))
   2243 	fprintf (dump_file, "First type contain the second as a field\n");
   2244 
   2245       if (otr_type && maybe_in_construction
   2246 	  && !ctx.maybe_in_construction)
   2247 	{
   2248           maybe_in_construction = false;
   2249 	  updated = true;
   2250 	}
   2251     }
   2252   /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
   2253   else if (contains_type_p (ctx.outer_type,
   2254 			    ctx.offset - offset, outer_type, false, true))
   2255     {
   2256       if (dump_file && (dump_flags & TDF_DETAILS))
   2257 	fprintf (dump_file, "First type is base of second\n");
   2258       if (!maybe_derived_type)
   2259 	{
   2260 	  if (!ctx.maybe_in_construction
   2261 	      && types_odr_comparable (outer_type, ctx.outer_type))
   2262 	    {
   2263 	      if (dump_file && (dump_flags & TDF_DETAILS))
   2264 		fprintf (dump_file, "Second context does not permit base -> invalid\n");
   2265 	      goto invalidate;
   2266 	    }
   2267 	}
   2268       /* Pick variant deeper in the hierarchy.  */
   2269       else
   2270 	{
   2271 	  outer_type = ctx.outer_type;
   2272 	  maybe_in_construction = ctx.maybe_in_construction;
   2273 	  maybe_derived_type = ctx.maybe_derived_type;
   2274 	  offset = ctx.offset;
   2275 	  dynamic = ctx.dynamic;
   2276           updated = true;
   2277 	}
   2278     }
   2279   /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
   2280   else if (contains_type_p (outer_type,
   2281 			    offset - ctx.offset, ctx.outer_type, false, true))
   2282     {
   2283       if (dump_file && (dump_flags & TDF_DETAILS))
   2284 	fprintf (dump_file, "Second type is base of first\n");
   2285       if (!ctx.maybe_derived_type)
   2286 	{
   2287 	  if (!maybe_in_construction
   2288 	      && types_odr_comparable (outer_type, ctx.outer_type))
   2289 	    {
   2290 	      if (dump_file && (dump_flags & TDF_DETAILS))
   2291 		fprintf (dump_file, "First context does not permit base -> invalid\n");
   2292 	      goto invalidate;
   2293 	    }
   2294 	  /* Pick the base type.  */
   2295 	  else if (maybe_in_construction)
   2296 	    {
   2297 	      outer_type = ctx.outer_type;
   2298 	      maybe_in_construction = ctx.maybe_in_construction;
   2299 	      maybe_derived_type = ctx.maybe_derived_type;
   2300 	      offset = ctx.offset;
   2301 	      dynamic = ctx.dynamic;
   2302 	      updated = true;
   2303 	    }
   2304 	}
   2305     }
   2306   /* TODO handle merging using hierarchy. */
   2307   else if (dump_file && (dump_flags & TDF_DETAILS))
   2308     fprintf (dump_file, "Giving up on merge\n");
   2309 
   2310   updated |= combine_speculation_with (ctx.speculative_outer_type,
   2311 				       ctx.speculative_offset,
   2312 				       ctx.speculative_maybe_derived_type,
   2313 				       otr_type);
   2314 
   2315   if (updated && dump_file && (dump_flags & TDF_DETAILS))
   2316     {
   2317       fprintf (dump_file, "Updated as:                      ");
   2318       dump (dump_file);
   2319       fprintf (dump_file, "\n");
   2320     }
   2321   return updated;
   2322 
   2323 invalidate:
   2324   invalid = true;
   2325   clear_speculation ();
   2326   clear_outer_type ();
   2327   return true;
   2328 }
   2329 
   2330 /* Take non-speculative info, merge it with speculative and clear speculation.
   2331    Used when we no longer manage to keep track of actual outer type, but we
   2332    think it is still there.
   2333 
   2334    If OTR_TYPE is set, the transformation can be done more effectively assuming
   2335    that context is going to be used only that way.  */
   2336 
   2337 void
   2338 ipa_polymorphic_call_context::make_speculative (tree otr_type)
   2339 {
   2340   tree spec_outer_type = outer_type;
   2341   HOST_WIDE_INT spec_offset = offset;
   2342   bool spec_maybe_derived_type = maybe_derived_type;
   2343 
   2344   if (invalid)
   2345     {
   2346       invalid = false;
   2347       clear_outer_type ();
   2348       clear_speculation ();
   2349       return;
   2350     }
   2351   if (!outer_type)
   2352     return;
   2353   clear_outer_type ();
   2354   combine_speculation_with (spec_outer_type, spec_offset,
   2355 			    spec_maybe_derived_type,
   2356 			    otr_type);
   2357 }
   2358 
   2359 /* Use when we cannot track dynamic type change.  This speculatively assume
   2360    type change is not happening.  */
   2361 
   2362 void
   2363 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
   2364 							    tree otr_type)
   2365 {
   2366   if (dynamic)
   2367     make_speculative (otr_type);
   2368   else if (in_poly_cdtor)
   2369     maybe_in_construction = true;
   2370 }
   2371 
   2372 /* Return TRUE if this context conveys the same information as OTHER.  */
   2373 
   2374 bool
   2375 ipa_polymorphic_call_context::equal_to
   2376     (const ipa_polymorphic_call_context &x) const
   2377 {
   2378   if (useless_p ())
   2379     return x.useless_p ();
   2380   if (invalid)
   2381     return x.invalid;
   2382   if (x.useless_p () || x.invalid)
   2383     return false;
   2384 
   2385   if (outer_type)
   2386     {
   2387       if (!x.outer_type
   2388 	  || !types_odr_comparable (outer_type, x.outer_type)
   2389 	  || !types_same_for_odr (outer_type, x.outer_type)
   2390 	  || offset != x.offset
   2391 	  || maybe_in_construction != x.maybe_in_construction
   2392 	  || maybe_derived_type != x.maybe_derived_type
   2393 	  || dynamic != x.dynamic)
   2394 	return false;
   2395     }
   2396   else if (x.outer_type)
   2397     return false;
   2398 
   2399 
   2400   if (speculative_outer_type
   2401       && speculation_consistent_p (speculative_outer_type, speculative_offset,
   2402 				   speculative_maybe_derived_type, NULL_TREE))
   2403     {
   2404       if (!x.speculative_outer_type)
   2405 	return false;
   2406 
   2407       if (!types_odr_comparable (speculative_outer_type,
   2408 				 x.speculative_outer_type)
   2409 	  || !types_same_for_odr  (speculative_outer_type,
   2410 				   x.speculative_outer_type)
   2411 	  || speculative_offset != x.speculative_offset
   2412 	  || speculative_maybe_derived_type != x.speculative_maybe_derived_type)
   2413 	return false;
   2414     }
   2415   else if (x.speculative_outer_type
   2416 	   && x.speculation_consistent_p (x.speculative_outer_type,
   2417 					  x.speculative_offset,
   2418 				  	  x.speculative_maybe_derived_type,
   2419 					  NULL))
   2420     return false;
   2421 
   2422   return true;
   2423 }
   2424 
   2425 /* Modify context to be strictly less restrictive than CTX.  */
   2426 
   2427 bool
   2428 ipa_polymorphic_call_context::meet_with (ipa_polymorphic_call_context ctx,
   2429 					 tree otr_type)
   2430 {
   2431   bool updated = false;
   2432 
   2433   if (useless_p () || ctx.invalid)
   2434     return false;
   2435 
   2436   /* Restricting context to inner type makes merging easier, however do not
   2437      do that unless we know how the context is used (OTR_TYPE is non-NULL)  */
   2438   if (otr_type && !useless_p () && !ctx.useless_p ())
   2439     {
   2440       restrict_to_inner_class (otr_type);
   2441       ctx.restrict_to_inner_class (otr_type);
   2442       if(invalid)
   2443         return false;
   2444     }
   2445 
   2446   if (equal_to (ctx))
   2447     return false;
   2448 
   2449   if (ctx.useless_p () || invalid)
   2450     {
   2451       *this = ctx;
   2452       return true;
   2453     }
   2454 
   2455   if (dump_file && (dump_flags & TDF_DETAILS))
   2456     {
   2457       fprintf (dump_file, "Polymorphic call context meet:");
   2458       dump (dump_file);
   2459       fprintf (dump_file, "With context:                    ");
   2460       ctx.dump (dump_file);
   2461       if (otr_type)
   2462 	{
   2463           fprintf (dump_file, "To be used with type:            ");
   2464 	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
   2465           fprintf (dump_file, "\n");
   2466 	}
   2467     }
   2468 
   2469   if (!dynamic && ctx.dynamic)
   2470     {
   2471       dynamic = true;
   2472       updated = true;
   2473     }
   2474 
   2475   /* If call is known to be invalid, we are done.  */
   2476   if (!outer_type)
   2477     ;
   2478   else if (!ctx.outer_type)
   2479     {
   2480       clear_outer_type ();
   2481       updated = true;
   2482     }
   2483   /* If types are known to be same, merging is quite easy.  */
   2484   else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
   2485     {
   2486       if (offset != ctx.offset
   2487 	  && TYPE_SIZE (outer_type)
   2488 	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
   2489 	{
   2490 	  if (dump_file && (dump_flags & TDF_DETAILS))
   2491 	    fprintf (dump_file, "Outer types match, offset mismatch -> clearing\n");
   2492 	  clear_outer_type ();
   2493 	  return true;
   2494 	}
   2495       if (dump_file && (dump_flags & TDF_DETAILS))
   2496         fprintf (dump_file, "Outer types match, merging flags\n");
   2497       if (!maybe_in_construction && ctx.maybe_in_construction)
   2498 	{
   2499 	  updated = true;
   2500 	  maybe_in_construction = true;
   2501 	}
   2502       if (!maybe_derived_type && ctx.maybe_derived_type)
   2503 	{
   2504 	  updated = true;
   2505 	  maybe_derived_type = true;
   2506 	}
   2507       if (!dynamic && ctx.dynamic)
   2508 	{
   2509 	  updated = true;
   2510 	  dynamic = true;
   2511 	}
   2512     }
   2513   /* See if one type contains the other as a field (not base).  */
   2514   else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
   2515 			    outer_type, false, false))
   2516     {
   2517       if (dump_file && (dump_flags & TDF_DETAILS))
   2518 	fprintf (dump_file, "Second type contain the first as a field\n");
   2519 
   2520       /* The second type is more specified, so we keep the first.
   2521          We need to set DYNAMIC flag to avoid declaring context INVALID
   2522 	 of OFFSET ends up being out of range.  */
   2523       if (!dynamic
   2524 	  && (ctx.dynamic
   2525 	      || (!otr_type
   2526 		  && (!TYPE_SIZE (ctx.outer_type)
   2527 		      || !TYPE_SIZE (outer_type)
   2528 		      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
   2529 					   TYPE_SIZE (outer_type), 0)))))
   2530 	{
   2531 	  dynamic = true;
   2532 	  updated = true;
   2533 	}
   2534     }
   2535   else if (contains_type_p (outer_type, offset - ctx.offset,
   2536 			    ctx.outer_type, false, false))
   2537     {
   2538       if (dump_file && (dump_flags & TDF_DETAILS))
   2539 	fprintf (dump_file, "First type contain the second as a field\n");
   2540 
   2541       if (!dynamic
   2542 	  && (ctx.dynamic
   2543 	      || (!otr_type
   2544 		  && (!TYPE_SIZE (ctx.outer_type)
   2545 		      || !TYPE_SIZE (outer_type)
   2546 		      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
   2547 					   TYPE_SIZE (outer_type), 0)))))
   2548 	dynamic = true;
   2549       outer_type = ctx.outer_type;
   2550       offset = ctx.offset;
   2551       dynamic = ctx.dynamic;
   2552       maybe_in_construction = ctx.maybe_in_construction;
   2553       maybe_derived_type = ctx.maybe_derived_type;
   2554       updated = true;
   2555     }
   2556   /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
   2557   else if (contains_type_p (ctx.outer_type,
   2558 			    ctx.offset - offset, outer_type, false, true))
   2559     {
   2560       if (dump_file && (dump_flags & TDF_DETAILS))
   2561 	fprintf (dump_file, "First type is base of second\n");
   2562       if (!maybe_derived_type)
   2563 	{
   2564 	  maybe_derived_type = true;
   2565 	  updated = true;
   2566 	}
   2567       if (!maybe_in_construction && ctx.maybe_in_construction)
   2568 	{
   2569 	  maybe_in_construction = true;
   2570 	  updated = true;
   2571 	}
   2572       if (!dynamic && ctx.dynamic)
   2573 	{
   2574 	  dynamic = true;
   2575 	  updated = true;
   2576 	}
   2577     }
   2578   /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
   2579   else if (contains_type_p (outer_type,
   2580 			    offset - ctx.offset, ctx.outer_type, false, true))
   2581     {
   2582       if (dump_file && (dump_flags & TDF_DETAILS))
   2583 	fprintf (dump_file, "Second type is base of first\n");
   2584       outer_type = ctx.outer_type;
   2585       offset = ctx.offset;
   2586       updated = true;
   2587       if (!maybe_derived_type)
   2588 	maybe_derived_type = true;
   2589       if (!maybe_in_construction && ctx.maybe_in_construction)
   2590 	maybe_in_construction = true;
   2591       if (!dynamic && ctx.dynamic)
   2592 	dynamic = true;
   2593     }
   2594   /* TODO handle merging using hierarchy. */
   2595   else
   2596     {
   2597       if (dump_file && (dump_flags & TDF_DETAILS))
   2598         fprintf (dump_file, "Giving up on meet\n");
   2599       clear_outer_type ();
   2600       updated = true;
   2601     }
   2602 
   2603   updated |= meet_speculation_with (ctx.speculative_outer_type,
   2604 				    ctx.speculative_offset,
   2605 				    ctx.speculative_maybe_derived_type,
   2606 				    otr_type);
   2607 
   2608   if (updated && dump_file && (dump_flags & TDF_DETAILS))
   2609     {
   2610       fprintf (dump_file, "Updated as:                      ");
   2611       dump (dump_file);
   2612       fprintf (dump_file, "\n");
   2613     }
   2614   return updated;
   2615 }
   2616