      1 /* Language-independent node constructors for parse phase of GNU compiler.
      2    Copyright (C) 1987-2022 Free Software Foundation, Inc.
      3 
      4 This file is part of GCC.
      5 
      6 GCC is free software; you can redistribute it and/or modify it under
      7 the terms of the GNU General Public License as published by the Free
      8 Software Foundation; either version 3, or (at your option) any later
      9 version.
     10 
     11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
     13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     14 for more details.
     15 
     16 You should have received a copy of the GNU General Public License
     17 along with GCC; see the file COPYING3.  If not see
     18 <http://www.gnu.org/licenses/>.  */
     19 
     20 /* This file contains the low level primitives for operating on tree nodes,
     21    including allocation, list operations, interning of identifiers,
     22    construction of data type nodes and statement nodes,
     23    and construction of type conversion nodes.  It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent but can occasionally
   call language-dependent routines.  */
     29 
     30 #include "config.h"
     31 #include "system.h"
     32 #include "coretypes.h"
     33 #include "backend.h"
     34 #include "target.h"
     35 #include "tree.h"
     36 #include "gimple.h"
     37 #include "tree-pass.h"
     38 #include "ssa.h"
     39 #include "cgraph.h"
     40 #include "diagnostic.h"
     41 #include "flags.h"
     42 #include "alias.h"
     43 #include "fold-const.h"
     44 #include "stor-layout.h"
     45 #include "calls.h"
     46 #include "attribs.h"
     47 #include "toplev.h" /* get_random_seed */
     48 #include "output.h"
     49 #include "common/common-target.h"
     50 #include "langhooks.h"
     51 #include "tree-inline.h"
     52 #include "tree-iterator.h"
     53 #include "internal-fn.h"
     54 #include "gimple-iterator.h"
     55 #include "gimplify.h"
     56 #include "tree-dfa.h"
     57 #include "langhooks-def.h"
     58 #include "tree-diagnostic.h"
     59 #include "except.h"
     60 #include "builtins.h"
     61 #include "print-tree.h"
     62 #include "ipa-utils.h"
     63 #include "selftest.h"
     64 #include "stringpool.h"
     65 #include "attribs.h"
     66 #include "rtl.h"
     67 #include "regs.h"
     68 #include "tree-vector-builder.h"
     69 #include "gimple-fold.h"
     70 #include "escaped_string.h"
     71 #include "gimple-range.h"
     72 #include "gomp-constants.h"
     73 #include "dfp.h"
     74 
/* Tree code classes.  */

/* Expand each entry of all-tree.def to its class, yielding a table
   indexed by enum tree_code.  */
#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
     86 
/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

/* Expand each entry of all-tree.def to its operand count.  */
#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
    100 
/* Names of tree components.
   Used for printing out the tree and error messages.
   Indexed by enum tree_code, like the tables above.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
    112 
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",	/* tcc_exceptional */
  "constant",		/* tcc_constant */
  "type",		/* tcc_type */
  "declaration",	/* tcc_declaration */
  "reference",		/* tcc_reference */
  "comparison",		/* tcc_comparison */
  "unary",		/* tcc_unary */
  "binary",		/* tcc_binary */
  "statement",		/* tcc_statement */
  "vl_exp",		/* tcc_vl_exp */
  "expression"		/* tcc_expression */
};
    130 
    131 /* obstack.[ch] explicitly declined to prototype this.  */
    132 extern int _obstack_allocated_p (struct obstack *h, void *obj);
    133 
    134 /* Statistics-gathering stuff.  */
    135 
    136 static uint64_t tree_code_counts[MAX_TREE_CODES];
    137 uint64_t tree_node_counts[(int) all_kinds];
    138 uint64_t tree_node_sizes[(int) all_kinds];
    139 
/* Human-readable names for the statistics kinds above.
   Keep in sync with tree.h:enum tree_node_kind -- the array is indexed
   by that enumeration.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};
    159 
    160 /* Unique id for next decl created.  */
    161 static GTY(()) int next_decl_uid;
    162 /* Unique id for next type created.  */
    163 static GTY(()) unsigned next_type_uid = 1;
    164 /* Unique id for next debug decl created.  Use negative numbers,
    165    to catch erroneous uses.  */
    166 static GTY(()) int next_debug_decl_uid;
    167 
/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;	/* Hash code memoized when TYPE was inserted.  */
  tree type;		/* The hashed type itself.  */
};
    175 
/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

/* Hash traits for the GC'd cache of types (type_hash_table below).  */
struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  /* Return the hash code memoized at insertion time; types cannot be
     rehashed (see the comment on struct type_hash).  */
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  /* Keep an entry across a GC only while the cached type is itself
     still marked live.  */
  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};
    190 
    191 /* Now here is the hash table.  When recording a type, it is added to
    192    the slot whose index is the hash code.  Note that the hash table is
    193    used for several kinds of types (function types, array types and
    194    array index range types, for now).  While all these live in the
    195    same table, they are completely independent, and the hash code is
    196    computed differently for each of these.  */
    197 
    198 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
    199 
    200 /* Hash table and temporary node for larger integer const values.  */
    201 static GTY (()) tree int_cst_node;
    202 
/* Hash traits for int_cst_hash_table, the cache of shared integer
   constant nodes.  */
struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};
    208 
    209 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
    210 
/* Class and variable for making sure that there is a single POLY_INT_CST
   for a given value.  */
struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  /* Lookups compare an existing node against a (type, value) pair
     rather than against another tree.  */
  typedef std::pair<tree, const poly_wide_int *> compare_type;
  static hashval_t hash (tree t);
  static bool equal (tree x, const compare_type &y);
};
    219 
    220 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
    221 
    222 /* Hash table for optimization flags and target option flags.  Use the same
    223    hash table for both sets of options.  Nodes for building the current
    224    optimization and target option nodes.  The assumption is most of the time
    225    the options created will already be in the hash table, so we avoid
    226    allocating and freeing up a node repeatably.  */
    227 static GTY (()) tree cl_optimization_node;
    228 static GTY (()) tree cl_target_option_node;
    229 
/* Hash traits shared by OPTIMIZATION_NODE and TARGET_OPTION_NODE trees
   stored in cl_option_hash_table (see the comment above).  */
struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};
    235 
    236 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
    237 
    238 /* General tree->tree mapping  structure for use in hash tables.  */
    239 
    240 
    241 static GTY ((cache))
    242      hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
    243 
    244 static GTY ((cache))
    245      hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
    246 
    247 static GTY ((cache))
    248      hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
    249 
    250 static void set_type_quals (tree, int);
    251 static void print_type_hash_statistics (void);
    252 static void print_debug_expr_statistics (void);
    253 static void print_value_expr_statistics (void);
    254 
    255 tree global_trees[TI_MAX];
    256 tree integer_types[itk_none];
    257 
    258 bool int_n_enabled_p[NUM_INT_N_ENTS];
    259 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
    260 
    261 bool tree_contains_struct[MAX_TREE_CODES][64];
    262 
/* Number of operands for each OMP clause.  Indexed by enum
   omp_clause_code; keep in sync with omp_clause_code_name below.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  1, /* OMP_CLAUSE_AFFINITY  */
  2, /* OMP_CLAUSE_ALIGNED  */
  3, /* OMP_CLAUSE_ALLOCATE  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  1, /* OMP_CLAUSE_DETACH  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_HAS_DEVICE_ADDR  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED   */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  2, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE_FILTER  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  0, /* OMP_CLAUSE_IF_PRESENT */
  0, /* OMP_CLAUSE_FINALIZE */
  0, /* OMP_CLAUSE_NOHOST */
};
    353 
/* User-visible name of each OMP clause.  Indexed by enum
   omp_clause_code; entries correspond one-to-one with
   omp_clause_num_ops above.  Note that "to" appears twice.  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "task_reduction",
  "in_reduction",
  "copyin",
  "copyprivate",
  "linear",
  "affinity",
  "aligned",
  "allocate",
  "depend",
  "nontemporal",
  "uniform",
  "to",			/* OMP_CLAUSE_TO_DECLARE.  */
  "link",
  "detach",
  "use_device_ptr",
  "use_device_addr",
  "is_device_ptr",
  "inclusive",
  "exclusive",
  "from",
  "to",			/* OMP_CLAUSE_TO.  */
  "map",
  "has_device_addr",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "_reductemp_",
  "_condtemp_",
  "_scantemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "device_type",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "order",
  "bind",
  "filter",
  "_simduid_",
  "_simt_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "if_present",
  "finalize",
  "nohost",
};
    443 
/* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
   clause names, but for use in diagnostics etc. would like to use the "user"
   clause names.  Return the user-facing name for CLAUSE; OACC selects the
   OpenACC naming.  */

const char *
user_omp_clause_code_name (tree clause, bool oacc)
{
  /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
     distinguish clauses as seen by the user.  See also where front ends do
     'build_omp_clause' with 'OMP_CLAUSE_MAP'.  */
  if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
    switch (OMP_CLAUSE_MAP_KIND (clause))
      {
      /* Each GOMP_MAP_FORCE_* kind deliberately falls through to share
	 the name of its non-forced counterpart.  */
      case GOMP_MAP_FORCE_ALLOC:
      case GOMP_MAP_ALLOC: return "create";
      case GOMP_MAP_FORCE_TO:
      case GOMP_MAP_TO: return "copyin";
      case GOMP_MAP_FORCE_FROM:
      case GOMP_MAP_FROM: return "copyout";
      case GOMP_MAP_FORCE_TOFROM:
      case GOMP_MAP_TOFROM: return "copy";
      case GOMP_MAP_RELEASE: return "delete";
      case GOMP_MAP_FORCE_PRESENT: return "present";
      case GOMP_MAP_ATTACH: return "attach";
      case GOMP_MAP_FORCE_DETACH:
      case GOMP_MAP_DETACH: return "detach";
      case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
      case GOMP_MAP_LINK: return "link";
      case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
      /* Other map kinds fall back to the generic OpenMP name.  */
      default: break;
      }

  return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
}
    478 
    479 
    480 /* Return the tree node structure used by tree code CODE.  */
    481 
    482 static inline enum tree_node_structure_enum
    483 tree_node_structure_for_code (enum tree_code code)
    484 {
    485   switch (TREE_CODE_CLASS (code))
    486     {
    487     case tcc_declaration:
    488       switch (code)
    489 	{
    490 	case CONST_DECL:	return TS_CONST_DECL;
    491 	case DEBUG_EXPR_DECL:	return TS_DECL_WRTL;
    492 	case FIELD_DECL:	return TS_FIELD_DECL;
    493 	case FUNCTION_DECL:	return TS_FUNCTION_DECL;
    494 	case LABEL_DECL:	return TS_LABEL_DECL;
    495 	case PARM_DECL:		return TS_PARM_DECL;
    496 	case RESULT_DECL:	return TS_RESULT_DECL;
    497 	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
    498 	case TYPE_DECL:		return TS_TYPE_DECL;
    499 	case VAR_DECL:		return TS_VAR_DECL;
    500 	default: 		return TS_DECL_NON_COMMON;
    501 	}
    502 
    503     case tcc_type:		return TS_TYPE_NON_COMMON;
    504 
    505     case tcc_binary:
    506     case tcc_comparison:
    507     case tcc_expression:
    508     case tcc_reference:
    509     case tcc_statement:
    510     case tcc_unary:
    511     case tcc_vl_exp:		return TS_EXP;
    512 
    513     default:  /* tcc_constant and tcc_exceptional */
    514       break;
    515     }
    516 
    517   switch (code)
    518     {
    519       /* tcc_constant cases.  */
    520     case COMPLEX_CST:		return TS_COMPLEX;
    521     case FIXED_CST:		return TS_FIXED_CST;
    522     case INTEGER_CST:		return TS_INT_CST;
    523     case POLY_INT_CST:		return TS_POLY_INT_CST;
    524     case REAL_CST:		return TS_REAL_CST;
    525     case STRING_CST:		return TS_STRING;
    526     case VECTOR_CST:		return TS_VECTOR;
    527     case VOID_CST:		return TS_TYPED;
    528 
    529       /* tcc_exceptional cases.  */
    530     case BLOCK:			return TS_BLOCK;
    531     case CONSTRUCTOR:		return TS_CONSTRUCTOR;
    532     case ERROR_MARK:		return TS_COMMON;
    533     case IDENTIFIER_NODE:	return TS_IDENTIFIER;
    534     case OMP_CLAUSE:		return TS_OMP_CLAUSE;
    535     case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
    536     case PLACEHOLDER_EXPR:	return TS_COMMON;
    537     case SSA_NAME:		return TS_SSA_NAME;
    538     case STATEMENT_LIST:	return TS_STATEMENT_LIST;
    539     case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;
    540     case TREE_BINFO:		return TS_BINFO;
    541     case TREE_LIST:		return TS_LIST;
    542     case TREE_VEC:		return TS_VEC;
    543 
    544     default:
    545       gcc_unreachable ();
    546     }
    547 }
    548 
    549 
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  For every tree code, records which TS_* structures the
   node contains, by marking the code's own structure and then the
   structure(s) it derives from.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  Each case
	 marks only the immediate base; the MARK_TS_* macros are
	 presumed to mark the remaining ancestors transitively --
	 see their definitions in tree.h.  */
      switch (ts_code)
	{
	case TS_TYPED:
	case TS_BLOCK:
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:
	  MARK_TS_BASE (code);
	  break;

	case TS_COMMON:
	case TS_INT_CST:
	case TS_POLY_INT_CST:
	case TS_REAL_CST:
	case TS_FIXED_CST:
	case TS_VECTOR:
	case TS_STRING:
	case TS_COMPLEX:
	case TS_SSA_NAME:
	case TS_CONSTRUCTOR:
	case TS_EXP:
	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_IDENTIFIER:
	case TS_DECL_MINIMAL:
	case TS_TYPE_COMMON:
	case TS_LIST:
	case TS_VEC:
	case TS_BINFO:
	case TS_OMP_CLAUSE:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	case TS_DECL_COMMON:
	  MARK_TS_DECL_MINIMAL (code);
	  break;

	case TS_DECL_WRTL:
	case TS_CONST_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	case TS_PARM_DECL:
	case TS_LABEL_DECL:
	case TS_RESULT_DECL:
	  MARK_TS_DECL_WRTL (code);
	  break;

	case TS_FIELD_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_VAR_DECL:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_TYPE_DECL:
	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
    696 
    697 
/* Init tree.cc.  Creates the GC'd hash tables and seed nodes used by
   the constructors in this file, then fills in tree_contains_struct
   and lets the front end register its own tree structures.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);

  /* Scratch node used when looking up large integer constants.  */
  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  /* Scratch nodes for option-node lookups (see comment above their
     declarations).  */
  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
    728 
    729 
    730 /* The name of the object as the assembler will see it (but before any
    732    translations made by ASM_OUTPUT_LABELREF).  Often this is the same
    733    as DECL_NAME.  It is an IDENTIFIER_NODE.  */
    734 tree
    735 decl_assembler_name (tree decl)
    736 {
    737   if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    738     lang_hooks.set_decl_assembler_name (decl);
    739   return DECL_ASSEMBLER_NAME_RAW (decl);
    740 }
    741 
    742 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
    743    (either of which may be NULL).  Inform the FE, if this changes the
    744    name.  */
    745 
    746 void
    747 overwrite_decl_assembler_name (tree decl, tree name)
    748 {
    749   if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
    750     lang_hooks.overwrite_decl_assembler_name (decl, name);
    751 }
    752 
/* Return true if DECL may need an assembler name to be set.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p to return true on those types during
     LTO and by comparing the mangled name, we can say what types are intended
     to be equivalent across compilation unit.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration type have linkage that allows use
     to check type_in_anonymous_namespace_p. We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling do make differences
     between char/signed char/unsigned char etc.  Storing name for these makes
     e.g.  -fno-signed-char/-fsigned-char mismatches to be handled well.
     See cp/mangle.cc:write_builtin_type for details.  */

  if (TREE_CODE (decl) == TYPE_DECL)
    {
      /* Only the main-variant's own, non-artificial TYPE_DECL
	 qualifies, and only for types covered by the rules in the
	 comment above.  */
      if (DECL_NAME (decl)
	  && decl == TYPE_NAME (TREE_TYPE (decl))
	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
	  && (type_with_linkage_p (TREE_TYPE (decl))
	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
	return !DECL_ASSEMBLER_NAME_SET_P (decl);
      return false;
    }
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (!VAR_OR_FUNCTION_DECL_P (decl))
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (VAR_P (decl)
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (fndecl_built_in_p (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
    831 
    832 /* If T needs an assembler name, have one created for it.  */
    833 
    834 void
    835 assign_assembler_name_if_needed (tree t)
    836 {
    837   if (need_assembler_name_p (t))
    838     {
    839       /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
    840 	 diagnostics that use input_location to show locus
    841 	 information.  The problem here is that, at this point,
    842 	 input_location is generally anchored to the end of the file
    843 	 (since the parser is long gone), so we don't have a good
    844 	 position to pin it to.
    845 
    846 	 To alleviate this problem, this uses the location of T's
    847 	 declaration.  Examples of this are
    848 	 testsuite/g++.dg/template/cond2.C and
    849 	 testsuite/g++.dg/template/pr35240.C.  */
    850       location_t saved_location = input_location;
    851       input_location = DECL_SOURCE_LOCATION (t);
    852 
    853       decl_assembler_name (t);
    854 
    855       input_location = saved_location;
    856     }
    857 }
    858 
    859 /* When the target supports COMDAT groups, this indicates which group the
    860    DECL is associated with.  This can be either an IDENTIFIER_NODE or a
    861    decl, in which case its DECL_ASSEMBLER_NAME identifies the group.  */
    862 tree
    863 decl_comdat_group (const_tree node)
    864 {
    865   struct symtab_node *snode = symtab_node::get (node);
    866   if (!snode)
    867     return NULL;
    868   return snode->get_comdat_group ();
    869 }
    870 
    871 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE.  */
    872 tree
    873 decl_comdat_group_id (const_tree node)
    874 {
    875   struct symtab_node *snode = symtab_node::get (node);
    876   if (!snode)
    877     return NULL;
    878   return snode->get_comdat_group_id ();
    879 }
    880 
/* When the target supports named sections, return the section name NODE
   is placed in as a plain string, or NULL if it is in no section or has
   no symbol table entry.  */
const char *
decl_section_name (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_section ();
}
    891 
    892 /* Set section name of NODE to VALUE (that is expected to be
    893    identifier node) */
    894 void
    895 set_decl_section_name (tree node, const char *value)
    896 {
    897   struct symtab_node *snode;
    898 
    899   if (value == NULL)
    900     {
    901       snode = symtab_node::get (node);
    902       if (!snode)
    903 	return;
    904     }
    905   else if (VAR_P (node))
    906     snode = varpool_node::get_create (node);
    907   else
    908     snode = cgraph_node::get_create (node);
    909   snode->set_section (value);
    910 }
    911 
    912 /* Set section name of NODE to match the section name of OTHER.
    913 
    914    set_decl_section_name (decl, other) is equivalent to
    915    set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
    916    efficient.  */
    917 void
    918 set_decl_section_name (tree decl, const_tree other)
    919 {
    920   struct symtab_node *other_node = symtab_node::get (other);
    921   if (other_node)
    922     {
    923       struct symtab_node *decl_node;
    924       if (VAR_P (decl))
    925     decl_node = varpool_node::get_create (decl);
    926       else
    927     decl_node = cgraph_node::get_create (decl);
    928       decl_node->set_section (*other_node);
    929     }
    930   else
    931     {
    932       struct symtab_node *decl_node = symtab_node::get (decl);
    933       if (!decl_node)
    934     return;
    935       decl_node->set_section (NULL);
    936     }
    937 }
    938 
    939 /* Return TLS model of a variable NODE.  */
    940 enum tls_model
    941 decl_tls_model (const_tree node)
    942 {
    943   struct varpool_node *snode = varpool_node::get (node);
    944   if (!snode)
    945     return TLS_MODEL_NONE;
    946   return snode->tls_model;
    947 }
    948 
    949 /* Set TLS model of variable NODE to MODEL.  */
    950 void
    951 set_decl_tls_model (tree node, enum tls_model model)
    952 {
    953   struct varpool_node *vnode;
    954 
    955   if (model == TLS_MODEL_NONE)
    956     {
    957       vnode = varpool_node::get (node);
    958       if (!vnode)
    959 	return;
    960     }
    961   else
    962     vnode = varpool_node::get_create (node);
    963   vnode->tls_model = model;
    964 }
    965 
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR; such
   nodes trip the gcc_unreachable calls below and must be sized via
   tree_size on a concrete node instead.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      switch (code)
	{
	case FIELD_DECL:	return sizeof (tree_field_decl);
	case PARM_DECL:		return sizeof (tree_parm_decl);
	case VAR_DECL:		return sizeof (tree_var_decl);
	case LABEL_DECL:	return sizeof (tree_label_decl);
	case RESULT_DECL:	return sizeof (tree_result_decl);
	case CONST_DECL:	return sizeof (tree_const_decl);
	case TYPE_DECL:		return sizeof (tree_type_decl);
	case FUNCTION_DECL:	return sizeof (tree_function_decl);
	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
	case NAMESPACE_DECL:
	case IMPORTED_DECL:
	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
	default:
	  /* Codes at or past NUM_TREE_CODES are language-specific;
	     only the frontend's lang hook knows their size.  */
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_type:  /* a type node */
      switch (code)
	{
	case OFFSET_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case INTEGER_TYPE:
	case REAL_TYPE:
	case OPAQUE_TYPE:
	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case NULLPTR_TYPE:
	case FIXED_POINT_TYPE:
	case COMPLEX_TYPE:
	case VECTOR_TYPE:
	case ARRAY_TYPE:
	case RECORD_TYPE:
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case VOID_TYPE:
	case FUNCTION_TYPE:
	case METHOD_TYPE:
	case LANG_TYPE:		return sizeof (tree_type_non_common);
	default:
	  /* Language-specific type codes; defer to the frontend.  */
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      /* tree_exp embeds one operand; add space for the rest.  */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
	case REAL_CST:		return sizeof (tree_real_cst);
	case FIXED_CST:		return sizeof (tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (tree_complex);
	case VECTOR_CST:	gcc_unreachable ();
	case STRING_CST:	gcc_unreachable ();
	default:
	  /* Language-specific constant codes; defer to the frontend.  */
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (tree_common);

	/* Variable-sized: must be sized per-node via tree_size.  */
	case TREE_VEC:		gcc_unreachable ();
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (tree_constructor);
	case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (tree_target_option);

	default:
	  /* Language-specific exceptional codes; defer to the frontend.  */
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
   1077 
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes,
   whose length must be computed from per-node data.  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      /* One HOST_WIDE_INT element is embedded in tree_int_cst.  */
      return (sizeof (struct tree_int_cst)
	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      /* The base_binfos vec is allocated inline at the tail.  */
      return (offsetof (struct tree_binfo, base_binfos)
	      + vec<tree, va_gc>
		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      /* Only the encoded elements are stored, not the full vector.  */
      return (sizeof (struct tree_vector)
	      + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));

    case STRING_CST:
      /* +1 for the trailing NUL byte kept after the string contents.  */
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
	        * sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
	/* Variable-length expressions (e.g. CALL_EXPR) record their
	   operand count in the node itself.  */
	return (sizeof (struct tree_exp)
		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	/* Everything else has a fixed size determined by its code.  */
	return tree_code_size (code);
    }
}
   1119 
   1120 /* Return tree node kind based on tree CODE.  */
   1121 
   1122 static tree_node_kind
   1123 get_stats_node_kind (enum tree_code code)
   1124 {
   1125   enum tree_code_class type = TREE_CODE_CLASS (code);
   1126 
   1127   switch (type)
   1128     {
   1129     case tcc_declaration:  /* A decl node */
   1130       return d_kind;
   1131     case tcc_type:  /* a type node */
   1132       return t_kind;
   1133     case tcc_statement:  /* an expression with side effects */
   1134       return s_kind;
   1135     case tcc_reference:  /* a reference */
   1136       return r_kind;
   1137     case tcc_expression:  /* an expression */
   1138     case tcc_comparison:  /* a comparison expression */
   1139     case tcc_unary:  /* a unary arithmetic expression */
   1140     case tcc_binary:  /* a binary arithmetic expression */
   1141       return e_kind;
   1142     case tcc_constant:  /* a constant */
   1143       return c_kind;
   1144     case tcc_exceptional:  /* something random, like an identifier.  */
   1145       switch (code)
   1146 	{
   1147 	case IDENTIFIER_NODE:
   1148 	  return id_kind;
   1149 	case TREE_VEC:
   1150 	  return vec_kind;
   1151 	case TREE_BINFO:
   1152 	  return binfo_kind;
   1153 	case SSA_NAME:
   1154 	  return ssa_name_kind;
   1155 	case BLOCK:
   1156 	  return b_kind;
   1157 	case CONSTRUCTOR:
   1158 	  return constr_kind;
   1159 	case OMP_CLAUSE:
   1160 	  return omp_clause_kind;
   1161 	default:
   1162 	  return x_kind;
   1163 	}
   1164       break;
   1165     case tcc_vl_exp:
   1166       return e_kind;
   1167     default:
   1168       gcc_unreachable ();
   1169     }
   1170 }
   1171 
   1172 /* Record interesting allocation statistics for a tree node with CODE
   1173    and LENGTH.  */
   1174 
   1175 static void
   1176 record_node_allocation_statistics (enum tree_code code, size_t length)
   1177 {
   1178   if (!GATHER_STATISTICS)
   1179     return;
   1180 
   1181   tree_node_kind kind = get_stats_node_kind (code);
   1182 
   1183   tree_code_counts[(int) code]++;
   1184   tree_node_counts[(int) kind]++;
   1185   tree_node_sizes[(int) kind] += length;
   1186 }
   1187 
   1188 /* Allocate and return a new UID from the DECL_UID namespace.  */
   1189 
   1190 int
   1191 allocate_decl_uid (void)
   1192 {
   1193   return next_decl_uid++;
   1194 }
   1195 
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* ggc_alloc_cleared_* zero-fills, so every field not set below
     starts out as zero/NULL.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      /* Statements have side effects by definition, except for the
	 debug markers.  */
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    /* Minimal alignment until layout computes the real one.  */
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	/* Debug decls draw UIDs from a separate, descending counter so
	   they do not perturb the regular DECL_UID numbering.  */
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      /* A fresh type is its own main variant and canonical type.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
        {
	case TARGET_OPTION_NODE:
	  /* These nodes own a separately GC-allocated option struct.  */
	  TREE_TARGET_OPTION(t)
			    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
			    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
   1311 
/* Free tree node NODE, releasing any auxiliary storage it owns and
   returning its memory to the GC allocator.  Also un-records the
   allocation statistics that record_node_allocation_statistics
   accumulated for it.  */

void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      enum tree_node_kind kind = get_stats_node_kind (code);

      /* Catch frees of nodes whose allocation was never recorded
	 (or was already un-recorded).  */
      gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
      gcc_checking_assert (tree_node_counts[(int) kind] != 0);
      gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));

      tree_code_counts[(int) TREE_CODE (node)]--;
      tree_node_counts[(int) kind]--;
      tree_node_sizes[(int) kind] -= tree_size (node);
    }
  /* Some node kinds own out-of-line storage; free it before the node
     itself.  */
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  else if (code == OPTIMIZATION_NODE)
    cl_optimization_option_free (TREE_OPTIMIZATION (node));
  else if (code == TARGET_OPTION_NODE)
    cl_target_option_free (TREE_TARGET_OPTION (node));
  ggc_free (node);
}
   1342 
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  /* STATEMENT_LISTs hold internal pointers into their own storage and
     cannot be duplicated with a flat memcpy.  */
  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  /* Start from a bitwise copy, then fix up the fields that must not
     be shared between the original and the copy.  */
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* The copy gets a fresh UID; DEBUG_EXPR_DECLs use the separate
	 descending debug-UID counter.  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    /* Keep pointing to the original points-to UID so alias
	       info stays valid for the copy.  */
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  /* The value expr lives in a side table keyed by decl, so it
	     must be re-registered for the copy.  */
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  /* The copy is not the symbol the symtab knows about.  */
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  /* Init priority also lives in a side table; re-register.  */
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
    else if (code == TARGET_OPTION_NODE)
      {
	/* Deep-copy the option struct: the memcpy above only copied
	   the pointer, which must not be shared.  */
	TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
	memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
		sizeof (struct cl_target_option));
      }
    else if (code == OPTIMIZATION_NODE)
      {
	TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
	memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
		sizeof (struct cl_optimization));
      }

  return t;
}
   1432 
   1433 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   1434    For example, this can copy a list made of TREE_LIST nodes.  */
   1435 
   1436 tree
   1437 copy_list (tree list)
   1438 {
   1439   tree head;
   1440   tree prev, next;
   1441 
   1442   if (list == 0)
   1443     return 0;
   1444 
   1445   head = prev = copy_node (list);
   1446   next = TREE_CHAIN (list);
   1447   while (next)
   1448     {
   1449       TREE_CHAIN (prev) = copy_node (next);
   1450       prev = TREE_CHAIN (prev);
   1451       next = TREE_CHAIN (next);
   1452     }
   1453   return head;
   1454 }
   1455 
   1456 
   1457 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
   1459    INTEGER_CST with value CST and type TYPE.   */
   1460 
   1461 static unsigned int
   1462 get_int_cst_ext_nunits (tree type, const wide_int &cst)
   1463 {
   1464   gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
   1465   /* We need extra HWIs if CST is an unsigned integer with its
   1466      upper bit set.  */
   1467   if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
   1468     return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
   1469   return cst.get_len ();
   1470 }
   1471 
/* Return a new INTEGER_CST with value CST and type TYPE.  The stored
   HWI elements are extended to a multiple of HOST_BITS_PER_WIDE_INT
   according to the sign of TYPE (see wide_int_to_tree_1).  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* Unsigned value with its top bit set: the extension elements
	 are all-ones up to the precision, then zero-extended in the
	 topmost element.  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* Zero-extend the partial top element in place.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  /* Copy the remaining (sign-extended) elements verbatim.  */
  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
   1503 
   1504 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE.  */
   1505 
   1506 static tree
   1507 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
   1508 			CXX_MEM_STAT_INFO)
   1509 {
   1510   size_t length = sizeof (struct tree_poly_int_cst);
   1511   record_node_allocation_statistics (POLY_INT_CST, length);
   1512 
   1513   tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
   1514 
   1515   TREE_SET_CODE (t, POLY_INT_CST);
   1516   TREE_CONSTANT (t) = 1;
   1517   TREE_TYPE (t) = type;
   1518   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
   1519     POLY_INT_CST_COEFF (t, i) = coeffs[i];
   1520   return t;
   1521 }
   1522 
   1523 /* Create a constant tree that contains CST sign-extended to TYPE.  */
   1524 
   1525 tree
   1526 build_int_cst (tree type, poly_int64 cst)
   1527 {
   1528   /* Support legacy code.  */
   1529   if (!type)
   1530     type = integer_type_node;
   1531 
   1532   return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
   1533 }
   1534 
   1535 /* Create a constant tree that contains CST zero-extended to TYPE.  */
   1536 
   1537 tree
   1538 build_int_cstu (tree type, poly_uint64 cst)
   1539 {
   1540   return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
   1541 }
   1542 
   1543 /* Create a constant tree that contains CST sign-extended to TYPE.  */
   1544 
   1545 tree
   1546 build_int_cst_type (tree type, poly_int64 cst)
   1547 {
   1548   gcc_assert (type);
   1549   return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
   1550 }
   1551 
   1552 /* Constructs tree in type TYPE from with value given by CST.  Signedness
   1553    of CST is assumed to be the same as the signedness of TYPE.  */
   1554 
   1555 tree
   1556 double_int_to_tree (tree type, double_int cst)
   1557 {
   1558   return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
   1559 }
   1560 
/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */


tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  /* Truncate/extend CST to the type's precision, then build an
	     unshared constant so the overflow bit can be set without
	     clobbering a cached node.  */
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      /* Polynomial value: each coefficient gets its own
		 overflowed INTEGER_CST.  */
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
   1612 
   1613 /* These are the hash table functions for the hash table of INTEGER_CST
   1614    nodes of a sizetype.  */
   1615 
   1616 /* Return the hash code X, an INTEGER_CST.  */
   1617 
   1618 hashval_t
   1619 int_cst_hasher::hash (tree x)
   1620 {
   1621   const_tree const t = x;
   1622   hashval_t code = TYPE_UID (TREE_TYPE (t));
   1623   int i;
   1624 
   1625   for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
   1626     code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
   1627 
   1628   return code;
   1629 }
   1630 
   1631 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   1632    is the same as that given by *Y, which is the same.  */
   1633 
   1634 bool
   1635 int_cst_hasher::equal (tree x, tree y)
   1636 {
   1637   const_tree const xt = x;
   1638   const_tree const yt = y;
   1639 
   1640   if (TREE_TYPE (xt) != TREE_TYPE (yt)
   1641       || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
   1642       || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
   1643     return false;
   1644 
   1645   for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
   1646     if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
   1647       return false;
   1648 
   1649   return true;
   1650 }
   1651 
   1652 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
   1653    SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
   1654    number of slots that can be cached for the type.  */
   1655 
   1656 static inline tree
   1657 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
   1658 			      int slot, int max_slots)
   1659 {
   1660   gcc_checking_assert (slot >= 0);
   1661   /* Initialize cache.  */
   1662   if (!TYPE_CACHED_VALUES_P (type))
   1663     {
   1664       TYPE_CACHED_VALUES_P (type) = 1;
   1665       TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
   1666     }
   1667   tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
   1668   if (!t)
   1669     {
   1670       /* Create a new shared int.  */
   1671       t = build_new_int_cst (type, cst);
   1672       TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
   1673     }
   1674   return t;
   1675 }
   1676 
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;		/* Slot in the per-type cache, or -1.  */
  int limit = 0;	/* Size of that cache.  */

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      /* The top element may only be redundant sign extension.  */
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  enum tree_code code = TREE_CODE (type);
  if (code == POINTER_TYPE || code == REFERENCE_TYPE)
    {
      /* Cache NULL pointer and zero bounds.  */
      if (cst == 0)
	ix = 0;
      /* Cache upper bounds of pointers.  */
      else if (cst == wi::max_value (prec, sgn))
	ix = 1;
      /* Cache 1 which is used for a non-zero range.  */
      else if (cst == 1)
	ix = 2;

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, 3);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && cst == wi::to_wide (t));
	  return t;
	}
    }
  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      /* Decide whether this value falls in the small per-type cache;
	 the cacheable range depends on the kind of type.  */
      switch (code)
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Ignore pointers, as they were already handled above.  */
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = param_integer_share_limit;
	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N).  */
	      limit = param_integer_share_limit + 1;
	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, limit);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && TREE_INT_CST_NUNITS (t) == 1
			       && TREE_INT_CST_OFFSET_NUNITS (t) == 1
			       && TREE_INT_CST_EXT_NUNITS (t) == 1
			       && TREE_INT_CST_ELT (t, 0) == hwi);
	  return t;
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
      else
	/* An equal node already existed; discard the freshly built
	   duplicate immediately.  */
	ggc_free (nt);
    }

  return t;
}
   1835 
   1836 hashval_t
   1837 poly_int_cst_hasher::hash (tree t)
   1838 {
   1839   inchash::hash hstate;
   1840 
   1841   hstate.add_int (TYPE_UID (TREE_TYPE (t)));
   1842   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
   1843     hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
   1844 
   1845   return hstate.end ();
   1846 }
   1847 
   1848 bool
   1849 poly_int_cst_hasher::equal (tree x, const compare_type &y)
   1850 {
   1851   if (TREE_TYPE (x) != y.first)
   1852     return false;
   1853   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
   1854     if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
   1855       return false;
   1856   return true;
   1857 }
   1858 
/* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
   The elements must also have type TYPE.  */

tree
build_poly_int_cst (tree type, const poly_wide_int_ref &values)
{
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (prec <= values.coeffs[0].get_precision ());
  /* Canonicalize the coefficients to the type's precision so that
     equal values always hash and compare equal below.  */
  poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);

  /* Mirror poly_int_cst_hasher::hash: the type's UID followed by each
     coefficient value.  */
  inchash::hash h;
  h.add_int (TYPE_UID (type));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (c.coeffs[i]);
  poly_int_cst_hasher::compare_type comp (type, &c);
  tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
							     INSERT);
  /* Build and cache a new node only when no equal constant is already
     shared; otherwise return the existing one.  */
  if (*slot == NULL_TREE)
    {
      tree coeffs[NUM_POLY_INT_COEFFS];
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
      *slot = build_new_poly_int_cst (type, coeffs);
    }
  return *slot;
}
   1885 
   1886 /* Create a constant tree with value VALUE in type TYPE.  */
   1887 
   1888 tree
   1889 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
   1890 {
   1891   if (value.is_constant ())
   1892     return wide_int_to_tree_1 (type, value.coeffs[0]);
   1893   return build_poly_int_cst (type, value);
   1894 }
   1895 
/* Insert INTEGER_CST T into a cache of integer constants.  And return
   the cached constant (which may or may not be T).  If MIGHT_DUPLICATE
   is false, and T falls into the type's 'smaller values' range, there
   cannot be an existing entry.  Otherwise, if MIGHT_DUPLICATE is true,
   or the value is large, should an existing entry exist, it is
   returned (rather than inserting T).  */

tree
cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (t);
  int ix = -1;		/* Index into TYPE_CACHED_VALUES, or -1 for "large".  */
  int limit = 0;	/* Required size of the small-values cache vector.  */
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  /* The caching indices here must match those in
     wide_int_to_tree_1.  */
  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_checking_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* Null pointer uses slot 0; the constant one shares slot 2 with
	   the generic integer cache in wide_int_to_tree_1.  */
	if (integer_zerop (t))
	  ix = 0;
	else if (integer_onep (t))
	  ix = 2;

	if (ix >= 0)
	  limit = 3;
      }
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = param_integer_share_limit;

	  /* This is a little hokie, but if the prec is smaller than
	     what is necessary to hold param_integer_share_limit, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t)
		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N.  Slot 0 holds -1, so a non-negative value V
	     lives in slot V + 1.  */
	  limit = param_integer_share_limit + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < param_integer_share_limit)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      /* The slot used by TYPE_CACHED_VALUES is used for the enum
	 members.  */
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
	{
	  /* A pre-existing small-value entry is only legal when the
	     caller said duplicates were possible; return the shared
	     node instead of T.  */
	  gcc_checking_assert (might_duplicate);
	  t = r;
	}
      else
	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      if (tree r = *slot)
	{
	  /* If there is already an entry for the number verify it's the
	     same value.  */
	  gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
	  /* And return the cached value.  */
	  t = r;
	}
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }

  return t;
}
   2025 
   2026 
   2027 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
   2028    and the rest are zeros.  */
   2029 
   2030 tree
   2031 build_low_bits_mask (tree type, unsigned bits)
   2032 {
   2033   gcc_assert (bits <= TYPE_PRECISION (type));
   2034 
   2035   return wide_int_to_tree (type, wi::mask (bits, false,
   2036 					   TYPE_PRECISION (type)));
   2037 }
   2038 
   2039 /* Checks that X is integer constant that can be expressed in (unsigned)
   2040    HOST_WIDE_INT without loss of precision.  */
   2041 
   2042 bool
   2043 cst_and_fits_in_hwi (const_tree x)
   2044 {
   2045   return (TREE_CODE (x) == INTEGER_CST
   2046 	  && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
   2047 }
   2048 
/* Build a newly constructed VECTOR_CST with the given values of
   (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN.  */

tree
make_vector (unsigned log2_npatterns,
	     unsigned int nelts_per_pattern MEM_STAT_DECL)
{
  gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
  tree t;
  unsigned npatterns = 1 << log2_npatterns;
  unsigned encoded_nelts = npatterns * nelts_per_pattern;
  /* struct tree_vector already has room for one trailing element,
     hence the "encoded_nelts - 1" extra slots allocated here.  */
  unsigned length = (sizeof (struct tree_vector)
		     + (encoded_nelts - 1) * sizeof (tree));

  record_node_allocation_statistics (VECTOR_CST, length);

  /* Cleared allocation: all elements start out as NULL_TREE and the
     flag bits as zero.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, VECTOR_CST);
  TREE_CONSTANT (t) = 1;
  VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
  VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;

  return t;
}
   2074 
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  */

tree
build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
{
  /* An empty initializer means all-zeros.  */
  if (vec_safe_length (v) == 0)
    return build_zero_cst (type);

  unsigned HOST_WIDE_INT idx, nelts;
  tree value;

  /* We can't construct a VECTOR_CST for a variable number of elements.  */
  nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
  tree_vector_builder vec (type, nelts, 1);
  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
	{
	  /* A nested VECTOR_CST initializer contributes each of its
	     elements in turn.  If NELTS is constant then this must
	     be too.  */
	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
	  for (unsigned i = 0; i < sub_nelts; ++i)
	    vec.quick_push (VECTOR_CST_ELT (value, i));
	}
      else
	vec.quick_push (value);
    }
  /* Elements not covered by the initializer are implicitly zero.  */
  while (vec.length () < nelts)
    vec.quick_push (build_zero_cst (TREE_TYPE (type)));

  return vec.build ();
}
   2107 
/* Build a vector of type VECTYPE where all the elements are SC.  */

tree
build_vector_from_val (tree vectype, tree sc)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      /* A constant splat: one pattern of one element repeats.  */
      tree_vector_builder v (vectype, 1, 1);
      v.quick_push (sc);
      return v.build ();
    }
  else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
    /* Variable-length vectors cannot be spelled out element by
       element; use a VEC_DUPLICATE_EXPR instead.  */
    return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
  else
    {
      /* Non-constant element, fixed length: build a CONSTRUCTOR that
	 repeats SC for every lane.  */
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
   2143 
   2144 /* If TYPE is not a vector type, just return SC, otherwise return
   2145    build_vector_from_val (TYPE, SC).  */
   2146 
   2147 tree
   2148 build_uniform_cst (tree type, tree sc)
   2149 {
   2150   if (!VECTOR_TYPE_P (type))
   2151     return sc;
   2152 
   2153   return build_vector_from_val (type, sc);
   2154 }
   2155 
   2156 /* Build a vector series of type TYPE in which element I has the value
   2157    BASE + I * STEP.  The result is a constant if BASE and STEP are constant
   2158    and a VEC_SERIES_EXPR otherwise.  */
   2159 
   2160 tree
   2161 build_vec_series (tree type, tree base, tree step)
   2162 {
   2163   if (integer_zerop (step))
   2164     return build_vector_from_val (type, base);
   2165   if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
   2166     {
   2167       tree_vector_builder builder (type, 1, 3);
   2168       tree elt1 = wide_int_to_tree (TREE_TYPE (base),
   2169 				    wi::to_wide (base) + wi::to_wide (step));
   2170       tree elt2 = wide_int_to_tree (TREE_TYPE (base),
   2171 				    wi::to_wide (elt1) + wi::to_wide (step));
   2172       builder.quick_push (base);
   2173       builder.quick_push (elt1);
   2174       builder.quick_push (elt2);
   2175       return builder.build ();
   2176     }
   2177   return build2 (VEC_SERIES_EXPR, type, base, step);
   2178 }
   2179 
/* Return a vector with the same number of units and number of bits
   as VEC_TYPE, but in which the elements are a linear series of unsigned
   integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */

tree
build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
{
  tree index_vec_type = vec_type;
  tree index_elt_type = TREE_TYPE (vec_type);
  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
  /* If the requested vector's element type is not already an unsigned
     integer, switch to an unsigned integer type of the same width so
     the series values are well-defined.  */
  if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
    {
      index_elt_type = build_nonstandard_integer_type
	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
      index_vec_type = build_vector_type (index_elt_type, nunits);
    }

  /* Three encoded elements (one pattern, three per pattern) are enough
     to describe the linear series; cf. build_vec_series.  */
  tree_vector_builder v (index_vec_type, 1, 3);
  for (unsigned int i = 0; i < 3; ++i)
    v.quick_push (build_int_cstu (index_elt_type, base + i * step));
  return v.build ();
}
   2202 
/* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
   elements are A and the rest are B.  */

tree
build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
{
  gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
  unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
  /* Optimize the constant case.  */
  if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
    count /= 2;
  /* COUNT patterns of two elements each; the loop below fills the
     encoded elements, placing A in the first NUM_A positions and B in
     the rest.  */
  tree_vector_builder builder (vec_type, count, 2);
  for (unsigned int i = 0; i < count * 2; ++i)
    builder.quick_push (i < num_a ? a : b);
  return builder.build ();
}
   2219 
   2220 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
   2221    calculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */
   2222 
   2223 void
   2224 recompute_constructor_flags (tree c)
   2225 {
   2226   unsigned int i;
   2227   tree val;
   2228   bool constant_p = true;
   2229   bool side_effects_p = false;
   2230   vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
   2231 
   2232   FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
   2233     {
   2234       /* Mostly ctors will have elts that don't have side-effects, so
   2235 	 the usual case is to scan all the elements.  Hence a single
   2236 	 loop for both const and side effects, rather than one loop
   2237 	 each (with early outs).  */
   2238       if (!TREE_CONSTANT (val))
   2239 	constant_p = false;
   2240       if (TREE_SIDE_EFFECTS (val))
   2241 	side_effects_p = true;
   2242     }
   2243 
   2244   TREE_SIDE_EFFECTS (c) = side_effects_p;
   2245   TREE_CONSTANT (c) = constant_p;
   2246 }
   2247 
   2248 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
   2249    CONSTRUCTOR C.  */
   2250 
   2251 void
   2252 verify_constructor_flags (tree c)
   2253 {
   2254   unsigned int i;
   2255   tree val;
   2256   bool constant_p = TREE_CONSTANT (c);
   2257   bool side_effects_p = TREE_SIDE_EFFECTS (c);
   2258   vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
   2259 
   2260   FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
   2261     {
   2262       if (constant_p && !TREE_CONSTANT (val))
   2263 	internal_error ("non-constant element in constant CONSTRUCTOR");
   2264       if (!side_effects_p && TREE_SIDE_EFFECTS (val))
   2265 	internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
   2266     }
   2267 }
   2268 
   2269 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   2270    are in the vec pointed to by VALS.  */
   2271 tree
   2272 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
   2273 {
   2274   tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
   2275 
   2276   TREE_TYPE (c) = type;
   2277   CONSTRUCTOR_ELTS (c) = vals;
   2278 
   2279   recompute_constructor_flags (c);
   2280 
   2281   return c;
   2282 }
   2283 
   2284 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
   2285    INDEX and VALUE.  */
   2286 tree
   2287 build_constructor_single (tree type, tree index, tree value)
   2288 {
   2289   vec<constructor_elt, va_gc> *v;
   2290   constructor_elt elt = {index, value};
   2291 
   2292   vec_alloc (v, 1);
   2293   v->quick_push (elt);
   2294 
   2295   return build_constructor (type, v);
   2296 }
   2297 
   2298 
   2299 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   2300    are in a list pointed to by VALS.  */
   2301 tree
   2302 build_constructor_from_list (tree type, tree vals)
   2303 {
   2304   tree t;
   2305   vec<constructor_elt, va_gc> *v = NULL;
   2306 
   2307   if (vals)
   2308     {
   2309       vec_alloc (v, list_length (vals));
   2310       for (t = vals; t; t = TREE_CHAIN (t))
   2311 	CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
   2312     }
   2313 
   2314   return build_constructor (type, v);
   2315 }
   2316 
   2317 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   2318    are in a vector pointed to by VALS.  Note that the TREE_PURPOSE
   2319    fields in the constructor remain null.  */
   2320 
   2321 tree
   2322 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
   2323 {
   2324   vec<constructor_elt, va_gc> *v = NULL;
   2325 
   2326   for (tree t : vals)
   2327     CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
   2328 
   2329   return build_constructor (type, v);
   2330 }
   2331 
   2332 /* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
   2333    of elements, provided as index/value pairs.  */
   2334 
   2335 tree
   2336 build_constructor_va (tree type, int nelts, ...)
   2337 {
   2338   vec<constructor_elt, va_gc> *v = NULL;
   2339   va_list p;
   2340 
   2341   va_start (p, nelts);
   2342   vec_alloc (v, nelts);
   2343   while (nelts--)
   2344     {
   2345       tree index = va_arg (p, tree);
   2346       tree value = va_arg (p, tree);
   2347       CONSTRUCTOR_APPEND_ELT (v, index, value);
   2348     }
   2349   va_end (p);
   2350   return build_constructor (type, v);
   2351 }
   2352 
   2353 /* Return a node of type TYPE for which TREE_CLOBBER_P is true.  */
   2354 
   2355 tree
   2356 build_clobber (tree type, enum clobber_kind kind)
   2357 {
   2358   tree clobber = build_constructor (type, NULL);
   2359   TREE_THIS_VOLATILE (clobber) = true;
   2360   CLOBBER_KIND (clobber) = kind;
   2361   return clobber;
   2362 }
   2363 
   2364 /* Return a new FIXED_CST node whose type is TYPE and value is F.  */
   2365 
   2366 tree
   2367 build_fixed (tree type, FIXED_VALUE_TYPE f)
   2368 {
   2369   tree v;
   2370   FIXED_VALUE_TYPE *fp;
   2371 
   2372   v = make_node (FIXED_CST);
   2373   fp = ggc_alloc<fixed_value> ();
   2374   memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
   2375 
   2376   TREE_TYPE (v) = type;
   2377   TREE_FIXED_CST_PTR (v) = fp;
   2378   return v;
   2379 }
   2380 
/* Return a new REAL_CST node whose type is TYPE and value is D.  */

tree
build_real (tree type, REAL_VALUE_TYPE d)
{
  tree v;
  REAL_VALUE_TYPE *dp;
  int overflow = 0;

  /* dconst{1,2,m1,half} are used in various places in
     the middle-end and optimizers, allow them here
     even for decimal floating point types as an exception
     by converting them to decimal.  */
  if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
      && d.cl == rvc_normal
      && !d.decimal)
    {
      /* Identify which of the four allowed constants D is by bitwise
	 comparison (REAL_VALUE_TYPEs are kept bitwise-comparable, cf.
	 real_value_from_int_cst), then rebuild it in decimal form.
	 Any other binary value in a decimal type is a caller bug.  */
      if (memcmp (&d, &dconst1, sizeof (d)) == 0)
	decimal_real_from_string (&d, "1");
      else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
	decimal_real_from_string (&d, "2");
      else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
	decimal_real_from_string (&d, "-1");
      else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
	decimal_real_from_string (&d, "0.5");
      else
	gcc_unreachable ();
    }

  /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
     Consider doing it via real_convert now.  */

  v = make_node (REAL_CST);
  dp = ggc_alloc<real_value> ();
  memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_REAL_CST_PTR (v) = dp;
  TREE_OVERFLOW (v) = overflow;
  return v;
}
   2422 
   2423 /* Like build_real, but first truncate D to the type.  */
   2424 
   2425 tree
   2426 build_real_truncate (tree type, REAL_VALUE_TYPE d)
   2427 {
   2428   return build_real (type, real_value_truncate (TYPE_MODE (type), d));
   2429 }
   2430 
   2431 /* Return a new REAL_CST node whose type is TYPE
   2432    and whose value is the integer value of the INTEGER_CST node I.  */
   2433 
   2434 REAL_VALUE_TYPE
   2435 real_value_from_int_cst (const_tree type, const_tree i)
   2436 {
   2437   REAL_VALUE_TYPE d;
   2438 
   2439   /* Clear all bits of the real value type so that we can later do
   2440      bitwise comparisons to see if two values are the same.  */
   2441   memset (&d, 0, sizeof d);
   2442 
   2443   real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
   2444 		     TYPE_SIGN (TREE_TYPE (i)));
   2445   return d;
   2446 }
   2447 
   2448 /* Given a tree representing an integer constant I, return a tree
   2449    representing the same value as a floating-point constant of type TYPE.  */
   2450 
   2451 tree
   2452 build_real_from_int_cst (tree type, const_tree i)
   2453 {
   2454   tree v;
   2455   int overflow = TREE_OVERFLOW (i);
   2456 
   2457   v = build_real (type, real_value_from_int_cst (type, i));
   2458 
   2459   TREE_OVERFLOW (v) |= overflow;
   2460   return v;
   2461 }
   2462 
   2463 /* Return a new REAL_CST node whose type is TYPE
   2464    and whose value is the integer value I which has sign SGN.  */
   2465 
   2466 tree
   2467 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
   2468 {
   2469   REAL_VALUE_TYPE d;
   2470 
   2471   /* Clear all bits of the real value type so that we can later do
   2472      bitwise comparisons to see if two values are the same.  */
   2473   memset (&d, 0, sizeof d);
   2474 
   2475   real_from_integer (&d, TYPE_MODE (type), i, sgn);
   2476   return build_real (type, d);
   2477 }
   2478 
/* Return a newly constructed STRING_CST node whose value is the LEN
   characters at STR when STR is nonnull, or all zeros otherwise.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (unsigned len, const char *str /*= NULL */)
{
  /* Do not waste bytes provided by padding of struct tree_string.
     The +1 reserves space for the NUL appended below.  */
  unsigned size = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, size);

  tree s = (tree) ggc_internal_alloc (size);

  /* Only the tree_typed header needs clearing; the string payload is
     fully overwritten just below.  */
  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  if (str)
    memcpy (s->string.str, str, len);
  else
    memset (s->string.str, 0, len);
  /* Always NUL-terminate, beyond the LEN counted characters.  */
  s->string.str[len] = '\0';

  return s;
}
   2506 
   2507 /* Return a newly constructed COMPLEX_CST node whose value is
   2508    specified by the real and imaginary parts REAL and IMAG.
   2509    Both REAL and IMAG should be constant nodes.  TYPE, if specified,
   2510    will be the type of the COMPLEX_CST; otherwise a new type will be made.  */
   2511 
   2512 tree
   2513 build_complex (tree type, tree real, tree imag)
   2514 {
   2515   gcc_assert (CONSTANT_CLASS_P (real));
   2516   gcc_assert (CONSTANT_CLASS_P (imag));
   2517 
   2518   tree t = make_node (COMPLEX_CST);
   2519 
   2520   TREE_REALPART (t) = real;
   2521   TREE_IMAGPART (t) = imag;
   2522   TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
   2523   TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
   2524   return t;
   2525 }
   2526 
   2527 /* Build a complex (inf +- 0i), such as for the result of cproj.
   2528    TYPE is the complex tree type of the result.  If NEG is true, the
   2529    imaginary zero is negative.  */
   2530 
   2531 tree
   2532 build_complex_inf (tree type, bool neg)
   2533 {
   2534   REAL_VALUE_TYPE rinf, rzero = dconst0;
   2535 
   2536   real_inf (&rinf);
   2537   rzero.sign = neg;
   2538   return build_complex (type, build_real (TREE_TYPE (type), rinf),
   2539 			build_real (TREE_TYPE (type), rzero));
   2540 }
   2541 
   2542 /* Return the constant 1 in type TYPE.  If TYPE has several elements, each
   2543    element is set to 1.  In particular, this is 1 + i for complex types.  */
   2544 
   2545 tree
   2546 build_each_one_cst (tree type)
   2547 {
   2548   if (TREE_CODE (type) == COMPLEX_TYPE)
   2549     {
   2550       tree scalar = build_one_cst (TREE_TYPE (type));
   2551       return build_complex (type, scalar, scalar);
   2552     }
   2553   else
   2554     return build_one_cst (type);
   2555 }
   2556 
/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Splat the scalar one across all elements.  */
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* The multiplicative identity of complex numbers is 1 + 0i;
	 contrast build_each_one_cst, which produces 1 + 1i.  */
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
   2594 
   2595 /* Return an integer of type TYPE containing all 1's in as much precision as
   2596    it contains, or a complex or vector whose subparts are such integers.  */
   2597 
   2598 tree
   2599 build_all_ones_cst (tree type)
   2600 {
   2601   if (TREE_CODE (type) == COMPLEX_TYPE)
   2602     {
   2603       tree scalar = build_all_ones_cst (TREE_TYPE (type));
   2604       return build_complex (type, scalar, scalar);
   2605     }
   2606   else
   2607     return build_minus_one_cst (type);
   2608 }
   2609 
/* Return a constant of arithmetic type TYPE which is the
   opposite of the multiplicative identity of the set TYPE.  */

tree
build_minus_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, -1);

    case REAL_TYPE:
      return build_real (type, dconstm1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type,
			  fixed_from_double_int (double_int_minus_one,
						 SCALAR_TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Splat the scalar -1 across all elements.  */
	tree scalar = build_minus_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* -1 + 0i; contrast build_all_ones_cst, which negates both
	 parts.  */
      return build_complex (type,
			    build_minus_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
   2649 
/* Build 0 constant of type TYPE.  This is used by constructor folding
   and thus the constant should be represented in memory by
   zero(es).  */

tree
build_zero_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case NULLPTR_TYPE:
      return build_int_cst (type, 0);

    case REAL_TYPE:
      return build_real (type, dconst0);

    case FIXED_POINT_TYPE:
      return build_fixed (type, FCONST0 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Splat the scalar zero across all elements.  */
	tree scalar = build_zero_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      {
	/* 0 + 0i.  */
	tree zero = build_zero_cst (TREE_TYPE (type));

	return build_complex (type, zero, zero);
      }

    default:
      /* Scalar types not listed above are converted from the shared
	 integer zero; aggregates get an empty CONSTRUCTOR, which is
	 all-zeros in memory.  */
      if (!AGGREGATE_TYPE_P (type))
	return fold_convert (type, integer_zero_node);
      return build_constructor (type, NULL);
    }
}
   2690 
   2691 
/* Build a BINFO with LEN language slots.  */

tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* The base-binfo vec is embedded at the tail of struct tree_binfo,
     so allocate room for its header plus BASE_BINFOS elements.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Clear only the fixed part; the embedded vec is initialized
     separately below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
   2713 
/* Create a CASE_LABEL_EXPR tree node and return it.  LOW_VALUE and
   HIGH_VALUE bound the case range (HIGH_VALUE may be NULL for a single
   value); LABEL_DECL is the LABEL_DECL jumped to, and also supplies the
   node's source location.  */

tree
build_case_label (tree low_value, tree high_value, tree label_decl)
{
  tree t = make_node (CASE_LABEL_EXPR);

  TREE_TYPE (t) = void_type_node;
  SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));

  CASE_LOW (t) = low_value;
  CASE_HIGH (t) = high_value;
  CASE_LABEL (t) = label_decl;
  CASE_CHAIN (t) = NULL_TREE;

  return t;
}
   2731 
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_int_cst already contains one HOST_WIDE_INT element,
     hence EXT_LEN - 1 extra elements here.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
   2763 
/* Build a newly constructed TREE_VEC node of length LEN.  */

tree
make_tree_vec (int len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_vec contains one trailing tree slot already.  */
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  record_node_allocation_statistics (TREE_VEC, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, TREE_VEC);
  TREE_VEC_LENGTH (t) = len;

  return t;
}
   2781 
/* Grow a TREE_VEC node V to new length LEN, which must exceed the
   current length.  Returns the (possibly moved) node.  */

tree
grow_tree_vec (tree v, int len MEM_STAT_DECL)
{
  gcc_assert (TREE_CODE (v) == TREE_VEC);

  int oldlen = TREE_VEC_LENGTH (v);
  gcc_assert (len > oldlen);

  size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  /* Only the growth is charged to the statistics.  */
  record_node_allocation_statistics (TREE_VEC, length - oldlength);

  v = (tree) ggc_realloc (v, length PASS_MEM_STAT);

  TREE_VEC_LENGTH (v) = len;

  return v;
}
   2803 
   2804 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
   2806    fixed, and scalar, complex or vector.  */
   2807 
   2808 bool
   2809 zerop (const_tree expr)
   2810 {
   2811   return (integer_zerop (expr)
   2812 	  || real_zerop (expr)
   2813 	  || fixed_zerop (expr));
   2814 }
   2815 
/* Return 1 if EXPR is the integer constant zero or a complex constant
   of zero, or a location wrapper for such a constant.  */

bool
integer_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return wi::to_wide (expr) == 0;
    case COMPLEX_CST:
      return (integer_zerop (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      /* A single duplicated pattern means every element is the same;
	 checking the one encoded element covers the whole vector.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
   2839 
/* Return 1 if EXPR is the integer constant one or the corresponding
   complex constant (1 + 0i), or a location wrapper for such a
   constant.  */

bool
integer_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      /* Compare in the widest form so precision does not matter.  */
      return wi::eq_p (wi::to_widest (expr), 1);
    case COMPLEX_CST:
      return (integer_onep (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      /* A duplicated single-pattern vector: check the one encoded
	 element for the whole vector.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
   2863 
   2864 /* Return 1 if EXPR is the integer constant one.  For complex and vector,
   2865    return 1 if every piece is the integer constant one.
   2866    Also return 1 for location wrappers for such a constant.  */
   2867 
   2868 bool
   2869 integer_each_onep (const_tree expr)
   2870 {
   2871   STRIP_ANY_LOCATION_WRAPPER (expr);
   2872 
   2873   if (TREE_CODE (expr) == COMPLEX_CST)
   2874     return (integer_onep (TREE_REALPART (expr))
   2875 	    && integer_onep (TREE_IMAGPART (expr)));
   2876   else
   2877     return integer_onep (expr);
   2878 }
   2879 
/* Return 1 if EXPR is an integer containing all 1's in as much precision as
   it contains, or a complex or vector whose subparts are such integers,
   or a location wrapper for such a constant.  */

bool
integer_all_onesp (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == COMPLEX_CST
      && integer_all_onesp (TREE_REALPART (expr))
      && integer_all_onesp (TREE_IMAGPART (expr)))
    return true;

  else if (TREE_CODE (expr) == VECTOR_CST)
    /* A duplicated single-pattern vector is all-ones iff its one
       encoded element is.  */
    return (VECTOR_CST_NPATTERNS (expr) == 1
	    && VECTOR_CST_DUPLICATE_P (expr)
	    && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));

  else if (TREE_CODE (expr) != INTEGER_CST)
    return false;

  /* All-ones means equal to the unsigned maximum of the type's
     precision.  */
  return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
	  == wi::to_wide (expr));
}
   2905 
   2906 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
   2907    for such a constant.  */
   2908 
   2909 bool
   2910 integer_minus_onep (const_tree expr)
   2911 {
   2912   STRIP_ANY_LOCATION_WRAPPER (expr);
   2913 
   2914   if (TREE_CODE (expr) == COMPLEX_CST)
   2915     return (integer_all_onesp (TREE_REALPART (expr))
   2916 	    && integer_zerop (TREE_IMAGPART (expr)));
   2917   else
   2918     return integer_all_onesp (expr);
   2919 }
   2920 
/* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
   one bit on), or a location wrapper for such a constant.  */

bool
integer_pow2p (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  /* A complex constant counts if its real part is a power of two and
     its imaginary part is zero.  */
  if (TREE_CODE (expr) == COMPLEX_CST
      && integer_pow2p (TREE_REALPART (expr))
      && integer_zerop (TREE_IMAGPART (expr)))
    return true;

  if (TREE_CODE (expr) != INTEGER_CST)
    return false;

  /* Exactly one bit set.  */
  return wi::popcount (wi::to_wide (expr)) == 1;
}
   2939 
   2940 /* Return 1 if EXPR is an integer constant other than zero or a
   2941    complex constant other than zero, or a location wrapper for such a
   2942    constant.  */
   2943 
   2944 bool
   2945 integer_nonzerop (const_tree expr)
   2946 {
   2947   STRIP_ANY_LOCATION_WRAPPER (expr);
   2948 
   2949   return ((TREE_CODE (expr) == INTEGER_CST
   2950 	   && wi::to_wide (expr) != 0)
   2951 	  || (TREE_CODE (expr) == COMPLEX_CST
   2952 	      && (integer_nonzerop (TREE_REALPART (expr))
   2953 		  || integer_nonzerop (TREE_IMAGPART (expr)))));
   2954 }
   2955 
   2956 /* Return 1 if EXPR is the integer constant one.  For vector,
   2957    return 1 if every piece is the integer constant minus one
   2958    (representing the value TRUE).
   2959    Also return 1 for location wrappers for such a constant.  */
   2960 
   2961 bool
   2962 integer_truep (const_tree expr)
   2963 {
   2964   STRIP_ANY_LOCATION_WRAPPER (expr);
   2965 
   2966   if (TREE_CODE (expr) == VECTOR_CST)
   2967     return integer_all_onesp (expr);
   2968   return integer_onep (expr);
   2969 }
   2970 
/* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
   for such a constant.  */

bool
fixed_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  return (TREE_CODE (expr) == FIXED_CST
	  && TREE_FIXED_CST (expr).data.is_zero ());
}
   2982 
/* Return the power of two represented by a tree node known to be a
   power of two.  For a complex constant, the real part is used.  */

int
tree_log2 (const_tree expr)
{
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  /* Returns -1 if EXPR is not an exact power of two.  */
  return wi::exact_log2 (wi::to_wide (expr));
}
   2994 
/* Similar, but return the largest integer Y such that 2 ** Y is less
   than or equal to EXPR.  */

int
tree_floor_log2 (const_tree expr)
{
  /* NOTE(review): the complex case delegates to tree_log2 (exact log2)
     rather than tree_floor_log2 — presumably callers only pass complex
     constants whose real part is an exact power of two; confirm.  */
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::floor_log2 (wi::to_wide (expr));
}
   3006 
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of its type.  */

unsigned int
tree_ctz (const_tree expr)
{
  /* Trailing-zero knowledge only makes sense for integral and pointer
     values.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (wi::to_wide (expr));
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use the nonzero-bits mask recorded for the SSA name.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These keep the trailing zeroes common to both operands.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only clear bits, so it keeps the better of the two.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Multiplication sums the factors' trailing zeroes.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  /* A constant in-range shift count adds that many zeroes.  */
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  /* Shifting right consumes up to RET2 of the known zeroes.  */
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two behaves like a right
	 shift by its log2.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* An operand known to be all-zero stays all-zero after
	 conversion, so promote to the new precision.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* Both arms must supply the trailing zeroes.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      /* Only the second operand's value matters.  */
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive trailing zeroes from the known pointer alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
   3117 
/* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
   decimal float constants, so don't return 1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconst0)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_zerop (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
	/* Don't simply check for a duplicate because the predicate
	   accepts both +0.0 and -0.0.  */
	unsigned count = vector_cst_encoded_nelts (expr);
	for (unsigned int i = 0; i < count; ++i)
	  if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
	    return false;
	return true;
      }
    default:
      return false;
    }
}
   3149 
/* Return 1 if EXPR is the real constant one in real or complex form.
   Trailing zeroes matter for decimal float constants, so don't return
   1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconst1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      /* One has a unique representation, so a duplicate check of the
	 single encoded element suffices (unlike real_zerop).  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
   3176 
/* Return 1 if EXPR is the real constant minus one.  Trailing zeroes
   matter for decimal float constants, so don't return 1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_minus_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconstm1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_minus_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      /* Minus one has a unique representation, so checking the single
	 duplicated encoded element suffices.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
   3202 
/* Return true if EXPR could be a floating point zero.  Conservative:
   anything that is not provably nonzero answers true.  */

bool
real_maybe_zerop (const_tree expr)
{
  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      /* Can't use real_zerop here, as it always returns false for decimal
	 floats.  And can't use TREE_REAL_CST (expr).cl == rvc_zero
	 either, as decimal zeros are rvc_normal.  */
      return real_equal (&TREE_REAL_CST (expr), &dconst0);
    case COMPLEX_CST:
      return (real_maybe_zerop (TREE_REALPART (expr))
	      || real_maybe_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      {
	/* True if any encoded element could be zero.  */
	unsigned count = vector_cst_encoded_nelts (expr);
	for (unsigned int i = 0; i < count; ++i)
	  if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
	    return true;
	return false;
      }
    default:
      /* Perhaps for SSA_NAMEs we could query frange.  */
      return true;
    }
}
   3231 
   3232 /* Nonzero if EXP is a constant or a cast of a constant.  */
   3233 
   3234 bool
   3235 really_constant_p (const_tree exp)
   3236 {
   3237   /* This is not quite the same as STRIP_NOPS.  It does more.  */
   3238   while (CONVERT_EXPR_P (exp)
   3239 	 || TREE_CODE (exp) == NON_LVALUE_EXPR)
   3240     exp = TREE_OPERAND (exp, 0);
   3241   return TREE_CONSTANT (exp);
   3242 }
   3243 
/* Return true if T holds a polynomial pointer difference, storing it in
   *VALUE if so.  A true return means that T's precision is no greater
   than 64 bits, which is the largest address space we support, so *VALUE
   never loses precision.  However, the signedness of the result does
   not necessarily match the signedness of T: sometimes an unsigned type
   like sizetype is used to encode a value that is actually negative.  */

bool
ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
{
  if (!t)
    return false;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      if (!cst_and_fits_in_hwi (t))
	return false;
      *value = int_cst_value (t);
      return true;
    }
  if (POLY_INT_CST_P (t))
    {
      /* Validate every coefficient before writing any, so *VALUE is
	 untouched on failure.  */
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
	  return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
      return true;
    }
  return false;
}
   3274 
/* Convert T, which must satisfy tree_fits_poly_int64_p, to a
   poly_int64.  */

poly_int64
tree_to_poly_int64 (const_tree t)
{
  gcc_assert (tree_fits_poly_int64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_shwi ();
  return TREE_INT_CST_LOW (t);
}
   3283 
/* Convert T, which must satisfy tree_fits_poly_uint64_p, to a
   poly_uint64.  */

poly_uint64
tree_to_poly_uint64 (const_tree t)
{
  gcc_assert (tree_fits_poly_uint64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_uhwi ();
  return TREE_INT_CST_LOW (t);
}
   3292 
   3293 /* Return first list element whose TREE_VALUE is ELEM.
   3295    Return 0 if ELEM is not in LIST.  */
   3296 
   3297 tree
   3298 value_member (tree elem, tree list)
   3299 {
   3300   while (list)
   3301     {
   3302       if (elem == TREE_VALUE (list))
   3303 	return list;
   3304       list = TREE_CHAIN (list);
   3305     }
   3306   return NULL_TREE;
   3307 }
   3308 
   3309 /* Return first list element whose TREE_PURPOSE is ELEM.
   3310    Return 0 if ELEM is not in LIST.  */
   3311 
   3312 tree
   3313 purpose_member (const_tree elem, tree list)
   3314 {
   3315   while (list)
   3316     {
   3317       if (elem == TREE_PURPOSE (list))
   3318 	return list;
   3319       list = TREE_CHAIN (list);
   3320     }
   3321   return NULL_TREE;
   3322 }
   3323 
/* Return true if ELEM is in the GC-allocated vector V (pointer
   identity, linear scan).  */

bool
vec_member (const_tree elem, vec<tree, va_gc> *v)
{
  unsigned ix;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (v, ix, t)
    if (elem == t)
      return true;
  return false;
}
   3336 
   3337 /* Returns element number IDX (zero-origin) of chain CHAIN, or
   3338    NULL_TREE.  */
   3339 
   3340 tree
   3341 chain_index (int idx, tree chain)
   3342 {
   3343   for (; chain && idx > 0; --idx)
   3344     chain = TREE_CHAIN (chain);
   3345   return chain;
   3346 }
   3347 
   3348 /* Return nonzero if ELEM is part of the chain CHAIN.  */
   3349 
   3350 bool
   3351 chain_member (const_tree elem, const_tree chain)
   3352 {
   3353   while (chain)
   3354     {
   3355       if (elem == chain)
   3356 	return true;
   3357       chain = DECL_CHAIN (chain);
   3358     }
   3359 
   3360   return false;
   3361 }
   3362 
/* Return the length of a chain of nodes chained through TREE_CHAIN.
   We expect a null pointer to mark the end of the chain.
   This is the Lisp primitive `length'.  */

int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  /* Q trails at half speed; if the chain is circular, P catches up to Q
     and the assert below fires (tortoise-and-hare cycle detection).  */
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
   3389 
   3390 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
   3391    UNION_TYPE TYPE, or NULL_TREE if none.  */
   3392 
   3393 tree
   3394 first_field (const_tree type)
   3395 {
   3396   tree t = TYPE_FIELDS (type);
   3397   while (t && TREE_CODE (t) != FIELD_DECL)
   3398     t = TREE_CHAIN (t);
   3399   return t;
   3400 }
   3401 
   3402 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
   3403    UNION_TYPE TYPE, or NULL_TREE if none.  */
   3404 
   3405 tree
   3406 last_field (const_tree type)
   3407 {
   3408   tree last = NULL_TREE;
   3409 
   3410   for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
   3411     {
   3412       if (TREE_CODE (fld) != FIELD_DECL)
   3413 	continue;
   3414 
   3415       last = fld;
   3416     }
   3417 
   3418   return last;
   3419 }
   3420 
/* Concatenate two chains of nodes (chained through TREE_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.
   This is the Lisp primitive `nconc'.  Destructive: OP1's last node
   is rewritten in place.  */

tree
chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  /* Walk to the last node of OP1 and splice OP2 on.  */
  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    /* Verify we did not create a cycle: OP2 must not already contain
       the former tail of OP1.  */
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
   3449 
   3450 /* Return the last node in a chain of nodes (chained through TREE_CHAIN).  */
   3451 
   3452 tree
   3453 tree_last (tree chain)
   3454 {
   3455   tree next;
   3456   if (chain)
   3457     while ((next = TREE_CHAIN (chain)))
   3458       chain = next;
   3459   return chain;
   3460 }
   3461 
   3462 /* Reverse the order of elements in the chain T,
   3463    and return the new head of the chain (old last element).  */
   3464 
   3465 tree
   3466 nreverse (tree t)
   3467 {
   3468   tree prev = 0, decl, next;
   3469   for (decl = t; decl; decl = next)
   3470     {
   3471       /* We shouldn't be using this function to reverse BLOCK chains; we
   3472 	 have blocks_nreverse for that.  */
   3473       gcc_checking_assert (TREE_CODE (decl) != BLOCK);
   3474       next = TREE_CHAIN (decl);
   3475       TREE_CHAIN (decl) = prev;
   3476       prev = decl;
   3477     }
   3478   return prev;
   3479 }
   3480 
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PARM and VALUE.  */

tree
build_tree_list (tree parm, tree value MEM_STAT_DECL)
{
  tree t = make_node (TREE_LIST PASS_MEM_STAT);
  TREE_PURPOSE (t) = parm;
  TREE_VALUE (t) = value;
  return t;
}
   3493 
/* Build a chain of TREE_LIST nodes from a vector, preserving order.
   Each list node has a null TREE_PURPOSE and the vector element as
   its TREE_VALUE.  */

tree
build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
{
  tree ret = NULL_TREE;
  /* PP always points at the TREE_CHAIN slot to fill next, so the
     chain is built head-to-tail without a final reversal.  */
  tree *pp = &ret;
  unsigned int i;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (vec, i, t)
    {
      *pp = build_tree_list (NULL, t PASS_MEM_STAT);
      pp = &TREE_CHAIN (*pp);
    }
  return ret;
}
   3510 
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PURPOSE and VALUE
   and whose TREE_CHAIN is CHAIN.  */

tree
tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  /* Only the common header needs clearing; the purpose/value/chain
     fields are all assigned below.  */
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
   3531 
/* Return the values of the elements of a CONSTRUCTOR CTOR as a vector
   of trees (indices/purposes are dropped).  */

vec<tree, va_gc> *
ctor_to_vec (tree ctor)
{
  vec<tree, va_gc> *vec;
  /* Reserve the exact count up front so quick_push never reallocates.  */
  vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
  unsigned int ix;
  tree val;

  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
    vec->quick_push (val);

  return vec;
}
   3548 
/* Return the size nominally occupied by an object of type TYPE
   when it resides in memory.  The value is measured in units of bytes,
   and its data type is that normally used for type sizes
   (which is the first type created by make_signed_type or
   make_unsigned_type).  LOC is used for the incomplete-type
   diagnostic.  */

tree
size_in_bytes_loc (location_t loc, const_tree type)
{
  tree t;

  if (type == error_mark_node)
    return integer_zero_node;

  type = TYPE_MAIN_VARIANT (type);
  t = TYPE_SIZE_UNIT (type);

  if (t == 0)
    {
      /* No size recorded: the type is incomplete.  Report and recover
	 with size zero.  */
      lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
      return size_zero_node;
    }

  return t;
}
   3575 
   3576 /* Return the size of TYPE (in bytes) as a wide integer
   3577    or return -1 if the size can vary or is larger than an integer.  */
   3578 
   3579 HOST_WIDE_INT
   3580 int_size_in_bytes (const_tree type)
   3581 {
   3582   tree t;
   3583 
   3584   if (type == error_mark_node)
   3585     return 0;
   3586 
   3587   type = TYPE_MAIN_VARIANT (type);
   3588   t = TYPE_SIZE_UNIT (type);
   3589 
   3590   if (t && tree_fits_uhwi_p (t))
   3591     return TREE_INT_CST_LOW (t);
   3592   else
   3593     return -1;
   3594 }
   3595 
/* Return the maximum size of TYPE (in bytes) as a wide integer
   or return -1 if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
max_int_size_in_bytes (const_tree type)
{
  HOST_WIDE_INT size = -1;
  tree size_tree;

  /* If this is an array type, check for a possible MAX_SIZE attached.  */

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      size_tree = TYPE_ARRAY_MAX_SIZE (type);

      if (size_tree && tree_fits_uhwi_p (size_tree))
	size = tree_to_uhwi (size_tree);
    }

  /* If we still haven't been able to get a size, see if the language
     can compute a maximum size.  */

  if (size == -1)
    {
      size_tree = lang_hooks.types.max_size (type);

      if (size_tree && tree_fits_uhwi_p (size_tree))
	size = tree_to_uhwi (size_tree);
    }

  return size;
}
   3628 
/* Return the bit position of FIELD, in bits from the start of the record.
   This is a tree of type bitsizetype.  */

tree
bit_position (const_tree field)
{
  return bit_from_pos (DECL_FIELD_OFFSET (field),
		       DECL_FIELD_BIT_OFFSET (field));
}
   3639 
/* Return the byte position of FIELD, in bytes from the start of the record.
   This is a tree of type sizetype.  */

tree
byte_position (const_tree field)
{
  return byte_from_pos (DECL_FIELD_OFFSET (field),
			DECL_FIELD_BIT_OFFSET (field));
}
   3650 
/* Likewise, but return as an integer.  It must be representable in
   that way (since it could be a signed value, we don't have the
   option of returning -1 like int_size_in_bytes can).  */

HOST_WIDE_INT
int_byte_position (const_tree field)
{
  return tree_to_shwi (byte_position (field));
}
   3660 
/* Return, as a tree node, the number of elements for TYPE (which is an
   ARRAY_TYPE) minus one.  This counts only elements of the top array.
   Returns error_mark_node when the domain or upper bound is missing.  */

tree
array_type_nelts (const_tree type)
{
  tree index_type, min, max;

  /* If they did it with unspecified bounds, then we should have already
     given an error about it before we got here.  */
  if (! TYPE_DOMAIN (type))
    return error_mark_node;

  index_type = TYPE_DOMAIN (type);
  min = TYPE_MIN_VALUE (index_type);
  max = TYPE_MAX_VALUE (index_type);

  /* TYPE_MAX_VALUE may not be set if the array has unknown length.  */
  if (!max)
    {
      /* zero sized arrays are represented from C FE as complete types with
	 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
	 them as min 0, max -1.  */
      if (COMPLETE_TYPE_P (type)
	  && integer_zerop (TYPE_SIZE (type))
	  && integer_zerop (min))
	return build_int_cst (TREE_TYPE (min), -1);

      return error_mark_node;
    }

  /* Fold MAX - MIN; the common zero-based case needs no subtraction.  */
  return (integer_zerop (min)
	  ? max
	  : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
}
   3697 
/* If arg is static -- a reference to an object in static storage -- then
   return the object.  This is not the same as the C meaning of `static'.
   If arg isn't static, return NULL.  */

tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* A variable qualifies only if it lives in static storage and is
	 neither thread-local nor dllimport'ed (for the latter the address
	 is not a link-time constant).  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      /* Otherwise the reference is static iff its base object is.  */
      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      /* A dereference is static when the address being dereferenced
	 is itself a compile-time constant.  */
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant-size element type and constant index: recurse on the
	 array; anything variable cannot be resolved statically.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
   3764 
   3765 
   3766 
   3768 
   3769 /* Return whether OP is a DECL whose address is function-invariant.  */
   3770 
   3771 bool
   3772 decl_address_invariant_p (const_tree op)
   3773 {
   3774   /* The conditions below are slightly less strict than the one in
   3775      staticp.  */
   3776 
   3777   switch (TREE_CODE (op))
   3778     {
   3779     case PARM_DECL:
   3780     case RESULT_DECL:
   3781     case LABEL_DECL:
   3782     case FUNCTION_DECL:
   3783       return true;
   3784 
   3785     case VAR_DECL:
   3786       if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
   3787           || DECL_THREAD_LOCAL_P (op)
   3788           || DECL_CONTEXT (op) == current_function_decl
   3789           || decl_function_context (op) == current_function_decl)
   3790         return true;
   3791       break;
   3792 
   3793     case CONST_DECL:
   3794       if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
   3795           || decl_function_context (op) == current_function_decl)
   3796         return true;
   3797       break;
   3798 
   3799     default:
   3800       break;
   3801     }
   3802 
   3803   return false;
   3804 }
   3805 
   3806 /* Return whether OP is a DECL whose address is interprocedural-invariant.  */
   3807 
   3808 bool
   3809 decl_address_ip_invariant_p (const_tree op)
   3810 {
   3811   /* The conditions below are slightly less strict than the one in
   3812      staticp.  */
   3813 
   3814   switch (TREE_CODE (op))
   3815     {
   3816     case LABEL_DECL:
   3817     case FUNCTION_DECL:
   3818     case STRING_CST:
   3819       return true;
   3820 
   3821     case VAR_DECL:
   3822       if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
   3823            && !DECL_DLLIMPORT_P (op))
   3824           || DECL_THREAD_LOCAL_P (op))
   3825         return true;
   3826       break;
   3827 
   3828     case CONST_DECL:
   3829       if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
   3830         return true;
   3831       break;
   3832 
   3833     default:
   3834       break;
   3835     }
   3836 
   3837   return false;
   3838 }
   3839 
   3840 
/* Return true if T is function-invariant (internal function, does
   not handle arithmetic; that's handled in skip_simple_arithmetic and
   tree_invariant_p).  */

static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants, and read-only trees without side effects, are invariant
     by definition.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:
      /* Walk down the chain of component references; the address is
	 invariant only if every index/offset along the way is.  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      /* Operands 2 and 3 are non-default lower bound and element
		 size; a non-NULL value there means they are not the
		 type's defaults, so bail out conservatively.  */
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      /* Operand 2 is a non-default field offset; treat its
		 presence as non-invariant, as above.  */
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      /* The base must be a constant or a decl with an invariant address.  */
      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
   3891 
   3892 /* Return true if T is function-invariant.  */
   3893 
   3894 bool
   3895 tree_invariant_p (tree t)
   3896 {
   3897   tree inner = skip_simple_arithmetic (t);
   3898   return tree_invariant_p_1 (inner);
   3899 }
   3900 
   3901 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
   3902    Do this to any expression which may be used in more than one place,
   3903    but must be evaluated only once.
   3904 
   3905    Normally, expand_expr would reevaluate the expression each time.
   3906    Calling save_expr produces something that is evaluated and recorded
   3907    the first time expand_expr is called on it.  Subsequent calls to
   3908    expand_expr just reuse the recorded value.
   3909 
   3910    The call to expand_expr that generates code that actually computes
   3911    the value is the first call *at compile time*.  Subsequent calls
   3912    *at compile time* generate code to use the saved value.
   3913    This produces correct result provided that *at run time* control
   3914    always flows through the insns made by the first expand_expr
   3915    before reaching the other places where the save_expr was evaluated.
   3916    You, the caller of save_expr, must make sure this is so.
   3917 
   3918    Constants, and certain read-only nodes, are returned with no
   3919    SAVE_EXPR because that is safe.  Expressions containing placeholders
   3920    are not touched; see tree.def for an explanation of what these
   3921    are used for.  */
   3922 
   3923 tree
   3924 save_expr (tree expr)
   3925 {
   3926   tree inner;
   3927 
   3928   /* If the tree evaluates to a constant, then we don't want to hide that
   3929      fact (i.e. this allows further folding, and direct checks for constants).
   3930      However, a read-only object that has side effects cannot be bypassed.
   3931      Since it is no problem to reevaluate literals, we just return the
   3932      literal node.  */
   3933   inner = skip_simple_arithmetic (expr);
   3934   if (TREE_CODE (inner) == ERROR_MARK)
   3935     return inner;
   3936 
   3937   if (tree_invariant_p_1 (inner))
   3938     return expr;
   3939 
   3940   /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
   3941      it means that the size or offset of some field of an object depends on
   3942      the value within another field.
   3943 
   3944      Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
   3945      and some variable since it would then need to be both evaluated once and
   3946      evaluated more than once.  Front-ends must assure this case cannot
   3947      happen by surrounding any such subexpressions in their own SAVE_EXPR
   3948      and forcing evaluation at the proper time.  */
   3949   if (contains_placeholder_p (inner))
   3950     return expr;
   3951 
   3952   expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
   3953 
   3954   /* This expression might be placed ahead of a jump to ensure that the
   3955      value was computed on both sides of the jump.  So make sure it isn't
   3956      eliminated as dead.  */
   3957   TREE_SIDE_EFFECTS (expr) = 1;
   3958   return expr;
   3959 }
   3960 
/* Look inside EXPR into any simple arithmetic operations.  Return the
   outermost non-arithmetic or non-invariant node.  */

tree
skip_simple_arithmetic (tree expr)
{
  /* We don't care about whether this can be used as an lvalue in this
     context.  */
  while (TREE_CODE (expr) == NON_LVALUE_EXPR)
    expr = TREE_OPERAND (expr, 0);

  /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
     a constant, it will be more efficient to not make another SAVE_EXPR since
     it will allow better simplification and GCSE will be able to merge the
     computations if they actually occur.  */
  while (true)
    {
      if (UNARY_CLASS_P (expr))
	expr = TREE_OPERAND (expr, 0);
      else if (BINARY_CLASS_P (expr))
	{
	  /* Before commutative binary operands are canonicalized,
	     it is quite common to have constants in the first operand.
	     Check for that common case first so that we don't walk
	     large expressions with tree_invariant_p unnecessarily.
	     This can still have terrible compile time complexity,
	     we should limit the depth of the tree_invariant_p and
	     skip_simple_arithmetic recursion.  */
	  if ((TREE_CONSTANT (TREE_OPERAND (expr, 0))
	       || (TREE_READONLY (TREE_OPERAND (expr, 0))
		   && !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 0))))
	      && tree_invariant_p (TREE_OPERAND (expr, 0)))
	    expr = TREE_OPERAND (expr, 1);
	  /* Otherwise descend into whichever operand is invariant,
	     preferring to keep operand 0 when operand 1 is invariant.  */
	  else if (tree_invariant_p (TREE_OPERAND (expr, 1)))
	    expr = TREE_OPERAND (expr, 0);
	  else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
	    expr = TREE_OPERAND (expr, 1);
	  else
	    break;
	}
      else
	break;
    }

  return expr;
}
   4007 
   4008 /* Look inside EXPR into simple arithmetic operations involving constants.
   4009    Return the outermost non-arithmetic or non-constant node.  */
   4010 
   4011 tree
   4012 skip_simple_constant_arithmetic (tree expr)
   4013 {
   4014   while (TREE_CODE (expr) == NON_LVALUE_EXPR)
   4015     expr = TREE_OPERAND (expr, 0);
   4016 
   4017   while (true)
   4018     {
   4019       if (UNARY_CLASS_P (expr))
   4020 	expr = TREE_OPERAND (expr, 0);
   4021       else if (BINARY_CLASS_P (expr))
   4022 	{
   4023 	  if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
   4024 	    expr = TREE_OPERAND (expr, 0);
   4025 	  else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
   4026 	    expr = TREE_OPERAND (expr, 1);
   4027 	  else
   4028 	    break;
   4029 	}
   4030       else
   4031 	break;
   4032     }
   4033 
   4034   return expr;
   4035 }
   4036 
   4037 /* Return which tree structure is used by T.  */
   4038 
   4039 enum tree_node_structure_enum
   4040 tree_node_structure (const_tree t)
   4041 {
   4042   const enum tree_code code = TREE_CODE (t);
   4043   return tree_node_structure_for_code (code);
   4044 }
   4045 
/* Set various status flags when building a CALL_EXPR object T.  */

static void
process_call_operands (tree t)
{
  bool side_effects = TREE_SIDE_EFFECTS (t);
  bool read_only = false;
  int i = call_expr_flags (t);

  /* Calls have side-effects, except those to const or pure functions.  */
  if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
    side_effects = true;
  /* Propagate TREE_READONLY of arguments for const functions.  */
  if (i & ECF_CONST)
    read_only = true;

  /* Only scan the operands if the flags above leave either answer in
     doubt; the scan starts at operand 1.  */
  if (!side_effects || read_only)
    for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
      {
	tree op = TREE_OPERAND (t, i);
	/* Any operand with side effects gives the whole call side
	   effects.  */
	if (op && TREE_SIDE_EFFECTS (op))
	  side_effects = true;
	/* A non-read-only, non-constant operand clears TREE_READONLY.  */
	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
	  read_only = false;
      }

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_READONLY (t) = read_only;
}
   4075 
/* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
   size or offset that depends on a field within a record.  */

bool
contains_placeholder_p (const_tree exp)
{
  enum tree_code code;

  if (!exp)
    return 0;

  code = TREE_CODE (exp);
  if (code == PLACEHOLDER_EXPR)
    return 1;

  /* Dispatch on the class of the tree code; only a few classes can
     carry a PLACEHOLDER_EXPR in a position we care about.  */
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
	 position computations since they will be converted into a
	 WITH_RECORD_EXPR involving the reference, which will assume
	 here will be valid.  */
      return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));

    case tcc_exceptional:
      /* For a TREE_LIST, check both the value and the rest of the
	 chain.  */
      if (code == TREE_LIST)
	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
      break;

    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
      switch (code)
	{
	case COMPOUND_EXPR:
	  /* Ignoring the first operand isn't quite right, but works best.  */
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));

	case COND_EXPR:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));

	case SAVE_EXPR:
	  /* The save_expr function never wraps anything containing
	     a PLACEHOLDER_EXPR. */
	  return 0;

	default:
	  break;
	}

      /* Generic case: check up to the first two operands.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
	case 2:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (code)
	{
	case CALL_EXPR:
	  {
	    /* For calls, placeholders can only appear in the argument
	       list.  */
	    const_tree arg;
	    const_call_expr_arg_iterator iter;
	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
	      if (CONTAINS_PLACEHOLDER_P (arg))
		return 1;
	    return 0;
	  }
	default:
	  return 0;
	}

    default:
      return 0;
    }
  return 0;
}
   4163 
/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
   directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
   field positions.  */

static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      /* These types carry no placeholder of their own beyond what the
	 size checks above already covered.  */
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* Check each field's offset, qualifier (for qualified unions)
	   and type.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
   4232 
   4233 /* Wrapper around above function used to cache its result.  */
   4234 
   4235 bool
   4236 type_contains_placeholder_p (tree type)
   4237 {
   4238   bool result;
   4239 
   4240   /* If the contains_placeholder_bits field has been initialized,
   4241      then we know the answer.  */
   4242   if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
   4243     return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
   4244 
   4245   /* Indicate that we've seen this type node, and the answer is false.
   4246      This is what we want to return if we run into recursion via fields.  */
   4247   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
   4248 
   4249   /* Compute the real value.  */
   4250   result = type_contains_placeholder_1 (type);
   4251 
   4252   /* Store the real value.  */
   4253   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
   4254 
   4255   return result;
   4256 }
   4257 
   4258 /* Push tree EXP onto vector QUEUE if it is not already present.  */
   4260 
   4261 static void
   4262 push_without_duplicates (tree exp, vec<tree> *queue)
   4263 {
   4264   unsigned int i;
   4265   tree iter;
   4266 
   4267   FOR_EACH_VEC_ELT (*queue, i, iter)
   4268     if (simple_cst_equal (iter, exp) == 1)
   4269       break;
   4270 
   4271   if (!iter)
   4272     queue->safe_push (exp);
   4273 }
   4274 
/* Given a tree EXP, find all occurrences of references to fields
   in a PLACEHOLDER_EXPR and place them in vector REFS without
   duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
   we assume here that EXP contains only arithmetic expressions
   or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
   argument list.  */

void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Walk down the chain of references to find the innermost base
	 object.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* A reference rooted at a PLACEHOLDER_EXPR is recorded whole;
	 otherwise keep searching inside the base.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
   }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
        if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

        /* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Recurse into each operand.  */
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Variable-length expressions: recurse starting at operand 1.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
   4348 
/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
   return a tree with all occurrences of references to F in a
   PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
   CONST_DECLs.  Note that we assume here that EXP contains only
   arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
   occurring only in their argument list.  */

tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      /* Return EXP itself when nothing changed, to preserve sharing.  */
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
   }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

        /* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Substitute in each operand, rebuilding the node only when
	   something actually changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant or with another
	     instance of one of the arguments of the call, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (code == CALL_EXPR)
	    {
	      bool maybe_inline = false;
	      if (CONSTANT_CLASS_P (r))
		maybe_inline = true;
	      else
		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
		    {
		      maybe_inline = true;
		      break;
		    }
	      if (maybe_inline)
		{
		  tree t = maybe_inline_call_in_expr (exp);
		  if (t)
		    return SUBSTITUTE_IN_EXPR (t, f, r);
		}
	    }

	  /* Substitute in the operands, copying the node lazily the
	     first time an operand changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Carry over flags that fold-building does not recompute.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
   4544 
/* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
   for it within OBJ, a tree that is an object or a chain of references.
   Rebuilds EXP bottom-up, sharing unchanged subtrees; returns EXP itself
   when no placeholder was found.  */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: walk down OBJ (through COMPOUND_EXPR/COND_EXPR
	 second operands, otherwise through operand 0 of expression-like
	 nodes) looking for a sub-object whose main type variant matches
	 the placeholder's type exactly.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second pass, same walk: also accept a pointer to the needed
	 type, in which case the replacement is a dereference of it.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      /* Share the original node when nothing changed.  */
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	/* Constants and decls cannot contain a PLACEHOLDER_EXPR.  */
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Substitute recursively in each operand; rebuild (and refold)
	   only if some operand actually changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Operand 0 of a variadic node holds its operand count, so
	     substitution starts at operand 1.  Copy the node lazily,
	     only when some operand actually changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Preserve flags of the original expression on the rebuilt one.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
   4715 
   4716 
/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.  */

static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
      /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
	 have side-effects.  */
      if (code == STATEMENT_LIST)
	return save_expr (e);
      /* FALLTHRU */
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* build_nt left the flags unset; copy them from the original node.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
   4803 
/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversions: stabilize the reference underneath.  */
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      /* The pointer operand is a full expression; use the _1 variant.  */
      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      /* Base is a reference; the index is a general expression.  */
      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

      /* If arg isn't a kind of lvalue we recognize, make no change.
	 Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  /* build_nt left flags and location unset; copy them from REF.  */
  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
  protected_set_expr_location (result, EXPR_LOCATION (ref));

  return result;
}
   4887 
   4888 /* Low-level constructors for expressions.  */
   4890 
/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR T by examining the object whose
   address is taken and all the offsets involved in reaching it.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

/* Fold the constancy/side-effects of NODE into the running TC/SE flags.
   A null NODE is ignored.  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  /* Index plus the optional lower-bound and element-size
	     operands all contribute to the flags.  */
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  /* Give the front end a chance to replace the innermost node with a
     decl and adjust the flags accordingly.  */
  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
   4967 
   4968 /* Build an expression of code CODE, data type TYPE, and operands as
   4969    specified.  Expressions and reference nodes can be created this way.
   4970    Constants, decls, types and misc nodes cannot be.
   4971 
   4972    We define 5 non-variadic functions, from 0 to 4 arguments.  This is
   4973    enough for all extant tree codes.  */
   4974 
   4975 tree
   4976 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
   4977 {
   4978   tree t;
   4979 
   4980   gcc_assert (TREE_CODE_LENGTH (code) == 0);
   4981 
   4982   t = make_node (code PASS_MEM_STAT);
   4983   TREE_TYPE (t) = tt;
   4984 
   4985   return t;
   4986 }
   4987 
/* Build a one-operand expression of code CODE, type TYPE and operand
   NODE, setting the constancy, side-effect and volatility flags that
   follow from the operand.  */
tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  /* Allocate and initialize the node directly instead of going through
     make_node; only the tree_common part is zeroed.  */
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  /* Inherit side effects and readonlyness from a non-type operand.  */
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    {
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
    }
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
   5051 
/* Helper for build2 through build5: store argument N as operand N of T
   and accumulate into the local SIDE_EFFECTS, READ_ONLY and CONSTANT
   flags the corresponding properties of the argument.  Arguments that
   are types (or null) do not affect the flags.  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
        if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
        if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
        if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
   5066 
/* Build a two-operand expression of code CODE, type TT and operands
   ARG0/ARG1, deriving the constancy, side-effect and volatility flags
   from the operands.  */
tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  /* Plain arithmetic on pointer types is only allowed between integer
     constants; everything else must use POINTER_PLUS_EXPR.  */
  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
         we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* A MEM_REF of &X inherits readonlyness/volatility from X itself,
	 not from the address expression.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
   5145 
   5146 
/* Build a three-operand expression of code CODE, type TT and the given
   operands, deriving flags from the operands as build2 does.  */
tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
   5187 
/* Build a four-operand expression of code CODE, type TT and the given
   operands, deriving flags from the operands as build2 does.  */
tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
   5214 
/* Build a five-operand expression of code CODE, type TT and the given
   operands, deriving flags from the operands as build2 does.  */
tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      /* Like MEM_REF in build2: a reference through &X takes its
	 readonlyness/volatility from X.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
   5252 
/* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
   on the pointer PTR.  LOC is the source location to give the result.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      if (TREE_CODE (ptr) == MEM_REF)
	{
	  /* Fold the inner MEM_REF's offset into ours and use its base
	     pointer directly.  */
	  offset += mem_ref_offset (ptr).force_shwi ();
	  ptr = TREE_OPERAND (ptr, 0);
	}
      else
	ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  /* Note the offset operand carries the original pointer type PTYPE.  */
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
   5284 
/* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.
   The offset operand (operand 1) is interpreted as a signed value.  */

poly_offset_int
mem_ref_offset (const_tree t)
{
  return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
				SIGNED);
}
   5293 
   5294 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
   5295    offsetted by OFFSET units.  */
   5296 
   5297 tree
   5298 build_invariant_address (tree type, tree base, poly_int64 offset)
   5299 {
   5300   tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
   5301 			  build_fold_addr_expr (base),
   5302 			  build_int_cst (ptr_type_node, offset));
   5303   tree addr = build1 (ADDR_EXPR, type, ref);
   5304   recompute_tree_invariant_for_addr_expr (addr);
   5305   return addr;
   5306 }
   5307 
   5308 /* Similar except don't specify the TREE_TYPE
   5309    and leave the TREE_SIDE_EFFECTS as 0.
   5310    It is permissible for arguments to be null,
   5311    or even garbage if their values do not matter.  */
   5312 
   5313 tree
   5314 build_nt (enum tree_code code, ...)
   5315 {
   5316   tree t;
   5317   int length;
   5318   int i;
   5319   va_list p;
   5320 
   5321   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
   5322 
   5323   va_start (p, code);
   5324 
   5325   t = make_node (code);
   5326   length = TREE_CODE_LENGTH (code);
   5327 
   5328   for (i = 0; i < length; i++)
   5329     TREE_OPERAND (t, i) = va_arg (p, tree);
   5330 
   5331   va_end (p);
   5332   return t;
   5333 }
   5334 
   5335 /* Similar to build_nt, but for creating a CALL_EXPR object with a
   5336    tree vec.  */
   5337 
   5338 tree
   5339 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
   5340 {
   5341   tree ret, t;
   5342   unsigned int ix;
   5343 
   5344   ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
   5345   CALL_EXPR_FN (ret) = fn;
   5346   CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
   5347   FOR_EACH_VEC_SAFE_ELT (args, ix, t)
   5348     CALL_EXPR_ARG (ret, ix) = t;
   5349   return ret;
   5350 }
   5351 
   5352 /* Create a DECL_... node of code CODE, name NAME  (if non-null)
   5354    and data type TYPE.
   5355    We do NOT enter this node in any sort of symbol table.
   5356 
   5357    LOC is the location of the decl.
   5358 
   5359    layout_decl is used to set up the decl's storage layout.
   5360    Other slots are initialized to 0 or null pointers.  */
   5361 
   5362 tree
   5363 build_decl (location_t loc, enum tree_code code, tree name,
   5364     		 tree type MEM_STAT_DECL)
   5365 {
   5366   tree t;
   5367 
   5368   t = make_node (code PASS_MEM_STAT);
   5369   DECL_SOURCE_LOCATION (t) = loc;
   5370 
   5371 /*  if (type == error_mark_node)
   5372     type = integer_type_node; */
   5373 /* That is not done, deliberately, so that having error_mark_node
   5374    as the type can suppress useless errors in the use of this variable.  */
   5375 
   5376   DECL_NAME (t) = name;
   5377   TREE_TYPE (t) = type;
   5378 
   5379   if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
   5380     layout_decl (t, 0);
   5381 
   5382   return t;
   5383 }
   5384 
   5385 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE.  */
   5386 
   5387 tree
   5388 build_debug_expr_decl (tree type)
   5389 {
   5390   tree vexpr = make_node (DEBUG_EXPR_DECL);
   5391   DECL_ARTIFICIAL (vexpr) = 1;
   5392   TREE_TYPE (vexpr) = type;
   5393   SET_DECL_MODE (vexpr, TYPE_MODE (type));
   5394   return vexpr;
   5395 }
   5396 
   5397 /* Builds and returns function declaration with NAME and TYPE.  */
   5398 
   5399 tree
   5400 build_fn_decl (const char *name, tree type)
   5401 {
   5402   tree id = get_identifier (name);
   5403   tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
   5404 
   5405   DECL_EXTERNAL (decl) = 1;
   5406   TREE_PUBLIC (decl) = 1;
   5407   DECL_ARTIFICIAL (decl) = 1;
   5408   TREE_NOTHROW (decl) = 1;
   5409 
   5410   return decl;
   5411 }
   5412 
/* Vector of every TRANSLATION_UNIT_DECL built so far; extended by
   build_translation_unit_decl below.  */
vec<tree, va_gc> *all_translation_units;

/* Builds a new translation-unit decl with name NAME, queues it in the
   global list of translation-unit decls and returns it.   */

tree
build_translation_unit_decl (tree name)
{
  tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
			name, NULL_TREE);
  /* Tag the unit with the name of the front end that created it.  */
  TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
  vec_safe_push (all_translation_units, tu);
  return tu;
}
   5427 
   5428 
   5429 /* BLOCK nodes are used to represent the structure of binding contours
   5431    and declarations, once those contours have been exited and their contents
   5432    compiled.  This information is used for outputting debugging info.  */
   5433 
   5434 tree
   5435 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
   5436 {
   5437   tree block = make_node (BLOCK);
   5438 
   5439   BLOCK_VARS (block) = vars;
   5440   BLOCK_SUBBLOCKS (block) = subblocks;
   5441   BLOCK_SUPERCONTEXT (block) = supercontext;
   5442   BLOCK_CHAIN (block) = chain;
   5443   return block;
   5444 }
   5445 
   5446 
   5447 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
   5449 
   5450    LOC is the location to use in tree T.  */
   5451 
   5452 void
   5453 protected_set_expr_location (tree t, location_t loc)
   5454 {
   5455   if (CAN_HAVE_LOCATION_P (t))
   5456     SET_EXPR_LOCATION (t, loc);
   5457   else if (t && TREE_CODE (t) == STATEMENT_LIST)
   5458     {
   5459       t = expr_single (t);
   5460       if (t && CAN_HAVE_LOCATION_P (t))
   5461 	SET_EXPR_LOCATION (t, loc);
   5462     }
   5463 }
   5464 
   5465 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
   5466    UNKNOWN_LOCATION.  */
   5467 
   5468 void
   5469 protected_set_expr_location_if_unset (tree t, location_t loc)
   5470 {
   5471   t = expr_single (t);
   5472   if (t && !EXPR_HAS_LOCATION (t))
   5473     protected_set_expr_location (t, loc);
   5474 }
   5475 
   5476 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
   5478    of the various TYPE_QUAL values.  */
   5479 
   5480 static void
   5481 set_type_quals (tree type, int type_quals)
   5482 {
   5483   TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
   5484   TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
   5485   TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
   5486   TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
   5487   TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
   5488 }
   5489 
   5490 /* Returns true iff CAND and BASE have equivalent language-specific
   5491    qualifiers.  */
   5492 
   5493 bool
   5494 check_lang_type (const_tree cand, const_tree base)
   5495 {
   5496   if (lang_hooks.types.type_hash_eq == NULL)
   5497     return true;
   5498   /* type_hash_eq currently only applies to these types.  */
   5499   if (TREE_CODE (cand) != FUNCTION_TYPE
   5500       && TREE_CODE (cand) != METHOD_TYPE)
   5501     return true;
   5502   return lang_hooks.types.type_hash_eq (cand, base);
   5503 }
   5504 
   5505 /* This function checks to see if TYPE matches the size one of the built-in
   5506    atomic types, and returns that core atomic type.  */
   5507 
   5508 static tree
   5509 find_atomic_core_type (const_tree type)
   5510 {
   5511   tree base_atomic_type;
   5512 
   5513   /* Only handle complete types.  */
   5514   if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
   5515     return NULL_TREE;
   5516 
   5517   switch (tree_to_uhwi (TYPE_SIZE (type)))
   5518     {
   5519     case 8:
   5520       base_atomic_type = atomicQI_type_node;
   5521       break;
   5522 
   5523     case 16:
   5524       base_atomic_type = atomicHI_type_node;
   5525       break;
   5526 
   5527     case 32:
   5528       base_atomic_type = atomicSI_type_node;
   5529       break;
   5530 
   5531     case 64:
   5532       base_atomic_type = atomicDI_type_node;
   5533       break;
   5534 
   5535     case 128:
   5536       base_atomic_type = atomicTI_type_node;
   5537       break;
   5538 
   5539     default:
   5540       base_atomic_type = NULL_TREE;
   5541     }
   5542 
   5543   return base_atomic_type;
   5544 }
   5545 
   5546 /* Returns true iff unqualified CAND and BASE are equivalent.  */
   5547 
   5548 bool
   5549 check_base_type (const_tree cand, const_tree base)
   5550 {
   5551   if (TYPE_NAME (cand) != TYPE_NAME (base)
   5552       /* Apparently this is needed for Objective-C.  */
   5553       || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
   5554       || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
   5555 			        TYPE_ATTRIBUTES (base)))
   5556     return false;
   5557   /* Check alignment.  */
   5558   if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
   5559       && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
   5560     return true;
   5561   /* Atomic types increase minimal alignment.  We must to do so as well
   5562      or we get duplicated canonical types. See PR88686.  */
   5563   if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
   5564     {
   5565       /* See if this object can map to a basic atomic type.  */
   5566       tree atomic_type = find_atomic_core_type (cand);
   5567       if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
   5568        return true;
   5569     }
   5570   return false;
   5571 }
   5572 
   5573 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS.  */
   5574 
   5575 bool
   5576 check_qualified_type (const_tree cand, const_tree base, int type_quals)
   5577 {
   5578   return (TYPE_QUALS (cand) == type_quals
   5579 	  && check_base_type (cand, base)
   5580 	  && check_lang_type (cand, base));
   5581 }
   5582 
   5583 /* Returns true iff CAND is equivalent to BASE with ALIGN.  */
   5584 
   5585 static bool
   5586 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
   5587 {
   5588   return (TYPE_QUALS (cand) == TYPE_QUALS (base)
   5589 	  && TYPE_NAME (cand) == TYPE_NAME (base)
   5590 	  /* Apparently this is needed for Objective-C.  */
   5591 	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
   5592 	  /* Check alignment.  */
   5593 	  && TYPE_ALIGN (cand) == align
   5594 	  /* Check this is a user-aligned type as build_aligned_type
   5595 	     would create.  */
   5596 	  && TYPE_USER_ALIGN (cand)
   5597 	  && attribute_list_equal (TYPE_ATTRIBUTES (cand),
   5598 				   TYPE_ATTRIBUTES (base))
   5599 	  && check_lang_type (cand, base));
   5600 }
   5601 
   5602 /* Return a version of the TYPE, qualified as indicated by the
   5603    TYPE_QUALS, if one exists.  If no qualified version exists yet,
   5604    return NULL_TREE.  */
   5605 
   5606 tree
   5607 get_qualified_type (tree type, int type_quals)
   5608 {
   5609   if (TYPE_QUALS (type) == type_quals)
   5610     return type;
   5611 
   5612   tree mv = TYPE_MAIN_VARIANT (type);
   5613   if (check_qualified_type (mv, type, type_quals))
   5614     return mv;
   5615 
   5616   /* Search the chain of variants to see if there is already one there just
   5617      like the one we need to have.  If so, use that existing one.  We must
   5618      preserve the TYPE_NAME, since there is code that depends on this.  */
   5619   for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
   5620     if (check_qualified_type (*tp, type, type_quals))
   5621       {
   5622 	/* Put the found variant at the head of the variant list so
   5623 	   frequently searched variants get found faster.  The C++ FE
   5624 	   benefits greatly from this.  */
   5625 	tree t = *tp;
   5626 	*tp = TYPE_NEXT_VARIANT (t);
   5627 	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
   5628 	TYPE_NEXT_VARIANT (mv) = t;
   5629 	return t;
   5630       }
   5631 
   5632   return NULL_TREE;
   5633 }
   5634 
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.

   TYPE is the base type and TYPE_QUALS the desired TYPE_QUAL_* mask;
   the result is a variant of TYPE carrying exactly those qualifiers.  */

tree
build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type PASS_MEM_STAT);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE.  Note this recursion bottoms out because the
	   canonical type is its own canonical type.  */
	{
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;

    }

  return t;
}
   5683 
   5684 /* Create a variant of type T with alignment ALIGN which
   5685    is measured in bits.  */
   5686 
   5687 tree
   5688 build_aligned_type (tree type, unsigned int align)
   5689 {
   5690   tree t;
   5691 
   5692   if (TYPE_PACKED (type)
   5693       || TYPE_ALIGN (type) == align)
   5694     return type;
   5695 
   5696   for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
   5697     if (check_aligned_type (t, type, align))
   5698       return t;
   5699 
   5700   t = build_variant_type_copy (type);
   5701   SET_TYPE_ALIGN (t, align);
   5702   TYPE_USER_ALIGN (t) = 1;
   5703 
   5704   return t;
   5705 }
   5706 
   5707 /* Create a new distinct copy of TYPE.  The new type is made its own
   5708    MAIN_VARIANT. If TYPE requires structural equality checks, the
   5709    resulting type requires structural equality checks; otherwise, its
   5710    TYPE_CANONICAL points to itself. */
   5711 
   5712 tree
   5713 build_distinct_type_copy (tree type MEM_STAT_DECL)
   5714 {
   5715   tree t = copy_node (type PASS_MEM_STAT);
   5716 
   5717   TYPE_POINTER_TO (t) = 0;
   5718   TYPE_REFERENCE_TO (t) = 0;
   5719 
   5720   /* Set the canonical type either to a new equivalence class, or
   5721      propagate the need for structural equality checks. */
   5722   if (TYPE_STRUCTURAL_EQUALITY_P (type))
   5723     SET_TYPE_STRUCTURAL_EQUALITY (t);
   5724   else
   5725     TYPE_CANONICAL (t) = t;
   5726 
   5727   /* Make it its own variant.  */
   5728   TYPE_MAIN_VARIANT (t) = t;
   5729   TYPE_NEXT_VARIANT (t) = 0;
   5730 
   5731   /* Note that it is now possible for TYPE_MIN_VALUE to be a value
   5732      whose TREE_TYPE is not t.  This can also happen in the Ada
   5733      frontend when using subtypes.  */
   5734 
   5735   return t;
   5736 }
   5737 
   5738 /* Create a new variant of TYPE, equivalent but distinct.  This is so
   5739    the caller can modify it. TYPE_CANONICAL for the return type will
   5740    be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
   5741    are considered equal by the language itself (or that both types
   5742    require structural equality checks). */
   5743 
   5744 tree
   5745 build_variant_type_copy (tree type MEM_STAT_DECL)
   5746 {
   5747   tree t, m = TYPE_MAIN_VARIANT (type);
   5748 
   5749   t = build_distinct_type_copy (type PASS_MEM_STAT);
   5750 
   5751   /* Since we're building a variant, assume that it is a non-semantic
   5752      variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
   5753   TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
   5754   /* Type variants have no alias set defined.  */
   5755   TYPE_ALIAS_SET (t) = -1;
   5756 
   5757   /* Add the new type to the chain of variants of TYPE.  */
   5758   TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
   5759   TYPE_NEXT_VARIANT (m) = t;
   5760   TYPE_MAIN_VARIANT (t) = m;
   5761 
   5762   return t;
   5763 }
   5764 
   5765 /* Return true if the from tree in both tree maps are equal.  */
   5767 
   5768 int
   5769 tree_map_base_eq (const void *va, const void *vb)
   5770 {
   5771   const struct tree_map_base  *const a = (const struct tree_map_base *) va,
   5772     *const b = (const struct tree_map_base *) vb;
   5773   return (a->from == b->from);
   5774 }
   5775 
   5776 /* Hash a from tree in a tree_base_map.  */
   5777 
   5778 unsigned int
   5779 tree_map_base_hash (const void *item)
   5780 {
   5781   return htab_hash_pointer (((const struct tree_map_base *)item)->from);
   5782 }
   5783 
   5784 /* Return true if this tree map structure is marked for garbage collection
   5785    purposes.  We simply return true if the from tree is marked, so that this
   5786    structure goes away when the from tree goes away.  */
   5787 
   5788 int
   5789 tree_map_base_marked_p (const void *p)
   5790 {
   5791   return ggc_marked_p (((const struct tree_map_base *) p)->from);
   5792 }
   5793 
   5794 /* Hash a from tree in a tree_map.  */
   5795 
   5796 unsigned int
   5797 tree_map_hash (const void *item)
   5798 {
   5799   return (((const struct tree_map *) item)->hash);
   5800 }
   5801 
   5802 /* Hash a from tree in a tree_decl_map.  */
   5803 
   5804 unsigned int
   5805 tree_decl_map_hash (const void *item)
   5806 {
   5807   return DECL_UID (((const struct tree_decl_map *) item)->base.from);
   5808 }
   5809 
   5810 /* Return the initialization priority for DECL.  */
   5811 
   5812 priority_type
   5813 decl_init_priority_lookup (tree decl)
   5814 {
   5815   symtab_node *snode = symtab_node::get (decl);
   5816 
   5817   if (!snode)
   5818     return DEFAULT_INIT_PRIORITY;
   5819   return
   5820     snode->get_init_priority ();
   5821 }
   5822 
   5823 /* Return the finalization priority for DECL.  */
   5824 
   5825 priority_type
   5826 decl_fini_priority_lookup (tree decl)
   5827 {
   5828   cgraph_node *node = cgraph_node::get (decl);
   5829 
   5830   if (!node)
   5831     return DEFAULT_INIT_PRIORITY;
   5832   return
   5833     node->get_fini_priority ();
   5834 }
   5835 
   5836 /* Set the initialization priority for DECL to PRIORITY.  */
   5837 
   5838 void
   5839 decl_init_priority_insert (tree decl, priority_type priority)
   5840 {
   5841   struct symtab_node *snode;
   5842 
   5843   if (priority == DEFAULT_INIT_PRIORITY)
   5844     {
   5845       snode = symtab_node::get (decl);
   5846       if (!snode)
   5847 	return;
   5848     }
   5849   else if (VAR_P (decl))
   5850     snode = varpool_node::get_create (decl);
   5851   else
   5852     snode = cgraph_node::get_create (decl);
   5853   snode->set_init_priority (priority);
   5854 }
   5855 
   5856 /* Set the finalization priority for DECL to PRIORITY.  */
   5857 
   5858 void
   5859 decl_fini_priority_insert (tree decl, priority_type priority)
   5860 {
   5861   struct cgraph_node *node;
   5862 
   5863   if (priority == DEFAULT_INIT_PRIORITY)
   5864     {
   5865       node = cgraph_node::get (decl);
   5866       if (!node)
   5867 	return;
   5868     }
   5869   else
   5870     node = cgraph_node::get_create (decl);
   5871   node->set_fini_priority (priority);
   5872 }
   5873 
   5874 /* Print out the statistics for the DECL_DEBUG_EXPR hash table.  */
   5875 
   5876 static void
   5877 print_debug_expr_statistics (void)
   5878 {
   5879   fprintf (stderr, "DECL_DEBUG_EXPR  hash: size %ld, %ld elements, %f collisions\n",
   5880 	   (long) debug_expr_for_decl->size (),
   5881 	   (long) debug_expr_for_decl->elements (),
   5882 	   debug_expr_for_decl->collisions ());
   5883 }
   5884 
   5885 /* Print out the statistics for the DECL_VALUE_EXPR hash table.  */
   5886 
   5887 static void
   5888 print_value_expr_statistics (void)
   5889 {
   5890   fprintf (stderr, "DECL_VALUE_EXPR  hash: size %ld, %ld elements, %f collisions\n",
   5891 	   (long) value_expr_for_decl->size (),
   5892 	   (long) value_expr_for_decl->elements (),
   5893 	   value_expr_for_decl->collisions ());
   5894 }
   5895 
   5896 /* Lookup a debug expression for FROM, and return it if we find one.  */
   5897 
   5898 tree
   5899 decl_debug_expr_lookup (tree from)
   5900 {
   5901   struct tree_decl_map *h, in;
   5902   in.base.from = from;
   5903 
   5904   h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
   5905   if (h)
   5906     return h->to;
   5907   return NULL_TREE;
   5908 }
   5909 
   5910 /* Insert a mapping FROM->TO in the debug expression hashtable.  */
   5911 
   5912 void
   5913 decl_debug_expr_insert (tree from, tree to)
   5914 {
   5915   struct tree_decl_map *h;
   5916 
   5917   h = ggc_alloc<tree_decl_map> ();
   5918   h->base.from = from;
   5919   h->to = to;
   5920   *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
   5921 }
   5922 
   5923 /* Lookup a value expression for FROM, and return it if we find one.  */
   5924 
   5925 tree
   5926 decl_value_expr_lookup (tree from)
   5927 {
   5928   struct tree_decl_map *h, in;
   5929   in.base.from = from;
   5930 
   5931   h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
   5932   if (h)
   5933     return h->to;
   5934   return NULL_TREE;
   5935 }
   5936 
   5937 /* Insert a mapping FROM->TO in the value expression hashtable.  */
   5938 
   5939 void
   5940 decl_value_expr_insert (tree from, tree to)
   5941 {
   5942   struct tree_decl_map *h;
   5943 
   5944   h = ggc_alloc<tree_decl_map> ();
   5945   h->base.from = from;
   5946   h->to = to;
   5947   *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
   5948 }
   5949 
   5950 /* Lookup a vector of debug arguments for FROM, and return it if we
   5951    find one.  */
   5952 
   5953 vec<tree, va_gc> **
   5954 decl_debug_args_lookup (tree from)
   5955 {
   5956   struct tree_vec_map *h, in;
   5957 
   5958   if (!DECL_HAS_DEBUG_ARGS_P (from))
   5959     return NULL;
   5960   gcc_checking_assert (debug_args_for_decl != NULL);
   5961   in.base.from = from;
   5962   h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
   5963   if (h)
   5964     return &h->to;
   5965   return NULL;
   5966 }
   5967 
   5968 /* Insert a mapping FROM->empty vector of debug arguments in the value
   5969    expression hashtable.  */
   5970 
   5971 vec<tree, va_gc> **
   5972 decl_debug_args_insert (tree from)
   5973 {
   5974   struct tree_vec_map *h;
   5975   tree_vec_map **loc;
   5976 
   5977   if (DECL_HAS_DEBUG_ARGS_P (from))
   5978     return decl_debug_args_lookup (from);
   5979   if (debug_args_for_decl == NULL)
   5980     debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
   5981   h = ggc_alloc<tree_vec_map> ();
   5982   h->base.from = from;
   5983   h->to = NULL;
   5984   loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
   5985   *loc = h;
   5986   DECL_HAS_DEBUG_ARGS_P (from) = 1;
   5987   return &h->to;
   5988 }
   5989 
/* Hashing of types so that we don't make duplicates.
   The entry point is `type_hash_canon'.  */

/* Generate the default hash code for TYPE.  This is designed for
   speed, rather than maximum entropy.  The set of fields hashed here
   must stay in sync with the comparisons in type_cache_hasher::equal,
   or equal types would land in different buckets.  */

hashval_t
type_hash_canon_hash (tree type)
{
  inchash::hash hstate;

  /* The tree code is always significant.  */
  hstate.add_int (TREE_CODE (type));

  /* Mix in the hash of the underlying type, if any (e.g. the element
     type of an array, the target of a pointer).  */
  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  /* Add code-specific discriminating fields.  */
  switch (TREE_CODE (type))
    {
    case METHOD_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      /* FALLTHROUGH. */
    case FUNCTION_TYPE:
      /* Hash the argument types; error_mark_node entries are skipped.  */
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
      break;

    case OFFSET_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
      break;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type))
	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
	/* TYPE_TYPELESS_STORAGE participates only for non-aggregate
	   element types, mirroring type_cache_hasher::equal.  */
	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  {
	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	    hstate.add_object (typeless);
	  }
      }
      break;

    case INTEGER_TYPE:
      {
	/* Hash the bounds: prefer the max value, falling back to the
	   min value when no max is recorded.  */
	tree t = TYPE_MAX_VALUE (type);
	if (!t)
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));
	break;
      }

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      {
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);
	break;
      }

    case VECTOR_TYPE:
      hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
      break;

    default:
      break;
    }

  return hstate.end ();
}
   6065 
/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  A and B are the hash
   entries being compared; the set of fields examined here must stay
   in sync with type_hash_canon_hash.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				 TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
          && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  /* Code-specific comparisons; a `break' falls through to the
     language-hook check at the end, a `return' decides directly.  */
  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      /* Fully determined by the common fields checked above.  */
      return 1;

    case VECTOR_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
		       TYPE_VECTOR_SUBPARTS (b->type));

    case ENUMERAL_TYPE:
      /* The value lists must be identical or element-wise equal.  */
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* fall through */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      /* Bounds compare equal either by pointer identity or by value.  */
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
        break;
      return 0;
    case ARRAY_TYPE:
      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;
      return 0;

    default:
      return 0;
    }

  /* FUNCTION_TYPE and METHOD_TYPE get a final language-specific say.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
   6189 
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.

   When an identical type is found, the freshly-built TYPE is freed
   back to the GC allocator (along with any min/max/cached INTEGER_CST
   values it owns) and the existing node is returned instead.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* Hit: an equivalent type already exists; discard TYPE.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
		  && t1 != type);
      /* Reclaim the UID if TYPE was the most recently created type.  */
      if (TYPE_UID (type) + 1 == next_type_uid)
	--next_type_uid;
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE)
	{
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	    {
	      /* Zero is always in TYPE_CACHED_VALUES.  */
	      if (! TYPE_UNSIGNED (type))
		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
	      ggc_free (TYPE_MIN_VALUE (type));
	    }
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	    {
	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
	      ggc_free (TYPE_MAX_VALUE (type));
	    }
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));
	}
      free_node (type);
      return t1;
    }
  else
    {
      /* Miss: record TYPE as the canonical object for this hash.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
   6261 
   6262 static void
   6263 print_type_hash_statistics (void)
   6264 {
   6265   fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
   6266 	   (long) type_hash_table->size (),
   6267 	   (long) type_hash_table->elements (),
   6268 	   type_hash_table->collisions ());
   6269 }
   6270 
   6271 /* Given two lists of types
   6272    (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
   6273    return 1 if the lists contain the same types in the same order.
   6274    Also, the TREE_PURPOSEs must match.  */
   6275 
   6276 bool
   6277 type_list_equal (const_tree l1, const_tree l2)
   6278 {
   6279   const_tree t1, t2;
   6280 
   6281   for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
   6282     if (TREE_VALUE (t1) != TREE_VALUE (t2)
   6283 	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
   6284 	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
   6285 		  && (TREE_TYPE (TREE_PURPOSE (t1))
   6286 		      == TREE_TYPE (TREE_PURPOSE (t2))))))
   6287       return false;
   6288 
   6289   return t1 == t2;
   6290 }
   6291 
   6292 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
   6293    given by TYPE.  If the argument list accepts variable arguments,
   6294    then this function counts only the ordinary arguments.  */
   6295 
   6296 int
   6297 type_num_arguments (const_tree fntype)
   6298 {
   6299   int i = 0;
   6300 
   6301   for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
   6302     /* If the function does not take a variable number of arguments,
   6303        the last element in the list will have type `void'.  */
   6304     if (VOID_TYPE_P (TREE_VALUE (t)))
   6305       break;
   6306     else
   6307       ++i;
   6308 
   6309   return i;
   6310 }
   6311 
   6312 /* Return the type of the function TYPE's argument ARGNO if known.
   6313    For vararg function's where ARGNO refers to one of the variadic
   6314    arguments return null.  Otherwise, return a void_type_node for
   6315    out-of-bounds ARGNO.  */
   6316 
   6317 tree
   6318 type_argument_type (const_tree fntype, unsigned argno)
   6319 {
   6320   /* Treat zero the same as an out-of-bounds argument number.  */
   6321   if (!argno)
   6322     return void_type_node;
   6323 
   6324   function_args_iterator iter;
   6325 
   6326   tree argtype;
   6327   unsigned i = 1;
   6328   FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
   6329     {
   6330       /* A vararg function's argument list ends in a null.  Otherwise,
   6331 	 an ordinary function's argument list ends with void.  Return
   6332 	 null if ARGNO refers to a vararg argument, void_type_node if
   6333 	 it's out of bounds, and the formal argument type otherwise.  */
   6334       if (!argtype)
   6335 	break;
   6336 
   6337       if (i == argno || VOID_TYPE_P (argtype))
   6338 	return argtype;
   6339 
   6340       ++i;
   6341     }
   6342 
   6343   return NULL_TREE;
   6344 }
   6345 
   6346 /* Nonzero if integer constants T1 and T2
   6347    represent the same constant value.  */
   6348 
   6349 int
   6350 tree_int_cst_equal (const_tree t1, const_tree t2)
   6351 {
   6352   if (t1 == t2)
   6353     return 1;
   6354 
   6355   if (t1 == 0 || t2 == 0)
   6356     return 0;
   6357 
   6358   STRIP_ANY_LOCATION_WRAPPER (t1);
   6359   STRIP_ANY_LOCATION_WRAPPER (t2);
   6360 
   6361   if (TREE_CODE (t1) == INTEGER_CST
   6362       && TREE_CODE (t2) == INTEGER_CST
   6363       && wi::to_widest (t1) == wi::to_widest (t2))
   6364     return 1;
   6365 
   6366   return 0;
   6367 }
   6368 
   6369 /* Return true if T is an INTEGER_CST whose numerical value (extended
   6370    according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  */
   6371 
   6372 bool
   6373 tree_fits_shwi_p (const_tree t)
   6374 {
   6375   return (t != NULL_TREE
   6376 	  && TREE_CODE (t) == INTEGER_CST
   6377 	  && wi::fits_shwi_p (wi::to_widest (t)));
   6378 }
   6379 
   6380 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
   6381    value (extended according to TYPE_UNSIGNED) fits in a poly_int64.  */
   6382 
   6383 bool
   6384 tree_fits_poly_int64_p (const_tree t)
   6385 {
   6386   if (t == NULL_TREE)
   6387     return false;
   6388   if (POLY_INT_CST_P (t))
   6389     {
   6390       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
   6391 	if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
   6392 	  return false;
   6393       return true;
   6394     }
   6395   return (TREE_CODE (t) == INTEGER_CST
   6396 	  && wi::fits_shwi_p (wi::to_widest (t)));
   6397 }
   6398 
   6399 /* Return true if T is an INTEGER_CST whose numerical value (extended
   6400    according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  */
   6401 
   6402 bool
   6403 tree_fits_uhwi_p (const_tree t)
   6404 {
   6405   return (t != NULL_TREE
   6406 	  && TREE_CODE (t) == INTEGER_CST
   6407 	  && wi::fits_uhwi_p (wi::to_widest (t)));
   6408 }
   6409 
   6410 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
   6411    value (extended according to TYPE_UNSIGNED) fits in a poly_uint64.  */
   6412 
   6413 bool
   6414 tree_fits_poly_uint64_p (const_tree t)
   6415 {
   6416   if (t == NULL_TREE)
   6417     return false;
   6418   if (POLY_INT_CST_P (t))
   6419     {
   6420       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
   6421 	if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
   6422 	  return false;
   6423       return true;
   6424     }
   6425   return (TREE_CODE (t) == INTEGER_CST
   6426 	  && wi::fits_uhwi_p (wi::to_widest (t)));
   6427 }
   6428 
   6429 /* T is an INTEGER_CST whose numerical value (extended according to
   6430    TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
   6431    HOST_WIDE_INT.  */
   6432 
   6433 HOST_WIDE_INT
   6434 tree_to_shwi (const_tree t)
   6435 {
   6436   gcc_assert (tree_fits_shwi_p (t));
   6437   return TREE_INT_CST_LOW (t);
   6438 }
   6439 
   6440 /* T is an INTEGER_CST whose numerical value (extended according to
   6441    TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
   6442    HOST_WIDE_INT.  */
   6443 
   6444 unsigned HOST_WIDE_INT
   6445 tree_to_uhwi (const_tree t)
   6446 {
   6447   gcc_assert (tree_fits_uhwi_p (t));
   6448   return TREE_INT_CST_LOW (t);
   6449 }
   6450 
   6451 /* Return the most significant (sign) bit of T.  */
   6452 
   6453 int
   6454 tree_int_cst_sign_bit (const_tree t)
   6455 {
   6456   unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
   6457 
   6458   return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
   6459 }
   6460 
   6461 /* Return an indication of the sign of the integer constant T.
   6462    The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
   6463    Note that -1 will never be returned if T's type is unsigned.  */
   6464 
   6465 int
   6466 tree_int_cst_sgn (const_tree t)
   6467 {
   6468   if (wi::to_wide (t) == 0)
   6469     return 0;
   6470   else if (TYPE_UNSIGNED (TREE_TYPE (t)))
   6471     return 1;
   6472   else if (wi::neg_p (wi::to_wide (t)))
   6473     return -1;
   6474   else
   6475     return 1;
   6476 }
   6477 
/* Return the minimum number of bits needed to represent VALUE in a
   signed or unsigned type; SGN says which.  */

unsigned int
tree_int_cst_min_precision (tree value, signop sgn)
{
  /* If the value is negative, compute its negative minus 1.  The latter
     adjustment is because the absolute value of the largest negative value
     is one larger than the largest positive value.  This is equivalent to
     a bit-wise negation, so use that operation instead.  */

  if (tree_int_cst_sgn (value) < 0)
    value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);

  /* Return the number of bits needed, taking into account the fact
     that we need one more bit for a signed than unsigned type.
     If value is 0 or -1 (mapped to 0 by the negation above), the
     minimum precision is 1 no matter what SGN is.  */

  if (integer_zerop (value))
    return 1;
  else
    return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
}
   6502 
   6503 /* Return truthvalue of whether T1 is the same tree structure as T2.
   6504    Return 1 if they are the same.
   6505    Return 0 if they are understandably different.
   6506    Return -1 if either contains tree structure not understood by
   6507    this function.  */
   6508 
   6509 int
   6510 simple_cst_equal (const_tree t1, const_tree t2)
   6511 {
   6512   enum tree_code code1, code2;
   6513   int cmp;
   6514   int i;
   6515 
   6516   if (t1 == t2)
   6517     return 1;
   6518   if (t1 == 0 || t2 == 0)
   6519     return 0;
   6520 
   6521   /* For location wrappers to be the same, they must be at the same
   6522      source location (and wrap the same thing).  */
   6523   if (location_wrapper_p (t1) && location_wrapper_p (t2))
   6524     {
   6525       if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
   6526 	return 0;
   6527       return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
   6528     }
   6529 
   6530   code1 = TREE_CODE (t1);
   6531   code2 = TREE_CODE (t2);
   6532 
   6533   if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
   6534     {
   6535       if (CONVERT_EXPR_CODE_P (code2)
   6536 	  || code2 == NON_LVALUE_EXPR)
   6537 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
   6538       else
   6539 	return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
   6540     }
   6541 
   6542   else if (CONVERT_EXPR_CODE_P (code2)
   6543 	   || code2 == NON_LVALUE_EXPR)
   6544     return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
   6545 
   6546   if (code1 != code2)
   6547     return 0;
   6548 
   6549   switch (code1)
   6550     {
   6551     case INTEGER_CST:
   6552       return wi::to_widest (t1) == wi::to_widest (t2);
   6553 
   6554     case REAL_CST:
   6555       return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
   6556 
   6557     case FIXED_CST:
   6558       return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
   6559 
   6560     case STRING_CST:
   6561       return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
   6562 	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
   6563 			 TREE_STRING_LENGTH (t1)));
   6564 
   6565     case CONSTRUCTOR:
   6566       {
   6567 	unsigned HOST_WIDE_INT idx;
   6568 	vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
   6569 	vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
   6570 
   6571 	if (vec_safe_length (v1) != vec_safe_length (v2))
   6572 	  return false;
   6573 
   6574         for (idx = 0; idx < vec_safe_length (v1); ++idx)
   6575 	  /* ??? Should we handle also fields here? */
   6576 	  if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
   6577 	    return false;
   6578 	return true;
   6579       }
   6580 
   6581     case SAVE_EXPR:
   6582       return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
   6583 
   6584     case CALL_EXPR:
   6585       cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
   6586       if (cmp <= 0)
   6587 	return cmp;
   6588       if (call_expr_nargs (t1) != call_expr_nargs (t2))
   6589 	return 0;
   6590       {
   6591 	const_tree arg1, arg2;
   6592 	const_call_expr_arg_iterator iter1, iter2;
   6593 	for (arg1 = first_const_call_expr_arg (t1, &iter1),
   6594 	       arg2 = first_const_call_expr_arg (t2, &iter2);
   6595 	     arg1 && arg2;
   6596 	     arg1 = next_const_call_expr_arg (&iter1),
   6597 	       arg2 = next_const_call_expr_arg (&iter2))
   6598 	  {
   6599 	    cmp = simple_cst_equal (arg1, arg2);
   6600 	    if (cmp <= 0)
   6601 	      return cmp;
   6602 	  }
   6603 	return arg1 == arg2;
   6604       }
   6605 
   6606     case TARGET_EXPR:
   6607       /* Special case: if either target is an unallocated VAR_DECL,
   6608 	 it means that it's going to be unified with whatever the
   6609 	 TARGET_EXPR is really supposed to initialize, so treat it
   6610 	 as being equivalent to anything.  */
   6611       if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
   6612 	   && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
   6613 	   && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
   6614 	  || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
   6615 	      && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
   6616 	      && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
   6617 	cmp = 1;
   6618       else
   6619 	cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
   6620 
   6621       if (cmp <= 0)
   6622 	return cmp;
   6623 
   6624       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
   6625 
   6626     case WITH_CLEANUP_EXPR:
   6627       cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
   6628       if (cmp <= 0)
   6629 	return cmp;
   6630 
   6631       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
   6632 
   6633     case COMPONENT_REF:
   6634       if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
   6635 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
   6636 
   6637       return 0;
   6638 
   6639     case VAR_DECL:
   6640     case PARM_DECL:
   6641     case CONST_DECL:
   6642     case FUNCTION_DECL:
   6643       return 0;
   6644 
   6645     default:
   6646       if (POLY_INT_CST_P (t1))
   6647 	/* A false return means maybe_ne rather than known_ne.  */
   6648 	return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
   6649 						TYPE_SIGN (TREE_TYPE (t1))),
   6650 			 poly_widest_int::from (poly_int_cst_value (t2),
   6651 						TYPE_SIGN (TREE_TYPE (t2))));
   6652       break;
   6653     }
   6654 
   6655   /* This general rule works for most tree codes.  All exceptions should be
   6656      handled above.  If this is a language-specific tree code, we can't
   6657      trust what might be in the operand, so say we don't know
   6658      the situation.  */
   6659   if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
   6660     return -1;
   6661 
   6662   switch (TREE_CODE_CLASS (code1))
   6663     {
   6664     case tcc_unary:
   6665     case tcc_binary:
   6666     case tcc_comparison:
   6667     case tcc_expression:
   6668     case tcc_reference:
   6669     case tcc_statement:
   6670       cmp = 1;
   6671       for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
   6672 	{
   6673 	  cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
   6674 	  if (cmp <= 0)
   6675 	    return cmp;
   6676 	}
   6677 
   6678       return cmp;
   6679 
   6680     default:
   6681       return -1;
   6682     }
   6683 }
   6684 
   6685 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
   6686    Return -1, 0, or 1 if the value of T is less than, equal to, or greater
   6687    than U, respectively.  */
   6688 
   6689 int
   6690 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
   6691 {
   6692   if (tree_int_cst_sgn (t) < 0)
   6693     return -1;
   6694   else if (!tree_fits_uhwi_p (t))
   6695     return 1;
   6696   else if (TREE_INT_CST_LOW (t) == u)
   6697     return 0;
   6698   else if (TREE_INT_CST_LOW (t) < u)
   6699     return -1;
   6700   else
   6701     return 1;
   6702 }
   6703 
   6704 /* Return true if SIZE represents a constant size that is in bounds of
   6705    what the middle-end and the backend accepts (covering not more than
   6706    half of the address-space).
   6707    When PERR is non-null, set *PERR on failure to the description of
   6708    why SIZE is not valid.  */
   6709 
   6710 bool
   6711 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
   6712 {
   6713   if (POLY_INT_CST_P (size))
   6714     {
   6715       if (TREE_OVERFLOW (size))
   6716 	return false;
   6717       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
   6718 	if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
   6719 	  return false;
   6720       return true;
   6721     }
   6722 
   6723   cst_size_error error;
   6724   if (!perr)
   6725     perr = &error;
   6726 
   6727   if (TREE_CODE (size) != INTEGER_CST)
   6728     {
   6729       *perr = cst_size_not_constant;
   6730       return false;
   6731     }
   6732 
   6733   if (TREE_OVERFLOW_P (size))
   6734     {
   6735       *perr = cst_size_overflow;
   6736       return false;
   6737     }
   6738 
   6739   if (tree_int_cst_sgn (size) < 0)
   6740     {
   6741       *perr = cst_size_negative;
   6742       return false;
   6743     }
   6744   if (!tree_fits_uhwi_p (size)
   6745       || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
   6746 	  < wi::to_widest (size) * 2))
   6747     {
   6748       *perr = cst_size_too_big;
   6749       return false;
   6750     }
   6751 
   6752   return true;
   6753 }
   6754 
   6755 /* Return the precision of the type, or for a complex or vector type the
   6756    precision of the type of its elements.  */
   6757 
   6758 unsigned int
   6759 element_precision (const_tree type)
   6760 {
   6761   if (!TYPE_P (type))
   6762     type = TREE_TYPE (type);
   6763   enum tree_code code = TREE_CODE (type);
   6764   if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
   6765     type = TREE_TYPE (type);
   6766 
   6767   return TYPE_PRECISION (type);
   6768 }
   6769 
   6770 /* Return true if CODE represents an associative tree code.  Otherwise
   6771    return false.  */
   6772 bool
   6773 associative_tree_code (enum tree_code code)
   6774 {
   6775   switch (code)
   6776     {
   6777     case BIT_IOR_EXPR:
   6778     case BIT_AND_EXPR:
   6779     case BIT_XOR_EXPR:
   6780     case PLUS_EXPR:
   6781     case MULT_EXPR:
   6782     case MIN_EXPR:
   6783     case MAX_EXPR:
   6784       return true;
   6785 
   6786     default:
   6787       break;
   6788     }
   6789   return false;
   6790 }
   6791 
   6792 /* Return true if CODE represents a commutative tree code.  Otherwise
   6793    return false.  */
   6794 bool
   6795 commutative_tree_code (enum tree_code code)
   6796 {
   6797   switch (code)
   6798     {
   6799     case PLUS_EXPR:
   6800     case MULT_EXPR:
   6801     case MULT_HIGHPART_EXPR:
   6802     case MIN_EXPR:
   6803     case MAX_EXPR:
   6804     case BIT_IOR_EXPR:
   6805     case BIT_XOR_EXPR:
   6806     case BIT_AND_EXPR:
   6807     case NE_EXPR:
   6808     case EQ_EXPR:
   6809     case UNORDERED_EXPR:
   6810     case ORDERED_EXPR:
   6811     case UNEQ_EXPR:
   6812     case LTGT_EXPR:
   6813     case TRUTH_AND_EXPR:
   6814     case TRUTH_XOR_EXPR:
   6815     case TRUTH_OR_EXPR:
   6816     case WIDEN_MULT_EXPR:
   6817     case VEC_WIDEN_MULT_HI_EXPR:
   6818     case VEC_WIDEN_MULT_LO_EXPR:
   6819     case VEC_WIDEN_MULT_EVEN_EXPR:
   6820     case VEC_WIDEN_MULT_ODD_EXPR:
   6821       return true;
   6822 
   6823     default:
   6824       break;
   6825     }
   6826   return false;
   6827 }
   6828 
   6829 /* Return true if CODE represents a ternary tree code for which the
   6830    first two operands are commutative.  Otherwise return false.  */
   6831 bool
   6832 commutative_ternary_tree_code (enum tree_code code)
   6833 {
   6834   switch (code)
   6835     {
   6836     case WIDEN_MULT_PLUS_EXPR:
   6837     case WIDEN_MULT_MINUS_EXPR:
   6838     case DOT_PROD_EXPR:
   6839       return true;
   6840 
   6841     default:
   6842       break;
   6843     }
   6844   return false;
   6845 }
   6846 
   6847 /* Returns true if CODE can overflow.  */
   6848 
   6849 bool
   6850 operation_can_overflow (enum tree_code code)
   6851 {
   6852   switch (code)
   6853     {
   6854     case PLUS_EXPR:
   6855     case MINUS_EXPR:
   6856     case MULT_EXPR:
   6857     case LSHIFT_EXPR:
   6858       /* Can overflow in various ways.  */
   6859       return true;
   6860     case TRUNC_DIV_EXPR:
   6861     case EXACT_DIV_EXPR:
   6862     case FLOOR_DIV_EXPR:
   6863     case CEIL_DIV_EXPR:
   6864       /* For INT_MIN / -1.  */
   6865       return true;
   6866     case NEGATE_EXPR:
   6867     case ABS_EXPR:
   6868       /* For -INT_MIN.  */
   6869       return true;
   6870     default:
   6871       /* These operators cannot overflow.  */
   6872       return false;
   6873     }
   6874 }
   6875 
   6876 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
   6877    ftrapv doesn't generate trapping insns for CODE.  */
   6878 
   6879 bool
   6880 operation_no_trapping_overflow (tree type, enum tree_code code)
   6881 {
   6882   gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
   6883 
   6884   /* We don't generate instructions that trap on overflow for complex or vector
   6885      types.  */
   6886   if (!INTEGRAL_TYPE_P (type))
   6887     return true;
   6888 
   6889   if (!TYPE_OVERFLOW_TRAPS (type))
   6890     return true;
   6891 
   6892   switch (code)
   6893     {
   6894     case PLUS_EXPR:
   6895     case MINUS_EXPR:
   6896     case MULT_EXPR:
   6897     case NEGATE_EXPR:
   6898     case ABS_EXPR:
   6899       /* These operators can overflow, and -ftrapv generates trapping code for
   6900 	 these.  */
   6901       return false;
   6902     case TRUNC_DIV_EXPR:
   6903     case EXACT_DIV_EXPR:
   6904     case FLOOR_DIV_EXPR:
   6905     case CEIL_DIV_EXPR:
   6906     case LSHIFT_EXPR:
   6907       /* These operators can overflow, but -ftrapv does not generate trapping
   6908 	 code for these.  */
   6909       return true;
   6910     default:
   6911       /* These operators cannot overflow.  */
   6912       return true;
   6913     }
   6914 }
   6915 
/* Constructors for pointer, array and function types.
   (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
   constructed by language-dependent code, not here.)  */

/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If MODE is VOIDmode, a pointer mode for the address
   space of TO_TYPE will be picked.  If CAN_ALIAS_ALL is TRUE,
   indicate this type can reference all of memory. If such a type has
   already been constructed, reuse it.  */

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request; the may_alias attribute
     below can force can_alias_all on, but the canonical type is built
     with the original setting in mind.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  if (mode == VOIDmode)
    {
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  Walk the TYPE_NEXT_PTR_TO chain.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type at the head of TO_TYPE's pointer-type chain.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}
   6987 
/* By default build pointers in ptr_mode.  */

tree
build_pointer_type (tree to_type)
{
  /* VOIDmode makes build_pointer_type_for_mode pick the pointer mode
     for TO_TYPE's address space; CAN_ALIAS_ALL defaults to false.  */
  return build_pointer_type_for_mode (to_type, VOIDmode, false);
}
   6995 
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request; the may_alias attribute
     below can force can_alias_all on, but the canonical type is built
     with the original setting in mind.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  if (mode == VOIDmode)
    {
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  Walk the TYPE_NEXT_REF_TO chain.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type at the head of TO_TYPE's reference-type chain.  */
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}
   7057 
   7058 
/* Build the node for the type of references-to-TO_TYPE by default
   in ptr_mode.  */

tree
build_reference_type (tree to_type)
{
  /* VOIDmode makes build_reference_type_for_mode pick the pointer mode
     for TO_TYPE's address space; CAN_ALIAS_ALL defaults to false.  */
  return build_reference_type_for_mode (to_type, VOIDmode, false);
}
   7067 
   7068 #define MAX_INT_CACHED_PREC \
   7069   (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
   7070 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
   7071 
   7072 static void
   7073 clear_nonstandard_integer_type_cache (void)
   7074 {
   7075   for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
   7076   {
   7077     nonstandard_integer_type_cache[i] = NULL;
   7078   }
   7079 }
   7080 
/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */
tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  /* The cache holds MAX_INT_CACHED_PREC + 1 signed entries followed by
     MAX_INT_CACHED_PREC + 1 unsigned entries; repurpose UNSIGNEDP as the
     index offset that selects the unsigned half.  */
  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  /* Fill in size, bounds and mode for the requested signedness.  */
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  /* Share one node per distinct type via the type hash table; hash on
     the max value, which encodes both precision and signedness.  */
  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  ret = type_hash_canon (hstate.end (), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
   7116 
   7117 #define MAX_BOOL_CACHED_PREC \
   7118   (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
   7119 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
   7120 
   7121 /* Builds a boolean type of precision PRECISION.
   7122    Used for boolean vectors to choose proper vector element size.  */
   7123 tree
   7124 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
   7125 {
   7126   tree type;
   7127 
   7128   if (precision <= MAX_BOOL_CACHED_PREC)
   7129     {
   7130       type = nonstandard_boolean_type_cache[precision];
   7131       if (type)
   7132 	return type;
   7133     }
   7134 
   7135   type = make_node (BOOLEAN_TYPE);
   7136   TYPE_PRECISION (type) = precision;
   7137   fixup_signed_type (type);
   7138 
   7139   if (precision <= MAX_INT_CACHED_PREC)
   7140     nonstandard_boolean_type_cache[precision] = type;
   7141 
   7142   return type;
   7143 }
   7144 
   7145 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
   7146    or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
   7147    is true, reuse such a type that has already been constructed.  */
   7148 
   7149 static tree
   7150 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
   7151 {
   7152   tree itype = make_node (INTEGER_TYPE);
   7153 
   7154   TREE_TYPE (itype) = type;
   7155 
   7156   TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
   7157   TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
   7158 
   7159   TYPE_PRECISION (itype) = TYPE_PRECISION (type);
   7160   SET_TYPE_MODE (itype, TYPE_MODE (type));
   7161   TYPE_SIZE (itype) = TYPE_SIZE (type);
   7162   TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
   7163   SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
   7164   TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
   7165   SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
   7166 
   7167   if (!shared)
   7168     return itype;
   7169 
   7170   if ((TYPE_MIN_VALUE (itype)
   7171        && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
   7172       || (TYPE_MAX_VALUE (itype)
   7173 	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
   7174     {
   7175       /* Since we cannot reliably merge this type, we need to compare it using
   7176 	 structural equality checks.  */
   7177       SET_TYPE_STRUCTURAL_EQUALITY (itype);
   7178       return itype;
   7179     }
   7180 
   7181   hashval_t hash = type_hash_canon_hash (itype);
   7182   itype = type_hash_canon (hash, itype);
   7183 
   7184   return itype;
   7185 }
   7186 
/* Wrapper around build_range_type_1 with SHARED set to true.  */

tree
build_range_type (tree type, tree lowval, tree highval)
{
  /* SHARED=true means an existing identical range type is reused.  */
  return build_range_type_1 (type, lowval, highval, true);
}
   7194 
/* Wrapper around build_range_type_1 with SHARED set to false.  */

tree
build_nonshared_range_type (tree type, tree lowval, tree highval)
{
  /* SHARED=false means a fresh, unmerged type node is always built.  */
  return build_range_type_1 (type, lowval, highval, false);
}
   7202 
/* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
   MAXVAL should be the maximum value in the domain
   (one less than the length of the array).

   The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
   We don't enforce this limit, that is up to caller (e.g. language front end).
   The limit exists because the result is a signed type and we don't handle
   sizes that use more than one HOST_WIDE_INT.  */

tree
build_index_type (tree maxval)
{
  /* The domain is [0, MAXVAL] in sizetype.  */
  return build_range_type (sizetype, size_zero_node, maxval);
}
   7217 
   7218 /* Return true if the debug information for TYPE, a subtype, should be emitted
   7219    as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
   7220    high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
   7221    debug info and doesn't reflect the source code.  */
   7222 
   7223 bool
   7224 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
   7225 {
   7226   tree base_type = TREE_TYPE (type), low, high;
   7227 
   7228   /* Subrange types have a base type which is an integral type.  */
   7229   if (!INTEGRAL_TYPE_P (base_type))
   7230     return false;
   7231 
   7232   /* Get the real bounds of the subtype.  */
   7233   if (lang_hooks.types.get_subrange_bounds)
   7234     lang_hooks.types.get_subrange_bounds (type, &low, &high);
   7235   else
   7236     {
   7237       low = TYPE_MIN_VALUE (type);
   7238       high = TYPE_MAX_VALUE (type);
   7239     }
   7240 
   7241   /* If the type and its base type have the same representation and the same
   7242      name, then the type is not a subrange but a copy of the base type.  */
   7243   if ((TREE_CODE (base_type) == INTEGER_TYPE
   7244        || TREE_CODE (base_type) == BOOLEAN_TYPE)
   7245       && int_size_in_bytes (type) == int_size_in_bytes (base_type)
   7246       && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
   7247       && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
   7248       && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
   7249     return false;
   7250 
   7251   if (lowval)
   7252     *lowval = low;
   7253   if (highval)
   7254     *highval = high;
   7255   return true;
   7256 }
   7257 
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
   If SHARED is true, reuse such a type that has already been constructed.
   If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */

tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
		    bool shared, bool set_canonical)
{
  tree t;

  /* Arrays of functions are rejected; fall back to int elements so
     compilation can continue after the error.  */
  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  TYPE_TYPELESS_STORAGE (t) = typeless_storage;
  layout_type (t);

  /* When sharing, merge with any identical existing array type.  */
  if (shared)
    {
      hashval_t hash = type_hash_canon_hash (t);
      t = type_hash_canon (hash, t);
    }

  /* Only compute a canonical type if T is its own canonical type,
     i.e. it was not merged with an existing node above.  */
  if (TYPE_CANONICAL (t) == t && set_canonical)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	/* Recurse to build the canonical array type over the canonical
	   element and index types.  */
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				typeless_storage, shared, set_canonical);
    }

  return t;
}
   7306 
   7307 /* Wrapper around build_array_type_1 with SHARED set to true.  */
   7308 
   7309 tree
   7310 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
   7311 {
   7312   return
   7313     build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
   7314 }
   7315 
   7316 /* Wrapper around build_array_type_1 with SHARED set to false.  */
   7317 
   7318 tree
   7319 build_nonshared_array_type (tree elt_type, tree index_type)
   7320 {
   7321   return build_array_type_1 (elt_type, index_type, false, false, true);
   7322 }
   7323 
   7324 /* Return a representation of ELT_TYPE[NELTS], using indices of type
   7325    sizetype.  */
   7326 
   7327 tree
   7328 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
   7329 {
   7330   return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
   7331 }
   7332 
   7333 /* Recursively examines the array elements of TYPE, until a non-array
   7334    element type is found.  */
   7335 
   7336 tree
   7337 strip_array_types (tree type)
   7338 {
   7339   while (TREE_CODE (type) == ARRAY_TYPE)
   7340     type = TREE_TYPE (type);
   7341 
   7342   return type;
   7343 }
   7344 
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  /* First pass: classify the list.  Stop early once a structural type
     is seen, since in that case ARGTYPES is returned unchanged.  */
  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
        any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
        {
          if (arg == void_list_node)
            is_void = true;
          else
            canon_argtypes = tree_cons (NULL_TREE,
                                        TYPE_CANONICAL (TREE_VALUE (arg)),
                                        canon_argtypes);
        }

      canon_argtypes = nreverse (canon_argtypes);
      /* Re-append the terminating void marker skipped above, if the
	 original list had one.  */
      if (is_void)
        canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
   7416 
   7417 /* Construct, lay out and return
   7418    the type of functions returning type VALUE_TYPE
   7419    given arguments of types ARG_TYPES.
   7420    ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
   7421    are data type nodes for the arguments of the function.
   7422    If such a type has already been constructed, reuse it.  */
   7423 
   7424 tree
   7425 build_function_type (tree value_type, tree arg_types)
   7426 {
   7427   tree t;
   7428   inchash::hash hstate;
   7429   bool any_structural_p, any_noncanonical_p;
   7430   tree canon_argtypes;
   7431 
   7432   gcc_assert (arg_types != error_mark_node);
   7433 
   7434   if (TREE_CODE (value_type) == FUNCTION_TYPE)
   7435     {
   7436       error ("function return type cannot be function");
   7437       value_type = integer_type_node;
   7438     }
   7439 
   7440   /* Make a node of the sort we want.  */
   7441   t = make_node (FUNCTION_TYPE);
   7442   TREE_TYPE (t) = value_type;
   7443   TYPE_ARG_TYPES (t) = arg_types;
   7444 
   7445   /* If we already have such a type, use the old one.  */
   7446   hashval_t hash = type_hash_canon_hash (t);
   7447   t = type_hash_canon (hash, t);
   7448 
   7449   /* Set up the canonical type. */
   7450   any_structural_p   = TYPE_STRUCTURAL_EQUALITY_P (value_type);
   7451   any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
   7452   canon_argtypes = maybe_canonicalize_argtypes (arg_types,
   7453 						&any_structural_p,
   7454 						&any_noncanonical_p);
   7455   if (any_structural_p)
   7456     SET_TYPE_STRUCTURAL_EQUALITY (t);
   7457   else if (any_noncanonical_p)
   7458     TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
   7459 					      canon_argtypes);
   7460 
   7461   if (!COMPLETE_TYPE_P (t))
   7462     layout_type (t);
   7463   return t;
   7464 }
   7465 
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must always be terminated by a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  /* Collect the argument types from ARGP into a TREE_LIST, consing at
     the head, so ARGS ends up in reverse declaration order.  */
  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      /* A varargs list must not be terminated by void_list_node.
	 LAST is the head before reversal, i.e. the node holding the
	 final argument passed in.  */
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    /* No named arguments: the "(void)" prototype.  */
    args = void_list_node;
  else
    {
      /* Restore declaration order, then terminate with void_list_node
	 to mark the list as non-varargs.  */
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args);

  return args;
}
   7498 
   7499 /* Build a function type.  The RETURN_TYPE is the type returned by the
   7500    function.  If additional arguments are provided, they are
   7501    additional argument types.  The list of argument types must always
   7502    be terminated by NULL_TREE.  */
   7503 
   7504 tree
   7505 build_function_type_list (tree return_type, ...)
   7506 {
   7507   tree args;
   7508   va_list p;
   7509 
   7510   va_start (p, return_type);
   7511   args = build_function_type_list_1 (false, return_type, p);
   7512   va_end (p);
   7513   return args;
   7514 }
   7515 
   7516 /* Build a variable argument function type.  The RETURN_TYPE is the
   7517    type returned by the function.  If additional arguments are provided,
   7518    they are additional argument types.  The list of argument types must
   7519    always be terminated by NULL_TREE.  */
   7520 
   7521 tree
   7522 build_varargs_function_type_list (tree return_type, ...)
   7523 {
   7524   tree args;
   7525   va_list p;
   7526 
   7527   va_start (p, return_type);
   7528   args = build_function_type_list_1 (true, return_type, p);
   7529   va_end (p);
   7530 
   7531   return args;
   7532 }
   7533 
   7534 /* Build a function type.  RETURN_TYPE is the type returned by the
   7535    function; VAARGS indicates whether the function takes varargs.  The
   7536    function takes N named arguments, the types of which are provided in
   7537    ARG_TYPES.  */
   7538 
   7539 static tree
   7540 build_function_type_array_1 (bool vaargs, tree return_type, int n,
   7541 			     tree *arg_types)
   7542 {
   7543   int i;
   7544   tree t = vaargs ? NULL_TREE : void_list_node;
   7545 
   7546   for (i = n - 1; i >= 0; i--)
   7547     t = tree_cons (NULL_TREE, arg_types[i], t);
   7548 
   7549   return build_function_type (return_type, t);
   7550 }
   7551 
   7552 /* Build a function type.  RETURN_TYPE is the type returned by the
   7553    function.  The function takes N named arguments, the types of which
   7554    are provided in ARG_TYPES.  */
   7555 
   7556 tree
   7557 build_function_type_array (tree return_type, int n, tree *arg_types)
   7558 {
   7559   return build_function_type_array_1 (false, return_type, n, arg_types);
   7560 }
   7561 
   7562 /* Build a variable argument function type.  RETURN_TYPE is the type
   7563    returned by the function.  The function takes N named arguments, the
   7564    types of which are provided in ARG_TYPES.  */
   7565 
   7566 tree
   7567 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
   7568 {
   7569   return build_function_type_array_1 (true, return_type, n, arg_types);
   7570 }
   7571 
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type. */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  /* Skip the implicit "this" argument prepended above; the recursive
     call below rebuilds it from the canonical basetype.  */
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
   7625 
   7626 /* Construct, lay out and return the type of methods belonging to class
   7627    BASETYPE and whose arguments and values are described by TYPE.
   7628    If that type exists already, reuse it.
   7629    TYPE must be a FUNCTION_TYPE node.  */
   7630 
   7631 tree
   7632 build_method_type (tree basetype, tree type)
   7633 {
   7634   gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
   7635 
   7636   return build_method_type_directly (basetype,
   7637 				     TREE_TYPE (type),
   7638 				     TYPE_ARG_TYPES (type));
   7639 }
   7640 
/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      /* NOTE(review): the canonical check compares the main variant's
	 canonical type against BASETYPE itself, which presumably
	 assumes BASETYPE is a main variant here -- confirm against
	 callers.  */
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
   7677 
/* Create a complex type whose components are COMPONENT_TYPE.

   If NAMED is true, the type is given a TYPE_NAME.  We do not always
   do so because this creates a DECL node and thus make the DECL_UIDs
   dependent on the type canonicalization hashtable, which is GC-ed,
   so the DECL_UIDs would not be stable wrt garbage collection.  */

tree
build_complex_type (tree component_type, bool named)
{
  /* Only integral, real and fixed-point component types are valid.  */
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  tree probe = make_node (COMPLEX_TYPE);

  /* The hash lookup is keyed on the main variant of the component;
     qualifiers are re-applied at the end.  */
  TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (probe);
  tree t = type_hash_canon (hash, probe);

  if (t == probe)
    {
      /* We created a new type.  The hash insertion will have laid
	 out the type.  We need to check the canonicalization and
	 maybe set the name.  */
      gcc_checking_assert (COMPLETE_TYPE_P (t)
			   && !TYPE_NAME (t)
			   && TYPE_CANONICAL (t) == t);

      if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);

      /* We need to create a name, since complex is a fundamental type.  */
      if (named)
	{
	  const char *name = NULL;

	  if (TREE_TYPE (t) == char_type_node)
	    name = "complex char";
	  else if (TREE_TYPE (t) == signed_char_type_node)
	    name = "complex signed char";
	  else if (TREE_TYPE (t) == unsigned_char_type_node)
	    name = "complex unsigned char";
	  else if (TREE_TYPE (t) == short_integer_type_node)
	    name = "complex short int";
	  else if (TREE_TYPE (t) == short_unsigned_type_node)
	    name = "complex short unsigned int";
	  else if (TREE_TYPE (t) == integer_type_node)
	    name = "complex int";
	  else if (TREE_TYPE (t) == unsigned_type_node)
	    name = "complex unsigned int";
	  else if (TREE_TYPE (t) == long_integer_type_node)
	    name = "complex long int";
	  else if (TREE_TYPE (t) == long_unsigned_type_node)
	    name = "complex long unsigned int";
	  else if (TREE_TYPE (t) == long_long_integer_type_node)
	    name = "complex long long int";
	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
	    name = "complex long long unsigned int";

	  /* Non-standard component types simply stay unnamed.  */
	  if (name != NULL)
	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
					get_identifier (name), t);
	}
    }

  /* Re-apply the qualifiers stripped from the component type above.  */
  return build_qualified_type (t, TYPE_QUALS (component_type));
}
   7752 
/* If TYPE is a real or complex floating-point type and the target
   does not directly support arithmetic on TYPE then return the wider
   type to be used for arithmetic on TYPE.  Otherwise, return
   NULL_TREE.  */

tree
excess_precision_type (tree type)
{
  /* The target can give two different responses to the question of
     which excess precision mode it would like depending on whether we
     are in -fexcess-precision=standard or -fexcess-precision=fast.  */

  enum excess_precision_type requested_type
    = (flag_excess_precision == EXCESS_PRECISION_FAST
       ? EXCESS_PRECISION_TYPE_FAST
       : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
	  ? EXCESS_PRECISION_TYPE_FLOAT16 :EXCESS_PRECISION_TYPE_STANDARD));

  enum flt_eval_method target_flt_eval_method
    = targetm.c.excess_precision (requested_type);

  /* The target should not ask for unpredictable float evaluation (though
     it might advertise that implicitly the evaluation is unpredictable,
     but we don't care about that here, it will have been reported
     elsewhere).  If it does ask for unpredictable evaluation, we have
     nothing to do here.  */
  gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);

  /* Nothing to do.  The target has asked for all types we know about
     to be computed with their native precision and range.  */
  if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
    return NULL_TREE;

  /* The target will promote this type in a target-dependent way, so excess
     precision ought to leave it alone.  */
  if (targetm.promoted_type (type) != NULL_TREE)
    return NULL_TREE;

  /* float16_type_node may not exist on all targets; guard with
     VOIDmode so the comparisons below simply never match then.  */
  machine_mode float16_type_mode = (float16_type_node
				    ? TYPE_MODE (float16_type_node)
				    : VOIDmode);
  machine_mode float_type_mode = TYPE_MODE (float_type_node);
  machine_mode double_type_mode = TYPE_MODE (double_type_node);

  /* Map TYPE's (component) mode to the promotion target dictated by
     the evaluation method; real and complex types promote to the
     corresponding real or complex node.  */
  switch (TREE_CODE (type))
    {
    case REAL_TYPE:
      {
	machine_mode type_mode = TYPE_MODE (type);
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    case COMPLEX_TYPE:
      {
	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
	  return NULL_TREE;
	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return complex_float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return complex_double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return complex_long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    default:
      break;
    }

  return NULL_TREE;
}
   7857 
/* Return OP, stripped of any conversions to wider types as much as is safe.
   Converting the value back to OP's type makes a value equivalent to OP.

   If FOR_TYPE is nonzero, we return a value which, if converted to
   type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.

   OP must have integer, real or enumeral type.  Pointers are not allowed!

   There are some cases where the obvious value we could return
   would regenerate to OP if converted to OP's type,
   but would not extend like OP to wider types.
   If FOR_TYPE indicates such extension is contemplated, we eschew such values.
   For example, if OP is (unsigned short)(signed char)-1,
   we avoid returning (signed char)-1 if FOR_TYPE is int,
   even though extending that to an unsigned short would regenerate OP,
   since the result of extending (signed char)-1 to (int)
   is different from (int) OP.  */

tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  tree win = op;

  /* Walk down through nested conversions, deciding at each level
     whether it can safely be stripped.  */
  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      /* Positive BITSCHANGE means this conversion is an extension,
	 negative means a truncation.  */
      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in sth smaller and
     in that case convert it.  */
  if (TREE_CODE (win) == INTEGER_CST)
    {
      tree wtype = TREE_TYPE (win);
      unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
      if (for_type)
	prec = MAX (prec, final_prec);
      if (prec < TYPE_PRECISION (wtype))
	{
	  /* The frontend may not provide a type of the exact requested
	     precision; only convert when it yields something smaller.  */
	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
	    win = fold_convert (t, win);
	}
    }

  return win;
}
   7955 
/* Return OP or a simpler expression for a narrower value
   which can be sign-extended or zero-extended to give back OP.
   Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
   or 0 if the value should be sign-extended.  */

tree
get_narrower (tree op, int *unsignedp_ptr)
{
  int uns = 0;
  int first = 1;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  if (op && TREE_CODE (op) == COMPOUND_EXPR)
    {
      /* Narrow only the value operand of a COMPOUND_EXPR chain, then
	 rebuild the chain around the narrowed value so the side
	 effects of the earlier operands are preserved.  */
      do
	op = TREE_OPERAND (op, 1);
      while (TREE_CODE (op) == COMPOUND_EXPR);
      tree ret = get_narrower (op, unsignedp_ptr);
      if (ret == op)
	return win;
      auto_vec <tree, 16> v;
      unsigned int i;
      for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
	   op = TREE_OPERAND (op, 1))
	v.safe_push (op);
      FOR_EACH_VEC_ELT_REVERSE (v, i, op)
	ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
			  TREE_TYPE (ret), TREE_OPERAND (op, 0),
			  ret);
      return ret;
    }
  while (TREE_CODE (op) == NOP_EXPR)
    {
      /* Positive BITSCHANGE means this conversion is an extension,
	 negative means a truncation.  */
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
   8069 
/* Return true if integer constant C has a value that is permissible
   for TYPE, an integral type, i.e. whether converting C to TYPE is
   guaranteed not to change its value.  */

bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

  /* Non-standard boolean types can have arbitrary precision but various
     transformations assume that they can only take values 0 and +/-1.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return wi::fits_to_boolean_p (wi::to_wide (c), type);

  /* We loop back here after replacing TYPE by its base type, to redo
     the bound checks against the new type.  */
retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision. If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
     for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
     for "constant known to fit".  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types, */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
	    return false;
	}
      else if (wi::neg_p (wi::to_wide (c)))
	return false;
    }

  /* If we haven't been able to decide at this point, there nothing more we
     can check ourselves here.  Look at the base type if we have one and it
     has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (wi::to_wide (c), type);
}
   8167 
   8168 /* Stores bounds of an integer TYPE in MIN and MAX.  If TYPE has non-constant
   8169    bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
   8170    represented (assuming two's-complement arithmetic) within the bit
   8171    precision of the type are returned instead.  */
   8172 
   8173 void
   8174 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
   8175 {
   8176   if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
   8177       && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
   8178     wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
   8179   else
   8180     {
   8181       if (TYPE_UNSIGNED (type))
   8182 	mpz_set_ui (min, 0);
   8183       else
   8184 	{
   8185 	  wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
   8186 	  wi::to_mpz (mn, min, SIGNED);
   8187 	}
   8188     }
   8189 
   8190   if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
   8191       && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
   8192     wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
   8193   else
   8194     {
   8195       wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
   8196       wi::to_mpz (mn, max, TYPE_SIGN (type));
   8197     }
   8198 }
   8199 
   8200 /* Return true if VAR is an automatic variable.  */
   8201 
   8202 bool
   8203 auto_var_p (const_tree var)
   8204 {
   8205   return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
   8206 	    || TREE_CODE (var) == PARM_DECL)
   8207 	   && ! TREE_STATIC (var))
   8208 	  || TREE_CODE (var) == RESULT_DECL);
   8209 }
   8210 
   8211 /* Return true if VAR is an automatic variable defined in function FN.  */
   8212 
   8213 bool
   8214 auto_var_in_fn_p (const_tree var, const_tree fn)
   8215 {
   8216   return (DECL_P (var) && DECL_CONTEXT (var) == fn
   8217 	  && (auto_var_p (var)
   8218 	      || TREE_CODE (var) == LABEL_DECL));
   8219 }
   8220 
   8221 /* Subprogram of following function.  Called by walk_tree.
   8222 
   8223    Return *TP if it is an automatic variable or parameter of the
   8224    function passed in as DATA.  */
   8225 
   8226 static tree
   8227 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
   8228 {
   8229   tree fn = (tree) data;
   8230 
   8231   if (TYPE_P (*tp))
   8232     *walk_subtrees = 0;
   8233 
   8234   else if (DECL_P (*tp)
   8235 	   && auto_var_in_fn_p (*tp, fn))
   8236     return *tp;
   8237 
   8238   return NULL_TREE;
   8239 }
   8240 
/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C, code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

/* Test if T is either variable (if FN is zero) or an expression containing
   a variable in FN.  If TYPE isn't gimplified, return true also if
   gimplify_one_sizepos would gimplify the expression into a local
   variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& !CONSTANT_CLASS_P (_t)					\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& (TREE_CODE (_t) != VAR_DECL				\
		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* Ada can have pointer types referring to themselves indirectly;
	 TREE_VISITED serves as a cycle guard around the recursion.  */
      if (TREE_VISITED (type))
	return false;
      TREE_VISITED (type) = true;
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	{
	  TREE_VISITED (type) = false;
	  return true;
	}
      TREE_VISITED (type) = false;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  Argument types are
	 deliberately not checked (see the header comment).  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	  return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    /* If the type is a qualified union, then the DECL_QUALIFIER
	       of fields can also be an expression containing a variable.  */
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));

	    /* If the field is a qualified union, then it's only a container
	       for what's inside so we look into it.  That's necessary in LTO
	       mode because the sizes of the field tested above have been set
	       to PLACEHOLDER_EXPRs by free_lang_data.  */
	    if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
		&& variably_modified_type_p (TREE_TYPE (t), fn))
	      return true;
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}
   8366 
   8367 /* Given a DECL or TYPE, return the scope in which it was declared, or
   8368    NULL_TREE if there is no containing scope.  */
   8369 
   8370 tree
   8371 get_containing_scope (const_tree t)
   8372 {
   8373   return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
   8374 }
   8375 
   8376 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL.  */
   8377 
   8378 const_tree
   8379 get_ultimate_context (const_tree decl)
   8380 {
   8381   while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
   8382     {
   8383       if (TREE_CODE (decl) == BLOCK)
   8384 	decl = BLOCK_SUPERCONTEXT (decl);
   8385       else
   8386 	decl = get_containing_scope (decl);
   8387     }
   8388   return decl;
   8389 }
   8390 
   8391 /* Return the innermost context enclosing DECL that is
   8392    a FUNCTION_DECL, or zero if none.  */
   8393 
   8394 tree
   8395 decl_function_context (const_tree decl)
   8396 {
   8397   tree context;
   8398 
   8399   if (TREE_CODE (decl) == ERROR_MARK)
   8400     return 0;
   8401 
   8402   /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
   8403      where we look up the function at runtime.  Such functions always take
   8404      a first argument of type 'pointer to real context'.
   8405 
   8406      C++ should really be fixed to use DECL_CONTEXT for the real context,
   8407      and use something else for the "virtual context".  */
   8408   else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
   8409     context
   8410       = TYPE_MAIN_VARIANT
   8411 	(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
   8412   else
   8413     context = DECL_CONTEXT (decl);
   8414 
   8415   while (context && TREE_CODE (context) != FUNCTION_DECL)
   8416     {
   8417       if (TREE_CODE (context) == BLOCK)
   8418 	context = BLOCK_SUPERCONTEXT (context);
   8419       else
   8420 	context = get_containing_scope (context);
   8421     }
   8422 
   8423   return context;
   8424 }
   8425 
   8426 /* Return the innermost context enclosing DECL that is
   8427    a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
   8428    TYPE_DECLs and FUNCTION_DECLs are transparent to this function.  */
   8429 
   8430 tree
   8431 decl_type_context (const_tree decl)
   8432 {
   8433   tree context = DECL_CONTEXT (decl);
   8434 
   8435   while (context)
   8436     switch (TREE_CODE (context))
   8437       {
   8438       case NAMESPACE_DECL:
   8439       case TRANSLATION_UNIT_DECL:
   8440 	return NULL_TREE;
   8441 
   8442       case RECORD_TYPE:
   8443       case UNION_TYPE:
   8444       case QUAL_UNION_TYPE:
   8445 	return context;
   8446 
   8447       case TYPE_DECL:
   8448       case FUNCTION_DECL:
   8449 	context = DECL_CONTEXT (context);
   8450 	break;
   8451 
   8452       case BLOCK:
   8453 	context = BLOCK_SUPERCONTEXT (context);
   8454 	break;
   8455 
   8456       default:
   8457 	gcc_unreachable ();
   8458       }
   8459 
   8460   return NULL_TREE;
   8461 }
   8462 
   8463 /* CALL is a CALL_EXPR.  Return the declaration for the function
   8464    called, or NULL_TREE if the called function cannot be
   8465    determined.  */
   8466 
   8467 tree
   8468 get_callee_fndecl (const_tree call)
   8469 {
   8470   tree addr;
   8471 
   8472   if (call == error_mark_node)
   8473     return error_mark_node;
   8474 
   8475   /* It's invalid to call this function with anything but a
   8476      CALL_EXPR.  */
   8477   gcc_assert (TREE_CODE (call) == CALL_EXPR);
   8478 
   8479   /* The first operand to the CALL is the address of the function
   8480      called.  */
   8481   addr = CALL_EXPR_FN (call);
   8482 
   8483   /* If there is no function, return early.  */
   8484   if (addr == NULL_TREE)
   8485     return NULL_TREE;
   8486 
   8487   STRIP_NOPS (addr);
   8488 
   8489   /* If this is a readonly function pointer, extract its initial value.  */
   8490   if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
   8491       && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
   8492       && DECL_INITIAL (addr))
   8493     addr = DECL_INITIAL (addr);
   8494 
   8495   /* If the address is just `&f' for some function `f', then we know
   8496      that `f' is being called.  */
   8497   if (TREE_CODE (addr) == ADDR_EXPR
   8498       && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
   8499     return TREE_OPERAND (addr, 0);
   8500 
   8501   /* We couldn't figure out what was being called.  */
   8502   return NULL_TREE;
   8503 }
   8504 
/* Return true when CALL's arguments and return value match those of FNDECL,
   a decl of a builtin function.  Certain mismatches known to be harmless
   (pointer flavor differences, frontend integral promotions) still count
   as compatible.  */

static bool
tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);

  /* Prefer the canonical builtin decl recorded for this function code,
     if any, so the comparison uses the expected builtin signature.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
      fndecl = decl;

  /* In GIMPLE, type compatibility is useless_type_conversion_p; in
     GENERIC, compare main variants instead.  */
  bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
  /* First the return type.  */
  if (gimple_form
      ? !useless_type_conversion_p (TREE_TYPE (call),
				    TREE_TYPE (TREE_TYPE (fndecl)))
      : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
	 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
    return false;

  /* Then each actual argument against the corresponding formal type.  */
  tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  unsigned nargs = call_expr_nargs (call);
  for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
    {
      /* Variadic args follow.  */
      if (!targs)
	return true;
      tree arg = CALL_EXPR_ARG (call, i);
      tree type = TREE_VALUE (targs);
      if (gimple_form
	  ? !useless_type_conversion_p (type, TREE_TYPE (arg))
	  : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
	{
	  /* For pointer arguments be more forgiving, e.g. due to
	     FILE * vs. fileptr_type_node, or say char * vs. const char *
	     differences etc.  */
	  if (!gimple_form
	      && POINTER_TYPE_P (type)
	      && POINTER_TYPE_P (TREE_TYPE (arg))
	      && tree_nop_conversion_p (type, TREE_TYPE (arg)))
	    continue;
	  /* char/short integral arguments are promoted to int
	     by several frontends if targetm.calls.promote_prototypes
	     is true.  Allow such promotion too.  */
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg))
	      && !TYPE_UNSIGNED (TREE_TYPE (arg))
	      && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
	      && (gimple_form
		  ? useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg))
		  : tree_nop_conversion_p (integer_type_node,
					   TREE_TYPE (arg))))
	    continue;
	  return false;
	}
    }
  /* If formals remain, they must be the terminating void (i.e. the call
     supplied every fixed argument).  */
  if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
    return false;
  return true;
}
   8567 
   8568 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
   8569    return the associated function code, otherwise return CFN_LAST.  */
   8570 
   8571 combined_fn
   8572 get_call_combined_fn (const_tree call)
   8573 {
   8574   /* It's invalid to call this function with anything but a CALL_EXPR.  */
   8575   gcc_assert (TREE_CODE (call) == CALL_EXPR);
   8576 
   8577   if (!CALL_EXPR_FN (call))
   8578     return as_combined_fn (CALL_EXPR_IFN (call));
   8579 
   8580   tree fndecl = get_callee_fndecl (call);
   8581   if (fndecl
   8582       && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
   8583       && tree_builtin_call_types_compatible_p (call, fndecl))
   8584     return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
   8585 
   8586   return CFN_LAST;
   8587 }
   8588 
   8589 /* Comparator of indices based on tree_node_counts.  */
   8590 
   8591 static int
   8592 tree_nodes_cmp (const void *p1, const void *p2)
   8593 {
   8594   const unsigned *n1 = (const unsigned *)p1;
   8595   const unsigned *n2 = (const unsigned *)p2;
   8596 
   8597   return tree_node_counts[*n1] - tree_node_counts[*n2];
   8598 }
   8599 
   8600 /* Comparator of indices based on tree_code_counts.  */
   8601 
   8602 static int
   8603 tree_codes_cmp (const void *p1, const void *p2)
   8604 {
   8605   const unsigned *n1 = (const unsigned *)p1;
   8606   const unsigned *n2 = (const unsigned *)p2;
   8607 
   8608   return tree_code_counts[*n1] - tree_code_counts[*n2];
   8609 }
   8610 
   8611 #define TREE_MEM_USAGE_SPACES 40
   8612 
   8613 /* Print debugging information about tree nodes generated during the compile,
   8614    and any language-specific information.  */
   8615 
   8616 void
   8617 dump_tree_statistics (void)
   8618 {
   8619   if (GATHER_STATISTICS)
   8620     {
   8621       uint64_t total_nodes, total_bytes;
   8622       fprintf (stderr, "\nKind                   Nodes      Bytes\n");
   8623       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
   8624       total_nodes = total_bytes = 0;
   8625 
   8626       {
   8627 	auto_vec<unsigned> indices (all_kinds);
   8628 	for (unsigned i = 0; i < all_kinds; i++)
   8629 	  indices.quick_push (i);
   8630 	indices.qsort (tree_nodes_cmp);
   8631 
   8632 	for (unsigned i = 0; i < (int) all_kinds; i++)
   8633 	  {
   8634 	    unsigned j = indices[i];
   8635 	    fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
   8636 		     tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
   8637 		     SIZE_AMOUNT (tree_node_sizes[j]));
   8638 	    total_nodes += tree_node_counts[j];
   8639 	    total_bytes += tree_node_sizes[j];
   8640 	  }
   8641 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
   8642 	fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
   8643 		 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
   8644 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
   8645       }
   8646 
   8647       {
   8648 	fprintf (stderr, "Code                              Nodes\n");
   8649 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
   8650 
   8651 	auto_vec<unsigned> indices (MAX_TREE_CODES);
   8652 	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
   8653 	  indices.quick_push (i);
   8654 	indices.qsort (tree_codes_cmp);
   8655 
   8656 	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
   8657 	  {
   8658 	    unsigned j = indices[i];
   8659 	    fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
   8660 		     get_tree_code_name ((enum tree_code) j),
   8661 		     SIZE_AMOUNT (tree_code_counts[j]));
   8662 	  }
   8663 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
   8664 	fprintf (stderr, "\n");
   8665 	ssanames_print_statistics ();
   8666 	fprintf (stderr, "\n");
   8667 	phinodes_print_statistics ();
   8668 	fprintf (stderr, "\n");
   8669       }
   8670     }
   8671   else
   8672     fprintf (stderr, "(No per-node statistics)\n");
   8673 
   8674   print_type_hash_statistics ();
   8675   print_debug_expr_statistics ();
   8676   print_value_expr_statistics ();
   8677   lang_hooks.print_statistics ();
   8678 }
   8679 
   8680 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
   8682 
/* Generate a crc32 of the low BYTES bytes of VALUE, folding it into the
   running checksum CHKSUM.  Uses the CRC-32 polynomial 0x04c11db7
   (unreflected), processed a nibble at a time via a 16-entry syndrome
   table.  BYTES must be in [1, 4]; 0 or larger values would make the
   initial shift invalid.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* This relies on the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  /* Left-justify the data so the most significant nibble of the low
     BYTES bytes sits in the top of the word.  */
  unsigned data = value << (32 - bytes * 8);
  unsigned nibbles = bytes * 2;

  while (nibbles-- > 0)
    {
      unsigned idx = ((data ^ chksum) >> 28) & 0xf;
      chksum = (chksum << 4) ^ syndromes[idx];
      data <<= 4;
    }

  return chksum;
}
   8712 
   8713 /* Generate a crc32 of a string.  */
   8714 
   8715 unsigned
   8716 crc32_string (unsigned chksum, const char *string)
   8717 {
   8718   do
   8719     chksum = crc32_byte (chksum, *string);
   8720   while (*string++);
   8721   return chksum;
   8722 }
   8723 
   8724 /* P is a string that will be used in a symbol.  Mask out any characters
   8725    that are not valid in that context.  */
   8726 
   8727 void
   8728 clean_symbol_name (char *p)
   8729 {
   8730   for (; *p; p++)
   8731     if (! (ISALNUM (*p)
   8732 #ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
   8733 	    || *p == '$'
   8734 #endif
   8735 #ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but...  */
   8736 	    || *p == '.'
   8737 #endif
   8738 	   ))
   8739       *p = '_';
   8740 }
   8741 
   8742 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH.  */
   8743 
   8744 /* Create a unique anonymous identifier.  The identifier is still a
   8745    valid assembly label.  */
   8746 
   8747 tree
   8748 make_anon_name ()
   8749 {
   8750   const char *fmt =
   8751 #if !defined (NO_DOT_IN_LABEL)
   8752     "."
   8753 #elif !defined (NO_DOLLAR_IN_LABEL)
   8754     "$"
   8755 #else
   8756     "_"
   8757 #endif
   8758     "_anon_%d";
   8759 
   8760   char buf[24];
   8761   int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
   8762   gcc_checking_assert (len < int (sizeof (buf)));
   8763 
   8764   tree id = get_identifier_with_length (buf, len);
   8765   IDENTIFIER_ANON_P (id) = true;
   8766 
   8767   return id;
   8768 }
   8769 
/* Generate a name for a special-purpose function.
   The generated name may need to be unique across the whole link.
   Changes to this function may also require corresponding changes to
   xstrdup_mask_random.
   TYPE is some string to identify the purpose of this function to the
   linker or collect2; it must start with an uppercase letter,
   one of:
   I - for constructors
   D - for destructors
   N - for C++ anonymous namespaces
   F - for DWARF unwind frame information.  */

tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (startswith (type, "sub_")
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      len = strlen (file);
      /* 9 + 19 budgets for the "_%08X_" infix and the 0x-prefixed
	 64-bit hex seed printed by the snprintf below; +1 for the
	 terminating NUL.  */
      q = (char *) alloca (9 + 19 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  /* Replace any characters not valid in a symbol with '_'.  */
  clean_symbol_name (q);
  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
   8844 
   8845 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
   8847 
/* Complain that the tree code of NODE does not match the expected 0
   terminated list of trailing codes. The trailing code list can be
   empty, for a more vague error message.  FILE, LINE, and FUNCTION
   are of the caller.  Does not return (internal_error aborts).  */

void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: measure the space needed for the
     "expected A or B ..." message.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      /* Second pass: build the message in a stack buffer.  */
      char *tmp;
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    /* No expected codes were supplied: fall back to a generic text.  */
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
   8891 
   8892 /* Complain that the tree code of NODE does match the expected 0
   8893    terminated list of trailing codes. FILE, LINE, and FUNCTION are of
   8894    the caller.  */
   8895 
   8896 void
   8897 tree_not_check_failed (const_tree node, const char *file,
   8898 		       int line, const char *function, ...)
   8899 {
   8900   va_list args;
   8901   char *buffer;
   8902   unsigned length = 0;
   8903   enum tree_code code;
   8904 
   8905   va_start (args, function);
   8906   while ((code = (enum tree_code) va_arg (args, int)))
   8907     length += 4 + strlen (get_tree_code_name (code));
   8908   va_end (args);
   8909   va_start (args, function);
   8910   buffer = (char *) alloca (length);
   8911   length = 0;
   8912   while ((code = (enum tree_code) va_arg (args, int)))
   8913     {
   8914       if (length)
   8915 	{
   8916 	  strcpy (buffer + length, " or ");
   8917 	  length += 4;
   8918 	}
   8919       strcpy (buffer + length, get_tree_code_name (code));
   8920       length += strlen (get_tree_code_name (code));
   8921     }
   8922   va_end (args);
   8923 
   8924   internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
   8925 		  buffer, get_tree_code_name (TREE_CODE (node)),
   8926 		  function, trim_filename (file), line);
   8927 }
   8928 
   8929 /* Similar to tree_check_failed, except that we check for a class of tree
   8930    code, given in CL.  */
   8931 
   8932 void
   8933 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
   8934 			 const char *file, int line, const char *function)
   8935 {
   8936   internal_error
   8937     ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
   8938      TREE_CODE_CLASS_STRING (cl),
   8939      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
   8940      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
   8941 }
   8942 
   8943 /* Similar to tree_check_failed, except that instead of specifying a
   8944    dozen codes, use the knowledge that they're all sequential.  */
   8945 
   8946 void
   8947 tree_range_check_failed (const_tree node, const char *file, int line,
   8948 			 const char *function, enum tree_code c1,
   8949 			 enum tree_code c2)
   8950 {
   8951   char *buffer;
   8952   unsigned length = 0;
   8953   unsigned int c;
   8954 
   8955   for (c = c1; c <= c2; ++c)
   8956     length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
   8957 
   8958   length += strlen ("expected ");
   8959   buffer = (char *) alloca (length);
   8960   length = 0;
   8961 
   8962   for (c = c1; c <= c2; ++c)
   8963     {
   8964       const char *prefix = length ? " or " : "expected ";
   8965 
   8966       strcpy (buffer + length, prefix);
   8967       length += strlen (prefix);
   8968       strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
   8969       length += strlen (get_tree_code_name ((enum tree_code) c));
   8970     }
   8971 
   8972   internal_error ("tree check: %s, have %s in %s, at %s:%d",
   8973 		  buffer, get_tree_code_name (TREE_CODE (node)),
   8974 		  function, trim_filename (file), line);
   8975 }
   8976 
   8977 
   8978 /* Similar to tree_check_failed, except that we check that a tree does
   8979    not have the specified code, given in CL.  */
   8980 
   8981 void
   8982 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
   8983 			     const char *file, int line, const char *function)
   8984 {
   8985   internal_error
   8986     ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
   8987      TREE_CODE_CLASS_STRING (cl),
   8988      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
   8989      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
   8990 }
   8991 
   8992 
   8993 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes.  */
   8994 
   8995 void
   8996 omp_clause_check_failed (const_tree node, const char *file, int line,
   8997                          const char *function, enum omp_clause_code code)
   8998 {
   8999   internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
   9000 		  "in %s, at %s:%d",
   9001 		  omp_clause_code_name[code],
   9002 		  get_tree_code_name (TREE_CODE (node)),
   9003 		  function, trim_filename (file), line);
   9004 }
   9005 
   9006 
   9007 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes.  */
   9008 
   9009 void
   9010 omp_clause_range_check_failed (const_tree node, const char *file, int line,
   9011 			       const char *function, enum omp_clause_code c1,
   9012 			       enum omp_clause_code c2)
   9013 {
   9014   char *buffer;
   9015   unsigned length = 0;
   9016   unsigned int c;
   9017 
   9018   for (c = c1; c <= c2; ++c)
   9019     length += 4 + strlen (omp_clause_code_name[c]);
   9020 
   9021   length += strlen ("expected ");
   9022   buffer = (char *) alloca (length);
   9023   length = 0;
   9024 
   9025   for (c = c1; c <= c2; ++c)
   9026     {
   9027       const char *prefix = length ? " or " : "expected ";
   9028 
   9029       strcpy (buffer + length, prefix);
   9030       length += strlen (prefix);
   9031       strcpy (buffer + length, omp_clause_code_name[c]);
   9032       length += strlen (omp_clause_code_name[c]);
   9033     }
   9034 
   9035   internal_error ("tree check: %s, have %s in %s, at %s:%d",
   9036 		  buffer, omp_clause_code_name[TREE_CODE (node)],
   9037 		  function, trim_filename (file), line);
   9038 }
   9039 
   9040 
/* Table mapping each tree_node_structure_enum value to its printable
   name, generated from treestruct.def; TS_ENUM_NAME does the lookup.  */
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
   9050 
   9051 /* Similar to tree_class_check_failed, except that we check for
   9052    whether CODE contains the tree structure identified by EN.  */
   9053 
   9054 void
   9055 tree_contains_struct_check_failed (const_tree node,
   9056 				   const enum tree_node_structure_enum en,
   9057 				   const char *file, int line,
   9058 				   const char *function)
   9059 {
   9060   internal_error
   9061     ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
   9062      TS_ENUM_NAME (en),
   9063      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
   9064 }
   9065 
   9066 
/* Similar to above, except that the check is for the bounds of a
   TREE_INT_CST's (dynamically sized) element array: element IDX was
   accessed but only LEN elements exist.  (The old comment wrongly said
   TREE_VEC.)  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
     "at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
   9079 
/* Similar to above, except that the check is for the bounds of a
   TREE_VEC's (dynamically sized) vector: element IDX was accessed but
   only LEN elements exist.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  /* Report as 1-based for the human reader.  */
  const int elt = idx + 1;

  internal_error
    ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
     elt, len, function, trim_filename (file), line);
}
   9091 
   9092 /* Similar to above, except that the check is for the bounds of the operand
   9093    vector of an expression node EXP.  */
   9094 
   9095 void
   9096 tree_operand_check_failed (int idx, const_tree exp, const char *file,
   9097 			   int line, const char *function)
   9098 {
   9099   enum tree_code code = TREE_CODE (exp);
   9100   internal_error
   9101     ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
   9102      idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
   9103      function, trim_filename (file), line);
   9104 }
   9105 
   9106 /* Similar to above, except that the check is for the number of
   9107    operands of an OMP_CLAUSE node.  */
   9108 
   9109 void
   9110 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
   9111 			         int line, const char *function)
   9112 {
   9113   internal_error
   9114     ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
   9115      "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
   9116      omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
   9117      trim_filename (file), line);
   9118 }
   9119 #endif /* ENABLE_TREE_CHECKING */
   9120 
/* Create a new vector type node holding NUNITS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  /* Vector types are always built on the main variant of the element
     type; qualifiers/attributes of INNERTYPE are re-applied at the end.  */
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    /* Recurse to build the canonical form: canonical element type and
       VOIDmode, which terminates the recursion.  */
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Share an existing identical node if one has already been built.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type. Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
   9160 
   9161 static tree
   9162 make_or_reuse_type (unsigned size, int unsignedp)
   9163 {
   9164   int i;
   9165 
   9166   if (size == INT_TYPE_SIZE)
   9167     return unsignedp ? unsigned_type_node : integer_type_node;
   9168   if (size == CHAR_TYPE_SIZE)
   9169     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
   9170   if (size == SHORT_TYPE_SIZE)
   9171     return unsignedp ? short_unsigned_type_node : short_integer_type_node;
   9172   if (size == LONG_TYPE_SIZE)
   9173     return unsignedp ? long_unsigned_type_node : long_integer_type_node;
   9174   if (size == LONG_LONG_TYPE_SIZE)
   9175     return (unsignedp ? long_long_unsigned_type_node
   9176             : long_long_integer_type_node);
   9177 
   9178   for (i = 0; i < NUM_INT_N_ENTS; i ++)
   9179     if (size == int_n_data[i].bitsize
   9180 	&& int_n_enabled_p[i])
   9181       return (unsignedp ? int_n_trees[i].unsigned_type
   9182 	      : int_n_trees[i].signed_type);
   9183 
   9184   if (unsignedp)
   9185     return make_unsigned_type (size);
   9186   else
   9187     return make_signed_type (size);
   9188 }
   9189 
/* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  Reuses the
   pre-built _Fract nodes when SIZE matches one of the standard widths;
   otherwise builds a fresh type.  */

static tree
make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
{
  /* Saturating variants.  */
  if (satp)
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_fract_type_node
			 : sat_short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_fract_type_node
			 : sat_long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_fract_type_node
			 : sat_long_long_fract_type_node;
    }
  /* Non-saturating variants.  */
  else
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_short_fract_type_node
			 : short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_fract_type_node : fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_fract_type_node
			 : long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_fract_type_node
			 : long_long_fract_type_node;
    }

  /* No standard width matched; build a new fract type.  */
  return make_fract_type (size, unsignedp, satp);
}
   9226 
/* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  Reuses
   the pre-built _Accum nodes when SIZE matches one of the standard
   widths; otherwise builds a fresh type.  */

static tree
make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
{
  /* Saturating variants.  */
  if (satp)
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_accum_type_node
			 : sat_short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_accum_type_node
			 : sat_long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_accum_type_node
			 : sat_long_long_accum_type_node;
    }
  /* Non-saturating variants.  */
  else
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_short_accum_type_node
			 : short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_accum_type_node : accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_accum_type_node
			 : long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_accum_type_node
			 : long_long_accum_type_node;
    }

  /* No standard width matched; build a new accum type.  */
  return make_accum_type (size, unsignedp, satp);
}
   9263 
   9264 
   9265 /* Create an atomic variant node for TYPE.  This routine is called
   9266    during initialization of data types to create the 5 basic atomic
   9267    types. The generic build_variant_type function requires these to
   9268    already be set up in order to function properly, so cannot be
   9269    called from there.  If ALIGN is non-zero, then ensure alignment is
   9270    overridden to this value.  */
   9271 
   9272 static tree
   9273 build_atomic_base (tree type, unsigned int align)
   9274 {
   9275   tree t;
   9276 
   9277   /* Make sure its not already registered.  */
   9278   if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
   9279     return t;
   9280 
   9281   t = build_variant_type_copy (type);
   9282   set_type_quals (t, TYPE_QUAL_ATOMIC);
   9283 
   9284   if (align)
   9285     SET_TYPE_ALIGN (t, align);
   9286 
   9287   return t;
   9288 }
   9289 
/* Information about the _FloatN and _FloatNx types.  This must be in
   the same order as the corresponding TI_* enum values.  Each entry
   gives the width N and whether it is an extended (_FloatNx) type.  */
const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
  {
    { 16, false },	/* _Float16 */
    { 32, false },	/* _Float32 */
    { 64, false },	/* _Float64 */
    { 128, false },	/* _Float128 */
    { 32, true },	/* _Float32x */
    { 64, true },	/* _Float64x */
    { 128, true },	/* _Float128x */
  };
   9302 
   9303 
/* Create nodes for all integer types (and error_mark_node) using the sizes
   of C datatypes.  SIGNED_CHAR specifies whether char is signed.

   NOTE(review): the order of the initializations below is significant;
   later nodes are built from earlier ones (e.g. size_type_node from the
   integer type nodes, complex types from the float types).  */

void
build_common_tree_nodes (bool signed_char)
{
  int i;

  error_mark_node = make_node (ERROR_MARK);
  TREE_TYPE (error_mark_node) = error_mark_node;

  initialize_sizetypes ();

  /* Define both `signed char' and `unsigned char'.  */
  signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (signed_char_type_node) = 1;
  unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (unsigned_char_type_node) = 1;

  /* Define `char', which is like either `signed char' or `unsigned char'
     but not the same as either.  */
  char_type_node
    = (signed_char
       ? make_signed_type (CHAR_TYPE_SIZE)
       : make_unsigned_type (CHAR_TYPE_SIZE));
  TYPE_STRING_FLAG (char_type_node) = 1;

  short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
  short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
  integer_type_node = make_signed_type (INT_TYPE_SIZE);
  unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
  long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
  long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
  long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
  long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);

  /* Build the target's optional __intN types, recording the enabled
     ones among the standard integer ranks.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    {
      int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
      int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);

      if (int_n_enabled_p[i])
	{
	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
	}
    }

  /* Define a boolean type.  This type only represents boolean values but
     may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
  boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
  TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
  TYPE_PRECISION (boolean_type_node) = 1;
  TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);

  /* Define what type to use for size_t.  */
  if (strcmp (SIZE_TYPE, "unsigned int") == 0)
    size_type_node = unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
    size_type_node = long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
    size_type_node = long_long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
    size_type_node = short_unsigned_type_node;
  else
    {
      /* SIZE_TYPE names a target __intN type; find it.  */
      int i;

      size_type_node = NULL_TREE;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZE_TYPE) == 0
		|| strcmp (altname, SIZE_TYPE) == 0)
	      {
		size_type_node = int_n_trees[i].unsigned_type;
	      }
	  }
      if (size_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Define what type to use for ptrdiff_t.  */
  if (strcmp (PTRDIFF_TYPE, "int") == 0)
    ptrdiff_type_node = integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
    ptrdiff_type_node = long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
    ptrdiff_type_node = long_long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
    ptrdiff_type_node = short_integer_type_node;
  else
    {
      /* PTRDIFF_TYPE names a target __intN type; find it.  */
      ptrdiff_type_node = NULL_TREE;
      for (int i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__", int_n_data[i].bitsize);

	    if (strcmp (name, PTRDIFF_TYPE) == 0
		|| strcmp (altname, PTRDIFF_TYPE) == 0)
	      ptrdiff_type_node = int_n_trees[i].signed_type;
	  }
      if (ptrdiff_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Fill in the rest of the sized types.  Reuse existing type nodes
     when possible.  */
  intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
  intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
  intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
  intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
  intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);

  unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
  unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
  unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
  unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
  unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);

  /* Don't call build_qualified type for atomics.  That routine does
     special processing for atomics, and until they are initialized
     it's better not to make that call.

     Check to see if there is a target override for atomic types.  */

  atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
					targetm.atomic_align_for_mode (QImode));
  atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
					targetm.atomic_align_for_mode (HImode));
  atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
					targetm.atomic_align_for_mode (SImode));
  atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
					targetm.atomic_align_for_mode (DImode));
  atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
					targetm.atomic_align_for_mode (TImode));

  /* Shared identifier nodes for member-access levels.  */
  access_public_node = get_identifier ("public");
  access_protected_node = get_identifier ("protected");
  access_private_node = get_identifier ("private");

  /* Define these next since types below may used them.  */
  integer_zero_node = build_int_cst (integer_type_node, 0);
  integer_one_node = build_int_cst (integer_type_node, 1);
  integer_three_node = build_int_cst (integer_type_node, 3);
  integer_minus_one_node = build_int_cst (integer_type_node, -1);

  size_zero_node = size_int (0);
  size_one_node = size_int (1);
  bitsize_zero_node = bitsize_int (0);
  bitsize_one_node = bitsize_int (1);
  bitsize_unit_node = bitsize_int (BITS_PER_UNIT);

  boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  void_type_node = make_node (VOID_TYPE);
  layout_type (void_type_node);

  /* We are not going to have real types in C with less than byte alignment,
     so we might as well not have any types that claim to have it.  */
  SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
  TYPE_USER_ALIGN (void_type_node) = 0;

  void_node = make_node (VOID_CST);
  TREE_TYPE (void_node) = void_type_node;

  null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
  layout_type (TREE_TYPE (null_pointer_node));

  ptr_type_node = build_pointer_type (void_type_node);
  const_ptr_type_node
    = build_pointer_type (build_type_variant (void_type_node, 1, 0));
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);

  /* The three standard C floating-point types.  */
  float_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
  layout_type (float_type_node);

  double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
  layout_type (double_type_node);

  long_double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
  layout_type (long_double_type_node);

  /* _FloatN and _FloatNx types where the target provides a mode.  */
  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      int n = floatn_nx_types[i].n;
      bool extended = floatn_nx_types[i].extended;
      scalar_float_mode mode;
      if (!targetm.floatn_mode (n, extended).exists (&mode))
	continue;
      int precision = GET_MODE_PRECISION (mode);
      /* Work around the rs6000 KFmode having precision 113 not
	 128.  */
      const struct real_format *fmt = REAL_MODE_FORMAT (mode);
      gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
      int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
      if (!extended)
	gcc_assert (min_precision == n);
      if (precision < min_precision)
	precision = min_precision;
      FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
      TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
      layout_type (FLOATN_NX_TYPE_NODE (i));
      SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
    }

  float_ptr_type_node = build_pointer_type (float_type_node);
  double_ptr_type_node = build_pointer_type (double_type_node);
  long_double_ptr_type_node = build_pointer_type (long_double_type_node);
  integer_ptr_type_node = build_pointer_type (integer_type_node);

  /* Fixed size integer types.  */
  uint16_type_node = make_or_reuse_type (16, 1);
  uint32_type_node = make_or_reuse_type (32, 1);
  uint64_type_node = make_or_reuse_type (64, 1);
  if (targetm.scalar_mode_supported_p (TImode))
    uint128_type_node = make_or_reuse_type (128, 1);

  /* Decimal float types. */
  if (targetm.decimal_float_supported_p ())
    {
      dfloat32_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
      SET_TYPE_MODE (dfloat32_type_node, SDmode);
      layout_type (dfloat32_type_node);

      dfloat64_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
      SET_TYPE_MODE (dfloat64_type_node, DDmode);
      layout_type (dfloat64_type_node);

      dfloat128_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
      SET_TYPE_MODE (dfloat128_type_node, TDmode);
      layout_type (dfloat128_type_node);
    }

  /* Complex types, built from the component types created above.  */
  complex_integer_type_node = build_complex_type (integer_type_node, true);
  complex_float_type_node = build_complex_type (float_type_node, true);
  complex_double_type_node = build_complex_type (double_type_node, true);
  complex_long_double_type_node = build_complex_type (long_double_type_node,
						      true);

  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
	COMPLEX_FLOATN_NX_TYPE_NODE (i)
	  = build_complex_type (FLOATN_NX_TYPE_NODE (i));
    }

/* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
  sat_ ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
  sat_ ## WIDTH ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

/* Make fixed-point type nodes based on four different widths.  */
#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)

/* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
  NAME ## _type_node = \
    make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
  u ## NAME ## _type_node = \
    make_or_reuse_unsigned_ ## KIND ## _type \
      (GET_MODE_BITSIZE (U ## MODE ## mode)); \
  sat_ ## NAME ## _type_node = \
    make_or_reuse_sat_signed_ ## KIND ## _type \
      (GET_MODE_BITSIZE (MODE ## mode)); \
  sat_u ## NAME ## _type_node = \
    make_or_reuse_sat_unsigned_ ## KIND ## _type \
      (GET_MODE_BITSIZE (U ## MODE ## mode));

  /* Fixed-point type and mode nodes.  */
  MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
  MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
  MAKE_FIXED_MODE_NODE (fract, qq, QQ)
  MAKE_FIXED_MODE_NODE (fract, hq, HQ)
  MAKE_FIXED_MODE_NODE (fract, sq, SQ)
  MAKE_FIXED_MODE_NODE (fract, dq, DQ)
  MAKE_FIXED_MODE_NODE (fract, tq, TQ)
  MAKE_FIXED_MODE_NODE (accum, ha, HA)
  MAKE_FIXED_MODE_NODE (accum, sa, SA)
  MAKE_FIXED_MODE_NODE (accum, da, DA)
  MAKE_FIXED_MODE_NODE (accum, ta, TA)

  {
    tree t = targetm.build_builtin_va_list ();

    /* Many back-ends define record types without setting TYPE_NAME.
       If we copied the record type here, we'd keep the original
       record type without a name.  This breaks name mangling.  So,
       don't copy record types and let c_common_nodes_and_builtins()
       declare the type to be __builtin_va_list.  */
    if (TREE_CODE (t) != RECORD_TYPE)
      t = build_variant_type_copy (t);

    va_list_type_node = t;
  }

  /* SCEV analyzer global shared trees.  */
  chrec_dont_know = make_node (SCEV_NOT_KNOWN);
  TREE_TYPE (chrec_dont_know) = void_type_node;
  chrec_known = make_node (SCEV_KNOWN);
  TREE_TYPE (chrec_known) = void_type_node;
}
   9643 
/* Modify DECL for given flags.
   TM_PURE attribute is set only on types, so the function will modify
   DECL's type when ECF_TM_PURE is used.

   Each ECF_* bit in FLAGS is translated to the corresponding decl flag
   or attribute; FLAGS bits not listed below are ignored.  */

void
set_call_expr_flags (tree decl, int flags)
{
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  /* The remaining flags are represented as attributes, consed onto the
     front of DECL's attribute list.  */
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_COLD)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_RET1)
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("fn spec"),
		   build_tree_list (NULL_TREE, build_string (2, "1 ")),
		   DECL_ATTRIBUTES (decl));
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
   9685 
   9686 
   9687 /* A subroutine of build_common_builtin_nodes.  Define a builtin function.  */
   9688 
   9689 static void
   9690 local_define_builtin (const char *name, tree type, enum built_in_function code,
   9691                       const char *library_name, int ecf_flags)
   9692 {
   9693   tree decl;
   9694 
   9695   decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
   9696 			       library_name, NULL_TREE);
   9697   set_call_expr_flags (decl, ecf_flags);
   9698 
   9699   set_builtin_decl (code, decl, true);
   9700 }
   9701 
/* Call this function after instantiating all builtins that the language
   front end cares about.  This will build the rest of the builtins
   and internal functions that are relied upon by the tree optimizers and
   the middle-end.

   Each builtin is only defined here if the front end has not already
   provided it (checked via builtin_decl_explicit_p).  */

void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  /* __builtin_clear_padding (ptr, ptr, int).  */
  if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
    {
      ftype = build_function_type_list (void_type_node,
					ptr_type_node,
					ptr_type_node,
					integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_clear_padding", ftype,
			    BUILT_IN_CLEAR_PADDING,
			    "__builtin_clear_padding",
			    ECF_LEAF | ECF_NOTHROW);
    }

  /* Noreturn control-flow builtins: __builtin_unreachable and abort
     share the void(void) signature.  */
  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
      || !builtin_decl_explicit_p (BUILT_IN_ABORT))
    {
      ftype = build_function_type (void_type_node, void_list_node);
      if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
	local_define_builtin ("__builtin_unreachable", ftype,
			      BUILT_IN_UNREACHABLE,
			      "__builtin_unreachable",
			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
			      | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
			      "abort",
			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
    }

  /* Memory primitives: memcpy/memmove share ptr(ptr, const ptr, size).  */
  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, const_ptr_type_node,
					size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
			      "memcpy", ECF_NOTHROW | ECF_LEAF);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
			      "memmove", ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
					const_ptr_type_node, size_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, integer_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
			    "memset", ECF_NOTHROW | ECF_LEAF);
    }

  /* If we're checking the stack, `alloca' can throw.  */
  const int alloca_flags
    = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
			    "alloca", alloca_flags);
    }

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN,
			"__builtin_alloca_with_align",
			alloca_flags);

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
			"__builtin_alloca_with_align_and_max",
			alloca_flags);

  /* Trampoline/descriptor initializers: all three take
     void (ptr, ptr, ptr).  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
			BUILT_IN_INIT_TRAMPOLINE,
			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
			BUILT_IN_INIT_HEAP_TRAMPOLINE,
			"__builtin_init_heap_trampoline",
			ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_descriptor", ftype,
			BUILT_IN_INIT_DESCRIPTOR,
			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
			BUILT_IN_ADJUST_TRAMPOLINE,
			"__builtin_adjust_trampoline",
			ECF_CONST | ECF_NOTHROW);
  local_define_builtin ("__builtin_adjust_descriptor", ftype,
			BUILT_IN_ADJUST_DESCRIPTOR,
			"__builtin_adjust_descriptor",
			ECF_CONST | ECF_NOTHROW);

  /* __clear_cache and __builtin_nonlocal_goto share void (ptr, ptr).  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
    local_define_builtin ("__builtin___clear_cache", ftype,
			  BUILT_IN_CLEAR_CACHE,
			  "__clear_cache",
			  ECF_NOTHROW);

  local_define_builtin ("__builtin_nonlocal_goto", ftype,
			BUILT_IN_NONLOCAL_GOTO,
			"__builtin_nonlocal_goto",
			ECF_NORETURN | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
			BUILT_IN_SETJMP_SETUP,
			"__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
			BUILT_IN_SETJMP_RECEIVER,
			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
			BUILT_IN_STACK_RESTORE,
			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  /* Equality-only comparison helpers: all three share the memcmp-style
     signature int (const ptr, const ptr, size).  */
  ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
				    const_ptr_type_node, size_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
			"__builtin_memcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
			"__builtin_strncmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
			"__builtin_strcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
    alternate __cxa_end_cleanup node used to resume from C++.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
			    BUILT_IN_CXA_END_CLEANUP,
			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  /* The library name depends on whether SJLJ or DWARF-style unwinding
     is in use for this configuration.  */
  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
			BUILT_IN_UNWIND_RESUME,
			((targetm_common.except_unwind_info (&global_options)
			  == UI_SJLJ)
			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
			ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
			    BUILT_IN_RETURN_ADDRESS,
			    "__builtin_return_address",
			    ECF_NOTHROW);
    }

  /* -finstrument-functions hooks; note these carry no ECF flags.  */
  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
					ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
	local_define_builtin ("__cyg_profile_func_enter", ftype,
			      BUILT_IN_PROFILE_FUNC_ENTER,
			      "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
	local_define_builtin ("__cyg_profile_func_exit", ftype,
			      BUILT_IN_PROFILE_FUNC_EXIT,
			      "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
				    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
			"__builtin_eh_pointer", ecf_flags);

  /* The filter value's type is target-defined (eh_return_filter_mode).  */
  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
			BUILT_IN_EH_COPY_VALUES,
			"__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
	char mode_name_buf[4], *q;
	const char *p;
	enum built_in_function mcode, dcode;
	tree type, inner_type;
	const char *prefix = "__";

	if (targetm.libfunc_gnu_prefix)
	  prefix = "__gnu_";

	/* Skip complex modes the front end provides no type for.  */
	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
	if (type == NULL)
	  continue;
	inner_type = TREE_TYPE (type);

	/* Each libfunc takes the four scalar parts: re1, im1, re2, im2.  */
	ftype = build_function_type_list (type, inner_type, inner_type,
					  inner_type, inner_type, NULL_TREE);

        mcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
        dcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

	/* Lower-case the mode name for the "__mul<mode>3" spelling.  */
        for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
	  *q = TOLOWER (*p);
	*q = '\0';

	/* For -ftrapping-math these should throw from a former
	   -fnon-call-exception stmt.  */
	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
					NULL);
        local_define_builtin (built_in_names[mcode], ftype, mcode,
			      built_in_names[mcode],
			      ECF_CONST | ECF_LEAF);

	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
					NULL);
        local_define_builtin (built_in_names[dcode], ftype, dcode,
			      built_in_names[dcode],
			      ECF_CONST | ECF_LEAF);
      }
  }

  init_internal_fns ();
}
   9996 
   9997 /* HACK.  GROSS.  This is absolutely disgusting.  I wish there was a
   9998    better way.
   9999 
   10000    If we requested a pointer to a vector, build up the pointers that
   10001    we stripped off while looking for the inner type.  Similarly for
   10002    return values from functions.
   10003 
   10004    The argument TYPE is the top of the chain, and BOTTOM is the
   10005    new type which we will point to.  */
   10006 
   10007 tree
   10008 reconstruct_complex_type (tree type, tree bottom)
   10009 {
   10010   tree inner, outer;
   10011 
   10012   if (TREE_CODE (type) == POINTER_TYPE)
   10013     {
   10014       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
   10015       outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
   10016 					   TYPE_REF_CAN_ALIAS_ALL (type));
   10017     }
   10018   else if (TREE_CODE (type) == REFERENCE_TYPE)
   10019     {
   10020       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
   10021       outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
   10022 					     TYPE_REF_CAN_ALIAS_ALL (type));
   10023     }
   10024   else if (TREE_CODE (type) == ARRAY_TYPE)
   10025     {
   10026       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
   10027       outer = build_array_type (inner, TYPE_DOMAIN (type));
   10028     }
   10029   else if (TREE_CODE (type) == FUNCTION_TYPE)
   10030     {
   10031       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
   10032       outer = build_function_type (inner, TYPE_ARG_TYPES (type));
   10033     }
   10034   else if (TREE_CODE (type) == METHOD_TYPE)
   10035     {
   10036       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
   10037       /* The build_method_type_directly() routine prepends 'this' to argument list,
   10038          so we must compensate by getting rid of it.  */
   10039       outer
   10040 	= build_method_type_directly
   10041 	    (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
   10042 	     inner,
   10043 	     TREE_CHAIN (TYPE_ARG_TYPES (type)));
   10044     }
   10045   else if (TREE_CODE (type) == OFFSET_TYPE)
   10046     {
   10047       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
   10048       outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
   10049     }
   10050   else
   10051     return bottom;
   10052 
   10053   return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
   10054 					    TYPE_QUALS (type));
   10055 }
   10056 
   10057 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
   10058    the inner type.  */
   10059 tree
   10060 build_vector_type_for_mode (tree innertype, machine_mode mode)
   10061 {
   10062   poly_int64 nunits;
   10063   unsigned int bitsize;
   10064 
   10065   switch (GET_MODE_CLASS (mode))
   10066     {
   10067     case MODE_VECTOR_BOOL:
   10068     case MODE_VECTOR_INT:
   10069     case MODE_VECTOR_FLOAT:
   10070     case MODE_VECTOR_FRACT:
   10071     case MODE_VECTOR_UFRACT:
   10072     case MODE_VECTOR_ACCUM:
   10073     case MODE_VECTOR_UACCUM:
   10074       nunits = GET_MODE_NUNITS (mode);
   10075       break;
   10076 
   10077     case MODE_INT:
   10078       /* Check that there are no leftover bits.  */
   10079       bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
   10080       gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
   10081       nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
   10082       break;
   10083 
   10084     default:
   10085       gcc_unreachable ();
   10086     }
   10087 
   10088   return make_vector_type (innertype, nunits, mode);
   10089 }
   10090 
   10091 /* Similarly, but takes the inner type and number of units, which must be
   10092    a power of two.  */
   10093 
   10094 tree
   10095 build_vector_type (tree innertype, poly_int64 nunits)
   10096 {
   10097   return make_vector_type (innertype, nunits, VOIDmode);
   10098 }
   10099 
   10100 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE.  */
   10101 
   10102 tree
   10103 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
   10104 {
   10105   gcc_assert (mask_mode != BLKmode);
   10106 
   10107   unsigned HOST_WIDE_INT esize;
   10108   if (VECTOR_MODE_P (mask_mode))
   10109     {
   10110       poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
   10111       esize = vector_element_size (vsize, nunits);
   10112     }
   10113   else
   10114     esize = 1;
   10115 
   10116   tree bool_type = build_nonstandard_boolean_type (esize);
   10117 
   10118   return make_vector_type (bool_type, nunits, mask_mode);
   10119 }
   10120 
   10121 /* Build a vector type that holds one boolean result for each element of
   10122    vector type VECTYPE.  The public interface for this operation is
   10123    truth_type_for.  */
   10124 
   10125 static tree
   10126 build_truth_vector_type_for (tree vectype)
   10127 {
   10128   machine_mode vector_mode = TYPE_MODE (vectype);
   10129   poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
   10130 
   10131   machine_mode mask_mode;
   10132   if (VECTOR_MODE_P (vector_mode)
   10133       && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
   10134     return build_truth_vector_type_for_mode (nunits, mask_mode);
   10135 
   10136   poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
   10137   unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
   10138   tree bool_type = build_nonstandard_boolean_type (esize);
   10139 
   10140   return make_vector_type (bool_type, nunits, VOIDmode);
   10141 }
   10142 
   10143 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
   10144    set.  */
   10145 
   10146 tree
   10147 build_opaque_vector_type (tree innertype, poly_int64 nunits)
   10148 {
   10149   tree t = make_vector_type (innertype, nunits, VOIDmode);
   10150   tree cand;
   10151   /* We always build the non-opaque variant before the opaque one,
   10152      so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
   10153   cand = TYPE_NEXT_VARIANT (t);
   10154   if (cand
   10155       && TYPE_VECTOR_OPAQUE (cand)
   10156       && check_qualified_type (cand, t, TYPE_QUALS (t)))
   10157     return cand;
   10158   /* Othewise build a variant type and make sure to queue it after
   10159      the non-opaque type.  */
   10160   cand = build_distinct_type_copy (t);
   10161   TYPE_VECTOR_OPAQUE (cand) = true;
   10162   TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
   10163   TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
   10164   TYPE_NEXT_VARIANT (t) = cand;
   10165   TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
   10166   /* Type variants have no alias set defined.  */
   10167   TYPE_ALIAS_SET (cand) = -1;
   10168   return cand;
   10169 }
   10170 
   10171 /* Return the value of element I of VECTOR_CST T as a wide_int.  */
   10172 
   10173 static poly_wide_int
   10174 vector_cst_int_elt (const_tree t, unsigned int i)
   10175 {
   10176   /* First handle elements that are directly encoded.  */
   10177   unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
   10178   if (i < encoded_nelts)
   10179     return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
   10180 
   10181   /* Identify the pattern that contains element I and work out the index of
   10182      the last encoded element for that pattern.  */
   10183   unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
   10184   unsigned int pattern = i % npatterns;
   10185   unsigned int count = i / npatterns;
   10186   unsigned int final_i = encoded_nelts - npatterns + pattern;
   10187 
   10188   /* If there are no steps, the final encoded value is the right one.  */
   10189   if (!VECTOR_CST_STEPPED_P (t))
   10190     return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
   10191 
   10192   /* Otherwise work out the value from the last two encoded elements.  */
   10193   tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
   10194   tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
   10195   poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
   10196   return wi::to_poly_wide (v2) + (count - 2) * diff;
   10197 }
   10198 
   10199 /* Return the value of element I of VECTOR_CST T.  */
   10200 
   10201 tree
   10202 vector_cst_elt (const_tree t, unsigned int i)
   10203 {
   10204   /* First handle elements that are directly encoded.  */
   10205   unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
   10206   if (i < encoded_nelts)
   10207     return VECTOR_CST_ENCODED_ELT (t, i);
   10208 
   10209   /* If there are no steps, the final encoded value is the right one.  */
   10210   if (!VECTOR_CST_STEPPED_P (t))
   10211     {
   10212       /* Identify the pattern that contains element I and work out the index of
   10213 	 the last encoded element for that pattern.  */
   10214       unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
   10215       unsigned int pattern = i % npatterns;
   10216       unsigned int final_i = encoded_nelts - npatterns + pattern;
   10217       return VECTOR_CST_ENCODED_ELT (t, final_i);
   10218     }
   10219 
   10220   /* Otherwise work out the value from the last two encoded elements.  */
   10221   return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
   10222 			   vector_cst_int_elt (t, i));
   10223 }
   10224 
/* Given an initializer INIT, return TRUE if INIT is zero or some
   aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
   null, set *NONZERO if and only if INIT is known not to be all
   zeros.  The combination of return value of false and *NONZERO
   false implies that INIT may but need not be all zeros.  Other
   combinations indicate definitive answers.  */

bool
initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
{
  bool dummy;
  if (!nonzero)
    nonzero = &dummy;

  /* Conservatively clear NONZERO and set it only if INIT is definitely
     not all zero.  */
  *nonzero = false;

  STRIP_NOPS (init);

  /* Byte offset into a STRING_CST; only set by the MEM_REF case, which
     falls through to STRING_CST below.  */
  unsigned HOST_WIDE_INT off = 0;

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      if (integer_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      if (real_zerop (init)
	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
	return true;

      *nonzero = true;
      return false;

    case FIXED_CST:
      if (fixed_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case COMPLEX_CST:
      /* Both the real and imaginary part must be (positive) zero.  */
      if (integer_zerop (init)
	  || (real_zerop (init)
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
	return true;

      *nonzero = true;
      return false;

    case VECTOR_CST:
      /* Only the duplicated-single-element encoding is recognized here;
	 other encodings are conservatively treated as nonzero.  */
      if (VECTOR_CST_NPATTERNS (init) == 1
	  && VECTOR_CST_DUPLICATE_P (init)
	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
	return true;

      *nonzero = true;
      return false;

    case CONSTRUCTOR:
      {
	/* A clobber is neither known-zero nor known-nonzero.  */
	if (TREE_CLOBBER_P (init))
	  return false;

	unsigned HOST_WIDE_INT idx;
	tree elt;

	/* All recorded elements must be zero; unrecorded elements are
	   implicitly zero.  */
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt, nonzero))
	    return false;

	return true;
      }

    case MEM_REF:
      {
	/* Recognize &"..." + constant-offset and fall through to scan
	   the string constant itself.  */
	tree arg = TREE_OPERAND (init, 0);
	if (TREE_CODE (arg) != ADDR_EXPR)
	  return false;
	tree offset = TREE_OPERAND (init, 1);
	if (TREE_CODE (offset) != INTEGER_CST
	    || !tree_fits_uhwi_p (offset))
	  return false;
	off = tree_to_uhwi (offset);
	if (INT_MAX < off)
	  return false;
	arg = TREE_OPERAND (arg, 0);
	if (TREE_CODE (arg) != STRING_CST)
	  return false;
	init = arg;
      }
      /* Fall through.  */

    case STRING_CST:
      {
	gcc_assert (off <= INT_MAX);

	/* OFF is only used to reject references past the end of the
	   string; the scan below starts at 0.  */
	int i = off;
	int n = TREE_STRING_LENGTH (init);
	if (n <= i)
	  return false;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".  */
	for (i = 0; i < n; ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    {
	      /* NOTE(review): for the MEM_REF path this also sets
		 *NONZERO on a nonzero byte BEFORE OFF — confirm that
		 callers treat *NONZERO conservatively in that case.  */
	      *nonzero = true;
	      return false;
	    }

	return true;
      }

    default:
      return false;
    }
}
   10352 
   10353 /* Return true if EXPR is an initializer expression in which every element
   10354    is a constant that is numerically equal to 0 or 1.  The elements do not
   10355    need to be equal to each other.  */
   10356 
   10357 bool
   10358 initializer_each_zero_or_onep (const_tree expr)
   10359 {
   10360   STRIP_ANY_LOCATION_WRAPPER (expr);
   10361 
   10362   switch (TREE_CODE (expr))
   10363     {
   10364     case INTEGER_CST:
   10365       return integer_zerop (expr) || integer_onep (expr);
   10366 
   10367     case REAL_CST:
   10368       return real_zerop (expr) || real_onep (expr);
   10369 
   10370     case VECTOR_CST:
   10371       {
   10372 	unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
   10373 	if (VECTOR_CST_STEPPED_P (expr)
   10374 	    && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
   10375 	  return false;
   10376 
   10377 	for (unsigned int i = 0; i < nelts; ++i)
   10378 	  {
   10379 	    tree elt = vector_cst_elt (expr, i);
   10380 	    if (!initializer_each_zero_or_onep (elt))
   10381 	      return false;
   10382 	  }
   10383 
   10384 	return true;
   10385       }
   10386 
   10387     default:
   10388       return false;
   10389     }
   10390 }
   10391 
/* Check if vector VEC consists of all the equal elements and
   that the number of elements corresponds to the type of VEC.
   The function returns first element of the vector
   or NULL_TREE if the vector is not uniform.  */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned HOST_WIDE_INT i, nelts;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  /* A VEC_DUPLICATE_EXPR is uniform by construction.  */
  if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
    return TREE_OPERAND (vec, 0);

  else if (TREE_CODE (vec) == VECTOR_CST)
    {
      /* Uniform iff the encoding is a single duplicated pattern.  */
      if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
	return VECTOR_CST_ENCODED_ELT (vec, 0);
      return NULL_TREE;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR
	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
    {
      first = error_mark_node;

      /* Compare every element against the first.  */
      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
        {
          if (i == 0)
            {
              first = t;
              continue;
            }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
        }
      /* After the loop I is the number of recorded elements; a partially
	 filled constructor is not uniform.  */
      if (i != nelts)
	return NULL_TREE;

      /* If the elements are themselves vectors, recurse so that the
	 returned value is a scalar.  */
      if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
	return uniform_vector_p (first);
      return first;
    }

  return NULL_TREE;
}
   10442 
   10443 /* If the argument is INTEGER_CST, return it.  If the argument is vector
   10444    with all elements the same INTEGER_CST, return that INTEGER_CST.  Otherwise
   10445    return NULL_TREE.
   10446    Look through location wrappers. */
   10447 
   10448 tree
   10449 uniform_integer_cst_p (tree t)
   10450 {
   10451   STRIP_ANY_LOCATION_WRAPPER (t);
   10452 
   10453   if (TREE_CODE (t) == INTEGER_CST)
   10454     return t;
   10455 
   10456   if (VECTOR_TYPE_P (TREE_TYPE (t)))
   10457     {
   10458       t = uniform_vector_p (t);
   10459       if (t && TREE_CODE (t) == INTEGER_CST)
   10460 	return t;
   10461     }
   10462 
   10463   return NULL_TREE;
   10464 }
   10465 
/* Checks to see if T is a constant or a constant vector and if each element E
   adheres to ~E + 1 == pow2 then return ~E otherwise NULL_TREE.  */

tree
bitmask_inv_cst_vector_p (tree t)
{

  tree_code code = TREE_CODE (t);
  tree type = TREE_TYPE (t);

  /* Only integer scalars and integer vectors can be bitmasks.  */
  if (!INTEGRAL_TYPE_P (type)
      && !VECTOR_INTEGER_TYPE_P (type))
    return NULL_TREE;

  unsigned HOST_WIDE_INT nelts = 1;
  tree cst;
  unsigned int idx = 0;
  /* Collapse the tree result to a flag: nonnull means T is a uniform
     integer constant (scalar or splatted vector).  */
  bool uniform = uniform_integer_cst_p (t);
  tree newtype = unsigned_type_for (type);
  tree_vector_builder builder;
  if (code == INTEGER_CST)
    cst = t;
  else
    {
      /* Variable-length vectors cannot be checked element by element.  */
      if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
	return NULL_TREE;

      cst = vector_cst_elt (t, 0);
      builder.new_vector (newtype, nelts, 1);
    }

  tree ty = unsigned_type_for (TREE_TYPE (cst));

  /* Check each element E: ~E + 1 must be a power of two.  Collect ~E
     into BUILDER (vector case) unless T is uniform, in which case the
     first element decides and a splat is returned immediately.  */
  do
    {
      if (idx > 0)
	cst = vector_cst_elt (t, idx);
      wide_int icst = wi::to_wide (cst);
      wide_int inv =  wi::bit_not (icst);
      icst = wi::add (1, inv);
      if (wi::popcount (icst) != 1)
	return NULL_TREE;

      tree newcst = wide_int_to_tree (ty, inv);

      if (uniform)
	return build_uniform_cst (newtype, newcst);

      builder.quick_push (newcst);
    }
  while (++idx < nelts);

  return builder.build ();
}
   10520 
   10521 /* If VECTOR_CST T has a single nonzero element, return the index of that
   10522    element, otherwise return -1.  */
   10523 
   10524 int
   10525 single_nonzero_element (const_tree t)
   10526 {
   10527   unsigned HOST_WIDE_INT nelts;
   10528   unsigned int repeat_nelts;
   10529   if (VECTOR_CST_NELTS (t).is_constant (&nelts))
   10530     repeat_nelts = nelts;
   10531   else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
   10532     {
   10533       nelts = vector_cst_encoded_nelts (t);
   10534       repeat_nelts = VECTOR_CST_NPATTERNS (t);
   10535     }
   10536   else
   10537     return -1;
   10538 
   10539   int res = -1;
   10540   for (unsigned int i = 0; i < nelts; ++i)
   10541     {
   10542       tree elt = vector_cst_elt (t, i);
   10543       if (!integer_zerop (elt) && !real_zerop (elt))
   10544 	{
   10545 	  if (res >= 0 || i >= repeat_nelts)
   10546 	    return -1;
   10547 	  res = i;
   10548 	}
   10549     }
   10550   return res;
   10551 }
   10552 
   10553 /* Build an empty statement at location LOC.  */
   10554 
   10555 tree
   10556 build_empty_stmt (location_t loc)
   10557 {
   10558   tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
   10559   SET_EXPR_LOCATION (t, loc);
   10560   return t;
   10561 }
   10562 
   10563 
   10564 /* Build an OMP clause with code CODE.  LOC is the location of the
   10565    clause.  */
   10566 
   10567 tree
   10568 build_omp_clause (location_t loc, enum omp_clause_code code)
   10569 {
   10570   tree t;
   10571   int size, length;
   10572 
   10573   length = omp_clause_num_ops[code];
   10574   size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
   10575 
   10576   record_node_allocation_statistics (OMP_CLAUSE, size);
   10577 
   10578   t = (tree) ggc_internal_alloc (size);
   10579   memset (t, 0, size);
   10580   TREE_SET_CODE (t, OMP_CLAUSE);
   10581   OMP_CLAUSE_SET_CODE (t, code);
   10582   OMP_CLAUSE_LOCATION (t) = loc;
   10583 
   10584   return t;
   10585 }
   10586 
   10587 /* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
   10588    includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
   10589    Except for the CODE and operand count field, other storage for the
   10590    object is initialized to zeros.  */
   10591 
   10592 tree
   10593 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
   10594 {
   10595   tree t;
   10596   int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
   10597 
   10598   gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
   10599   gcc_assert (len >= 1);
   10600 
   10601   record_node_allocation_statistics (code, length);
   10602 
   10603   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
   10604 
   10605   TREE_SET_CODE (t, code);
   10606 
   10607   /* Can't use TREE_OPERAND to store the length because if checking is
   10608      enabled, it will try to check the length before we store it.  :-P  */
   10609   t->exp.operands[0] = build_int_cst (sizetype, len);
   10610 
   10611   return t;
   10612 }
   10613 
   10614 /* Helper function for build_call_* functions; build a CALL_EXPR with
   10615    indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
   10616    the argument slots.  */
   10617 
   10618 static tree
   10619 build_call_1 (tree return_type, tree fn, int nargs)
   10620 {
   10621   tree t;
   10622 
   10623   t = build_vl_exp (CALL_EXPR, nargs + 3);
   10624   TREE_TYPE (t) = return_type;
   10625   CALL_EXPR_FN (t) = fn;
   10626   CALL_EXPR_STATIC_CHAIN (t) = NULL;
   10627 
   10628   return t;
   10629 }
   10630 
   10631 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
   10632    FN and a null static chain slot.  NARGS is the number of call arguments
   10633    which are specified as "..." arguments.  */
   10634 
tree
build_call_nary (tree return_type, tree fn, int nargs, ...)
{
  /* Package the "..." arguments up and let the va_list variant do the
     actual construction.  */
  va_list args;
  va_start (args, nargs);
  tree call = build_call_valist (return_type, fn, nargs, args);
  va_end (args);
  return call;
}
   10645 
   10646 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
   10647    FN and a null static chain slot.  NARGS is the number of call arguments
   10648    which are specified as a va_list ARGS.  */
   10649 
tree
build_call_valist (tree return_type, tree fn, int nargs, va_list args)
{
  /* Allocate the CALL_EXPR shell, then fill in each argument slot
     from the va_list before finalizing the operand flags.  */
  tree call = build_call_1 (return_type, fn, nargs);
  for (int i = 0; i < nargs; i++)
    CALL_EXPR_ARG (call, i) = va_arg (args, tree);
  process_call_operands (call);
  return call;
}
   10662 
   10663 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
   10664    FN and a null static chain slot.  NARGS is the number of call arguments
   10665    which are specified as a tree array ARGS.  */
   10666 
   10667 tree
   10668 build_call_array_loc (location_t loc, tree return_type, tree fn,
   10669 		      int nargs, const tree *args)
   10670 {
   10671   tree t;
   10672   int i;
   10673 
   10674   t = build_call_1 (return_type, fn, nargs);
   10675   for (i = 0; i < nargs; i++)
   10676     CALL_EXPR_ARG (t, i) = args[i];
   10677   process_call_operands (t);
   10678   SET_EXPR_LOCATION (t, loc);
   10679   return t;
   10680 }
   10681 
   10682 /* Like build_call_array, but takes a vec.  */
   10683 
   10684 tree
   10685 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
   10686 {
   10687   tree ret, t;
   10688   unsigned int ix;
   10689 
   10690   ret = build_call_1 (return_type, fn, vec_safe_length (args));
   10691   FOR_EACH_VEC_SAFE_ELT (args, ix, t)
   10692     CALL_EXPR_ARG (ret, ix) = t;
   10693   process_call_operands (ret);
   10694   return ret;
   10695 }
   10696 
   10697 /* Conveniently construct a function call expression.  FNDECL names the
   10698    function to be called and N arguments are passed in the array
   10699    ARGARRAY.  */
   10700 
   10701 tree
   10702 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
   10703 {
   10704   tree fntype = TREE_TYPE (fndecl);
   10705   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
   10706 
   10707   return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
   10708 }
   10709 
   10710 /* Conveniently construct a function call expression.  FNDECL names the
   10711    function to be called and the arguments are passed in the vector
   10712    VEC.  */
   10713 
   10714 tree
   10715 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
   10716 {
   10717   return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
   10718 				    vec_safe_address (vec));
   10719 }
   10720 
   10721 
   10722 /* Conveniently construct a function call expression.  FNDECL names the
   10723    function to be called, N is the number of arguments, and the "..."
   10724    parameters are the argument expressions.  */
   10725 
tree
build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
{
  /* Gather the N "..." argument trees into a stack-allocated array,
     then delegate to the array-based builder.  */
  tree *argv = XALLOCAVEC (tree, n);
  va_list args;
  va_start (args, n);
  for (int i = 0; i < n; i++)
    argv[i] = va_arg (args, tree);
  va_end (args);
  return build_call_expr_loc_array (loc, fndecl, n, argv);
}
   10739 
   10740 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
   10741    varargs macros aren't supported by all bootstrap compilers.  */
   10742 
tree
build_call_expr (tree fndecl, int n, ...)
{
  /* Same as build_call_expr_loc with UNKNOWN_LOCATION; kept separate
     because not all bootstrap compilers support varargs macros.  */
  tree *argv = XALLOCAVEC (tree, n);
  va_list args;
  va_start (args, n);
  for (int i = 0; i < n; i++)
    argv[i] = va_arg (args, tree);
  va_end (args);
  return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argv);
}
   10756 
   10757 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
   10758    type TYPE.  This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
   10759    It will get gimplified later into an ordinary internal function.  */
   10760 
   10761 tree
   10762 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
   10763 				    tree type, int n, const tree *args)
   10764 {
   10765   tree t = build_call_1 (type, NULL_TREE, n);
   10766   for (int i = 0; i < n; ++i)
   10767     CALL_EXPR_ARG (t, i) = args[i];
   10768   SET_EXPR_LOCATION (t, loc);
   10769   CALL_EXPR_IFN (t) = ifn;
   10770   process_call_operands (t);
   10771   return t;
   10772 }
   10773 
   10774 /* Build internal call expression.  This is just like CALL_EXPR, except
   10775    its CALL_EXPR_FN is NULL.  It will get gimplified later into ordinary
   10776    internal function.  */
   10777 
tree
build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
			      tree type, int n, ...)
{
  /* Gather the "..." arguments and hand off to the array variant.  */
  tree *argv = XALLOCAVEC (tree, n);
  va_list args;
  va_start (args, n);
  for (int i = 0; i < n; i++)
    argv[i] = va_arg (args, tree);
  va_end (args);
  return build_call_expr_internal_loc_array (loc, ifn, type, n, argv);
}
   10792 
   10793 /* Return a function call to FN, if the target is guaranteed to support it,
   10794    or null otherwise.
   10795 
   10796    N is the number of arguments, passed in the "...", and TYPE is the
   10797    type of the return value.  */
   10798 
tree
maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
			   int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  /* Collect the N "..." argument trees into a stack-allocated array.  */
  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  if (internal_fn_p (fn))
    {
      internal_fn ifn = as_internal_fn (fn);
      if (direct_internal_fn_p (ifn))
	{
	  /* A directly-mapped internal function is only usable when the
	     target supports it for these argument/return types.  */
	  tree_pair types = direct_internal_fn_types (ifn, type, argarray);
	  if (!direct_internal_fn_supported_p (ifn, types,
					       OPTIMIZE_FOR_BOTH))
	    return NULL_TREE;
	}
      /* Non-direct internal functions are treated as always available.  */
      return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
    }
  else
    {
      /* For a built-in, require an implicit declaration; without one we
	 cannot guarantee the target supports the call.  */
      tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
      if (!fndecl)
	return NULL_TREE;
      return build_call_expr_loc_array (loc, fndecl, n, argarray);
    }
}
   10831 
   10832 /* Return a function call to the appropriate builtin alloca variant.
   10833 
   10834    SIZE is the size to be allocated.  ALIGN, if non-zero, is the requested
   10835    alignment of the allocated area.  MAX_SIZE, if non-negative, is an upper
   10836    bound for SIZE in case it is not a fixed value.  */
   10837 
   10838 tree
   10839 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
   10840 {
   10841   if (max_size >= 0)
   10842     {
   10843       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
   10844       return
   10845 	build_call_expr (t, 3, size, size_int (align), size_int (max_size));
   10846     }
   10847   else if (align > 0)
   10848     {
   10849       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
   10850       return build_call_expr (t, 2, size, size_int (align));
   10851     }
   10852   else
   10853     {
   10854       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
   10855       return build_call_expr (t, 1, size);
   10856     }
   10857 }
   10858 
   10859 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
   10860    if SIZE == -1) and return a tree node representing char* pointer to
   10861    it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  When STR is nonnull
   10862    the STRING_CST value is the LEN bytes at STR (the representation
   10863    of the string, which may be wide).  Otherwise it's all zeros.  */
   10864 
tree
build_string_literal (unsigned len, const char *str /* = NULL */,
		      tree eltype /* = char_type_node */,
		      unsigned HOST_WIDE_INT size /* = -1 */)
{
  /* build_string zero-fills the representation when STR is null.  */
  tree t = build_string (len, str);
  /* Set the maximum valid index based on the string length or SIZE.  */
  unsigned HOST_WIDE_INT maxidx
    = (size == HOST_WIDE_INT_M1U ? len : size) - 1;

  /* Give the STRING_CST a constant array type "const ELTYPE[maxidx+1]".  */
  tree index = build_index_type (size_int (maxidx));
  eltype = build_type_variant (eltype, 1, 0);
  tree type = build_array_type (eltype, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Return &string[0], expressed as ADDR_EXPR of an ARRAY_REF so the
     result has pointer-to-element type.  */
  type = build_pointer_type (eltype);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, eltype,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
   10889 
   10890 
   10891 
   10892 /* Return true if T (assumed to be a DECL) must be assigned a memory
   10893    location.  */
   10894 
   10895 bool
   10896 needs_to_live_in_memory (const_tree t)
   10897 {
   10898   return (TREE_ADDRESSABLE (t)
   10899 	  || is_global_var (t)
   10900 	  || (TREE_CODE (t) == RESULT_DECL
   10901 	      && !DECL_BY_REFERENCE (t)
   10902 	      && aggregate_value_p (t, current_function_decl)));
   10903 }
   10904 
   10905 /* Return value of a constant X and sign-extend it.  */
   10906 
HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* Sign-extend from bit BITS-1.  The shift is split in two
	 ("<< (bits - 1) << 1") so the shift count never reaches the
	 full HOST_WIDE_INT width, which would be undefined.  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	/* Set every bit above the sign bit.  */
	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
	/* Clear every bit above the sign bit.  */
	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  return val;
}
   10927 
   10928 /* If TYPE is an integral or pointer type, return an integer type with
   10929    the same precision which is unsigned iff UNSIGNEDP is true, or itself
   10930    if TYPE is already an integer type of signedness UNSIGNEDP.
   10931    If TYPE is a floating-point type, return an integer type with the same
   10932    bitsize and with the signedness given by UNSIGNEDP; this is useful
   10933    when doing bit-level operations on a floating-point value.  */
   10934 
   10935 tree
   10936 signed_or_unsigned_type_for (int unsignedp, tree type)
   10937 {
   10938   if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
   10939     return type;
   10940 
   10941   if (TREE_CODE (type) == VECTOR_TYPE)
   10942     {
   10943       tree inner = TREE_TYPE (type);
   10944       tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
   10945       if (!inner2)
   10946 	return NULL_TREE;
   10947       if (inner == inner2)
   10948 	return type;
   10949       return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
   10950     }
   10951 
   10952   if (TREE_CODE (type) == COMPLEX_TYPE)
   10953     {
   10954       tree inner = TREE_TYPE (type);
   10955       tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
   10956       if (!inner2)
   10957 	return NULL_TREE;
   10958       if (inner == inner2)
   10959 	return type;
   10960       return build_complex_type (inner2);
   10961     }
   10962 
   10963   unsigned int bits;
   10964   if (INTEGRAL_TYPE_P (type)
   10965       || POINTER_TYPE_P (type)
   10966       || TREE_CODE (type) == OFFSET_TYPE)
   10967     bits = TYPE_PRECISION (type);
   10968   else if (TREE_CODE (type) == REAL_TYPE)
   10969     bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
   10970   else
   10971     return NULL_TREE;
   10972 
   10973   return build_nonstandard_integer_type (bits, unsignedp);
   10974 }
   10975 
   10976 /* If TYPE is an integral or pointer type, return an integer type with
   10977    the same precision which is unsigned, or itself if TYPE is already an
   10978    unsigned integer type.  If TYPE is a floating-point type, return an
   10979    unsigned integer type with the same bitsize as TYPE.  */
   10980 
   10981 tree
   10982 unsigned_type_for (tree type)
   10983 {
   10984   return signed_or_unsigned_type_for (1, type);
   10985 }
   10986 
   10987 /* If TYPE is an integral or pointer type, return an integer type with
   10988    the same precision which is signed, or itself if TYPE is already a
   10989    signed integer type.  If TYPE is a floating-point type, return a
   10990    signed integer type with the same bitsize as TYPE.  */
   10991 
   10992 tree
   10993 signed_type_for (tree type)
   10994 {
   10995   return signed_or_unsigned_type_for (0, type);
   10996 }
   10997 
   10998 /* - For VECTOR_TYPEs:
   10999     - The truth type must be a VECTOR_BOOLEAN_TYPE.
   11000     - The number of elements must match (known_eq).
   11001     - targetm.vectorize.get_mask_mode exists, and exactly
   11002       the same mode as the truth type.
   11003    - Otherwise, the truth type must be a BOOLEAN_TYPE
   11004      or useless_type_conversion_p to BOOLEAN_TYPE.  */
   11005 bool
   11006 is_truth_type_for (tree type, tree truth_type)
   11007 {
   11008   machine_mode mask_mode = TYPE_MODE (truth_type);
   11009   machine_mode vmode = TYPE_MODE (type);
   11010   machine_mode tmask_mode;
   11011 
   11012   if (TREE_CODE (type) == VECTOR_TYPE)
   11013     {
   11014       if (VECTOR_BOOLEAN_TYPE_P (truth_type)
   11015 	  && known_eq (TYPE_VECTOR_SUBPARTS (type),
   11016 		       TYPE_VECTOR_SUBPARTS (truth_type))
   11017 	  && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
   11018 	  && tmask_mode == mask_mode)
   11019 	return true;
   11020 
   11021       return false;
   11022     }
   11023 
   11024   return useless_type_conversion_p (boolean_type_node, truth_type);
   11025 }
   11026 
   11027 /* If TYPE is a vector type, return a signed integer vector type with the
   11028    same width and number of subparts. Otherwise return boolean_type_node.  */
   11029 
   11030 tree
   11031 truth_type_for (tree type)
   11032 {
   11033   if (TREE_CODE (type) == VECTOR_TYPE)
   11034     {
   11035       if (VECTOR_BOOLEAN_TYPE_P (type))
   11036 	return type;
   11037       return build_truth_vector_type_for (type);
   11038     }
   11039   else
   11040     return boolean_type_node;
   11041 }
   11042 
   11043 /* Returns the largest value obtainable by casting something in INNER type to
   11044    OUTER type.  */
   11045 
   11046 tree
   11047 upper_bound_in_type (tree outer, tree inner)
   11048 {
   11049   unsigned int det = 0;
   11050   unsigned oprec = TYPE_PRECISION (outer);
   11051   unsigned iprec = TYPE_PRECISION (inner);
   11052   unsigned prec;
   11053 
   11054   /* Compute a unique number for every combination.  */
   11055   det |= (oprec > iprec) ? 4 : 0;
   11056   det |= TYPE_UNSIGNED (outer) ? 2 : 0;
   11057   det |= TYPE_UNSIGNED (inner) ? 1 : 0;
   11058 
   11059   /* Determine the exponent to use.  */
   11060   switch (det)
   11061     {
   11062     case 0:
   11063     case 1:
   11064       /* oprec <= iprec, outer: signed, inner: don't care.  */
   11065       prec = oprec - 1;
   11066       break;
   11067     case 2:
   11068     case 3:
   11069       /* oprec <= iprec, outer: unsigned, inner: don't care.  */
   11070       prec = oprec;
   11071       break;
   11072     case 4:
   11073       /* oprec > iprec, outer: signed, inner: signed.  */
   11074       prec = iprec - 1;
   11075       break;
   11076     case 5:
   11077       /* oprec > iprec, outer: signed, inner: unsigned.  */
   11078       prec = iprec;
   11079       break;
   11080     case 6:
   11081       /* oprec > iprec, outer: unsigned, inner: signed.  */
   11082       prec = oprec;
   11083       break;
   11084     case 7:
   11085       /* oprec > iprec, outer: unsigned, inner: unsigned.  */
   11086       prec = iprec;
   11087       break;
   11088     default:
   11089       gcc_unreachable ();
   11090     }
   11091 
   11092   return wide_int_to_tree (outer,
   11093 			   wi::mask (prec, false, TYPE_PRECISION (outer)));
   11094 }
   11095 
   11096 /* Returns the smallest value obtainable by casting something in INNER type to
   11097    OUTER type.  */
   11098 
   11099 tree
   11100 lower_bound_in_type (tree outer, tree inner)
   11101 {
   11102   unsigned oprec = TYPE_PRECISION (outer);
   11103   unsigned iprec = TYPE_PRECISION (inner);
   11104 
   11105   /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
   11106      and obtain 0.  */
   11107   if (TYPE_UNSIGNED (outer)
   11108       /* If we are widening something of an unsigned type, OUTER type
   11109 	 contains all values of INNER type.  In particular, both INNER
   11110 	 and OUTER types have zero in common.  */
   11111       || (oprec > iprec && TYPE_UNSIGNED (inner)))
   11112     return build_int_cst (outer, 0);
   11113   else
   11114     {
   11115       /* If we are widening a signed type to another signed type, we
   11116 	 want to obtain -2^^(iprec-1).  If we are keeping the
   11117 	 precision or narrowing to a signed type, we want to obtain
   11118 	 -2^(oprec-1).  */
   11119       unsigned prec = oprec > iprec ? iprec : oprec;
   11120       return wide_int_to_tree (outer,
   11121 			       wi::mask (prec - 1, true,
   11122 					 TYPE_PRECISION (outer)));
   11123     }
   11124 }
   11125 
   11126 /* Return nonzero if two operands that are suitable for PHI nodes are
   11127    necessarily equal.  Specifically, both ARG0 and ARG1 must be either
   11128    SSA_NAME or invariant.  Note that this is strictly an optimization.
   11129    That is, callers of this function can directly call operand_equal_p
   11130    and get the same result, only slower.  */
   11131 
   11132 int
   11133 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
   11134 {
   11135   if (arg0 == arg1)
   11136     return 1;
   11137   if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
   11138     return 0;
   11139   return operand_equal_p (arg0, arg1, 0);
   11140 }
   11141 
   11142 /* Returns number of zeros at the end of binary representation of X.  */
   11143 
   11144 tree
   11145 num_ending_zeros (const_tree x)
   11146 {
   11147   return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
   11148 }
   11149 
   11150 
/* Walk NODE with walk_tree_1 and, if that yields a non-NULL result,
   return it from the enclosing function immediately.  Relies on
   RESULT, FUNC, DATA, PSET and LH being in scope at the use site.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)
   11159 
   11160 /* This is a subroutine of walk_tree that walks field of TYPE that are to
   11161    be walked whenever a type is seen in the tree.  Rest of operands and return
   11162    value are as for walk_tree.  */
   11163 
static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  /* Dispatch on the type's code; WALK_SUBTREE returns early from this
     function as soon as any walk produces a non-NULL result.  */
  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* fall through */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this nodes's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      /* Other type codes have no expression-valued fields to walk.  */
      break;
    }

  return NULL_TREE;
}
   11239 
   11240 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   11241    called with the DATA and the address of each sub-tree.  If FUNC returns a
   11242    non-NULL value, the traversal is stopped, and the value returned by FUNC
   11243    is returned.  If PSET is non-NULL it is used to record the nodes visited,
   11244    and to avoid visiting a node more than once.  */
   11245 
   11246 tree
   11247 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
   11248 	     hash_set<tree> *pset, walk_tree_lh lh)
   11249 {
   11250   enum tree_code code;
   11251   int walk_subtrees;
   11252   tree result;
   11253 
   11254 #define WALK_SUBTREE_TAIL(NODE)				\
   11255   do							\
   11256     {							\
   11257        tp = & (NODE);					\
   11258        goto tail_recurse;				\
   11259     }							\
   11260   while (0)
   11261 
   11262  tail_recurse:
   11263   /* Skip empty subtrees.  */
   11264   if (!*tp)
   11265     return NULL_TREE;
   11266 
   11267   /* Don't walk the same tree twice, if the user has requested
   11268      that we avoid doing so.  */
   11269   if (pset && pset->add (*tp))
   11270     return NULL_TREE;
   11271 
   11272   /* Call the function.  */
   11273   walk_subtrees = 1;
   11274   result = (*func) (tp, &walk_subtrees, data);
   11275 
   11276   /* If we found something, return it.  */
   11277   if (result)
   11278     return result;
   11279 
   11280   code = TREE_CODE (*tp);
   11281 
   11282   /* Even if we didn't, FUNC may have decided that there was nothing
   11283      interesting below this point in the tree.  */
   11284   if (!walk_subtrees)
   11285     {
   11286       /* But we still need to check our siblings.  */
   11287       if (code == TREE_LIST)
   11288 	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
   11289       else if (code == OMP_CLAUSE)
   11290 	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
   11291       else
   11292 	return NULL_TREE;
   11293     }
   11294 
   11295   if (lh)
   11296     {
   11297       result = (*lh) (tp, &walk_subtrees, func, data, pset);
   11298       if (result || !walk_subtrees)
   11299         return result;
   11300     }
   11301 
   11302   switch (code)
   11303     {
   11304     case ERROR_MARK:
   11305     case IDENTIFIER_NODE:
   11306     case INTEGER_CST:
   11307     case REAL_CST:
   11308     case FIXED_CST:
   11309     case STRING_CST:
   11310     case BLOCK:
   11311     case PLACEHOLDER_EXPR:
   11312     case SSA_NAME:
   11313     case FIELD_DECL:
   11314     case RESULT_DECL:
   11315       /* None of these have subtrees other than those already walked
   11316 	 above.  */
   11317       break;
   11318 
   11319     case TREE_LIST:
   11320       WALK_SUBTREE (TREE_VALUE (*tp));
   11321       WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
   11322 
   11323     case TREE_VEC:
   11324       {
   11325 	int len = TREE_VEC_LENGTH (*tp);
   11326 
   11327 	if (len == 0)
   11328 	  break;
   11329 
   11330 	/* Walk all elements but the first.  */
   11331 	while (--len)
   11332 	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
   11333 
   11334 	/* Now walk the first one as a tail call.  */
   11335 	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
   11336       }
   11337 
   11338     case VECTOR_CST:
   11339       {
   11340 	unsigned len = vector_cst_encoded_nelts (*tp);
   11341 	if (len == 0)
   11342 	  break;
   11343 	/* Walk all elements but the first.  */
   11344 	while (--len)
   11345 	  WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
   11346 	/* Now walk the first one as a tail call.  */
   11347 	WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
   11348       }
   11349 
   11350     case COMPLEX_CST:
   11351       WALK_SUBTREE (TREE_REALPART (*tp));
   11352       WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
   11353 
   11354     case CONSTRUCTOR:
   11355       {
   11356 	unsigned HOST_WIDE_INT idx;
   11357 	constructor_elt *ce;
   11358 
   11359 	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
   11360 	     idx++)
   11361 	  WALK_SUBTREE (ce->value);
   11362       }
   11363       break;
   11364 
   11365     case SAVE_EXPR:
   11366       WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
   11367 
   11368     case BIND_EXPR:
   11369       {
   11370 	tree decl;
   11371 	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
   11372 	  {
   11373 	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
   11374 	       into declarations that are just mentioned, rather than
   11375 	       declared; they don't really belong to this part of the tree.
   11376 	       And, we can see cycles: the initializer for a declaration
   11377 	       can refer to the declaration itself.  */
   11378 	    WALK_SUBTREE (DECL_INITIAL (decl));
   11379 	    WALK_SUBTREE (DECL_SIZE (decl));
   11380 	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
   11381 	  }
   11382 	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
   11383       }
   11384 
   11385     case STATEMENT_LIST:
   11386       {
   11387 	tree_stmt_iterator i;
   11388 	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
   11389 	  WALK_SUBTREE (*tsi_stmt_ptr (i));
   11390       }
   11391       break;
   11392 
   11393     case OMP_CLAUSE:
   11394       {
   11395 	int len = omp_clause_num_ops[OMP_CLAUSE_CODE (*tp)];
   11396 	for (int i = 0; i < len; i++)
   11397 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
   11398 	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
   11399       }
   11400 
   11401     case TARGET_EXPR:
   11402       {
   11403 	int i, len;
   11404 
   11405 	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
   11406 	   But, we only want to walk once.  */
   11407 	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
   11408 	for (i = 0; i < len; ++i)
   11409 	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
   11410 	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
   11411       }
   11412 
   11413     case DECL_EXPR:
   11414       /* If this is a TYPE_DECL, walk into the fields of the type that it's
   11415 	 defining.  We only want to walk into these fields of a type in this
   11416 	 case and not in the general case of a mere reference to the type.
   11417 
   11418 	 The criterion is as follows: if the field can be an expression, it
   11419 	 must be walked only here.  This should be in keeping with the fields
   11420 	 that are directly gimplified in gimplify_type_sizes in order for the
   11421 	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
   11422 	 variable-sized types.
   11423 
   11424 	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
   11425       if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
   11426 	{
   11427 	  /* Call the function for the decl so e.g. copy_tree_body_r can
   11428 	     replace it with the remapped one.  */
   11429 	  result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
   11430 	  if (result || !walk_subtrees)
   11431 	    return result;
   11432 
   11433 	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
   11434 	  if (TREE_CODE (*type_p) == ERROR_MARK)
   11435 	    return NULL_TREE;
   11436 
   11437 	  /* Call the function for the type.  See if it returns anything or
   11438 	     doesn't want us to continue.  If we are to continue, walk both
   11439 	     the normal fields and those for the declaration case.  */
   11440 	  result = (*func) (type_p, &walk_subtrees, data);
   11441 	  if (result || !walk_subtrees)
   11442 	    return result;
   11443 
   11444 	  /* But do not walk a pointed-to type since it may itself need to
   11445 	     be walked in the declaration case if it isn't anonymous.  */
   11446 	  if (!POINTER_TYPE_P (*type_p))
   11447 	    {
   11448 	      result = walk_type_fields (*type_p, func, data, pset, lh);
   11449 	      if (result)
   11450 		return result;
   11451 	    }
   11452 
   11453 	  /* If this is a record type, also walk the fields.  */
   11454 	  if (RECORD_OR_UNION_TYPE_P (*type_p))
   11455 	    {
   11456 	      tree field;
   11457 
   11458 	      for (field = TYPE_FIELDS (*type_p); field;
   11459 		   field = DECL_CHAIN (field))
   11460 		{
   11461 		  /* We'd like to look at the type of the field, but we can
   11462 		     easily get infinite recursion.  So assume it's pointed
   11463 		     to elsewhere in the tree.  Also, ignore things that
   11464 		     aren't fields.  */
   11465 		  if (TREE_CODE (field) != FIELD_DECL)
   11466 		    continue;
   11467 
   11468 		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
   11469 		  WALK_SUBTREE (DECL_SIZE (field));
   11470 		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
   11471 		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
   11472 		    WALK_SUBTREE (DECL_QUALIFIER (field));
   11473 		}
   11474 	    }
   11475 
   11476 	  /* Same for scalar types.  */
   11477 	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
   11478 		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
   11479 		   || TREE_CODE (*type_p) == INTEGER_TYPE
   11480 		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
   11481 		   || TREE_CODE (*type_p) == REAL_TYPE)
   11482 	    {
   11483 	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
   11484 	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
   11485 	    }
   11486 
   11487 	  WALK_SUBTREE (TYPE_SIZE (*type_p));
   11488 	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
   11489 	}
   11490       /* FALLTHRU */
   11491 
   11492     default:
   11493       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
   11494 	{
   11495 	  int i, len;
   11496 
   11497 	  /* Walk over all the sub-trees of this operand.  */
   11498 	  len = TREE_OPERAND_LENGTH (*tp);
   11499 
   11500 	  /* Go through the subtrees.  We need to do this in forward order so
   11501 	     that the scope of a FOR_EXPR is handled properly.  */
   11502 	  if (len)
   11503 	    {
   11504 	      for (i = 0; i < len - 1; ++i)
   11505 		WALK_SUBTREE (TREE_OPERAND (*tp, i));
   11506 	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
   11507 	    }
   11508 	}
   11509       /* If this is a type, walk the needed fields in the type.  */
   11510       else if (TYPE_P (*tp))
   11511 	return walk_type_fields (*tp, func, data, pset, lh);
   11512       break;
   11513     }
   11514 
   11515   /* We didn't find what we were looking for.  */
   11516   return NULL_TREE;
   11517 
   11518 #undef WALK_SUBTREE_TAIL
   11519 }
   11520 #undef WALK_SUBTREE
   11521 
   11522 /* Like walk_tree, but does not walk duplicate nodes more than once.  */
   11523 
   11524 tree
   11525 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
   11526 				walk_tree_lh lh)
   11527 {
   11528   tree result;
   11529 
   11530   hash_set<tree> pset;
   11531   result = walk_tree_1 (tp, func, data, &pset, lh);
   11532   return result;
   11533 }
   11534 
   11535 
   11536 tree
   11537 tree_block (tree t)
   11538 {
   11539   const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
   11540 
   11541   if (IS_EXPR_CODE_CLASS (c))
   11542     return LOCATION_BLOCK (t->exp.locus);
   11543   gcc_unreachable ();
   11544   return NULL;
   11545 }
   11546 
   11547 void
   11548 tree_set_block (tree t, tree b)
   11549 {
   11550   const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
   11551 
   11552   if (IS_EXPR_CODE_CLASS (c))
   11553     {
   11554       t->exp.locus = set_block (t->exp.locus, b);
   11555     }
   11556   else
   11557     gcc_unreachable ();
   11558 }
   11559 
   11560 /* Create a nameless artificial label and put it in the current
   11561    function context.  The label has a location of LOC.  Returns the
   11562    newly created label.  */
   11563 
   11564 tree
   11565 create_artificial_label (location_t loc)
   11566 {
   11567   tree lab = build_decl (loc,
   11568       			 LABEL_DECL, NULL_TREE, void_type_node);
   11569 
   11570   DECL_ARTIFICIAL (lab) = 1;
   11571   DECL_IGNORED_P (lab) = 1;
   11572   DECL_CONTEXT (lab) = current_function_decl;
   11573   return lab;
   11574 }
   11575 
   11576 /*  Given a tree, try to return a useful variable name that we can use
   11577     to prefix a temporary that is being assigned the value of the tree.
   11578     I.E. given  <temp> = &A, return A.  */
   11579 
   11580 const char *
   11581 get_name (tree t)
   11582 {
   11583   tree stripped_decl;
   11584 
   11585   stripped_decl = t;
   11586   STRIP_NOPS (stripped_decl);
   11587   if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
   11588     return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
   11589   else if (TREE_CODE (stripped_decl) == SSA_NAME)
   11590     {
   11591       tree name = SSA_NAME_IDENTIFIER (stripped_decl);
   11592       if (!name)
   11593 	return NULL;
   11594       return IDENTIFIER_POINTER (name);
   11595     }
   11596   else
   11597     {
   11598       switch (TREE_CODE (stripped_decl))
   11599 	{
   11600 	case ADDR_EXPR:
   11601 	  return get_name (TREE_OPERAND (stripped_decl, 0));
   11602 	default:
   11603 	  return NULL;
   11604 	}
   11605     }
   11606 }
   11607 
/* Return true if function type FNTYPE has a variable argument list.  */
   11609 
   11610 bool
   11611 stdarg_p (const_tree fntype)
   11612 {
   11613   function_args_iterator args_iter;
   11614   tree n = NULL_TREE, t;
   11615 
   11616   if (!fntype)
   11617     return false;
   11618 
   11619   FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
   11620     {
   11621       n = t;
   11622     }
   11623 
   11624   return n != NULL_TREE && n != void_type_node;
   11625 }
   11626 
/* Return true if function type FNTYPE has a prototype.  */
   11628 
   11629 bool
   11630 prototype_p (const_tree fntype)
   11631 {
   11632   tree t;
   11633 
   11634   gcc_assert (fntype != NULL_TREE);
   11635 
   11636   t = TYPE_ARG_TYPES (fntype);
   11637   return (t != NULL_TREE);
   11638 }
   11639 
/* If BLOCK is inlined from an __attribute__((__artificial__))
   routine, return pointer to location from where it has been
   called.  Returns NULL when BLOCK is not such an inlined frame.  */
location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  /* Walk outward through the chain of inlined-function BLOCKs.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	{
	  /* If AO is an artificial inline, point RET to the
	     call site locus at which it has been inlined and continue
	     the loop, in case AO's caller is also an artificial
	     inline.  */
	  if (DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    ret = &BLOCK_SOURCE_LOCATION (block);
	  else
	    break;
	}
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}
   11671 
   11672 
   11673 /* If EXP is inlined from an __attribute__((__artificial__))
   11674    function, return the location of the original call expression.  */
   11675 
   11676 location_t
   11677 tree_nonartificial_location (tree exp)
   11678 {
   11679   location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
   11680 
   11681   if (loc)
   11682     return *loc;
   11683   else
   11684     return EXPR_LOCATION (exp);
   11685 }
   11686 
/* Return the location into which EXP has been inlined.  Analogous
   to tree_nonartificial_location() above but not limited to artificial
   functions declared inline.  If SYSTEM_HEADER is true, return
   the macro expansion point of the location if it's in a system header */

location_t
tree_inlined_location (tree exp, bool system_header /* = true */)
{
  location_t loc = UNKNOWN_LOCATION;

  tree block = TREE_BLOCK (exp);

  /* Walk outward through the inlined-block chain; the last
     FUNCTION_DECL origin seen gives the outermost inlined call
     site.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	loc = BLOCK_SOURCE_LOCATION (block);
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }

  if (loc == UNKNOWN_LOCATION)
    {
      /* No inlined frame found; fall back to EXP's own location.  */
      loc = EXPR_LOCATION (exp);
      if (system_header)
	/* Only consider macro expansion when the block traversal failed
	   to find a location.  Otherwise it's not relevant.  */
	return expansion_point_location_if_in_system_header (loc);
    }

  return loc;
}
   11722 
   11723 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
   11724    nodes.  */
   11725 
   11726 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code.  */
   11727 
   11728 hashval_t
   11729 cl_option_hasher::hash (tree x)
   11730 {
   11731   const_tree const t = x;
   11732 
   11733   if (TREE_CODE (t) == OPTIMIZATION_NODE)
   11734     return cl_optimization_hash (TREE_OPTIMIZATION (t));
   11735   else if (TREE_CODE (t) == TARGET_OPTION_NODE)
   11736     return cl_target_option_hash (TREE_TARGET_OPTION (t));
   11737   else
   11738     gcc_unreachable ();
   11739 }
   11740 
/* Return nonzero if the value represented by X (an OPTIMIZATION or
   TARGET_OPTION tree node) is the same as that given by Y, a node of
   the same kind.  */
   11744 
   11745 bool
   11746 cl_option_hasher::equal (tree x, tree y)
   11747 {
   11748   const_tree const xt = x;
   11749   const_tree const yt = y;
   11750 
   11751   if (TREE_CODE (xt) != TREE_CODE (yt))
   11752     return 0;
   11753 
   11754   if (TREE_CODE (xt) == OPTIMIZATION_NODE)
   11755     return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
   11756 				      TREE_OPTIMIZATION (yt));
   11757   else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
   11758     return cl_target_option_eq (TREE_TARGET_OPTION (xt),
   11759 				TREE_TARGET_OPTION (yt));
   11760   else
   11761     gcc_unreachable ();
   11762 }
   11763 
   11764 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET.  */
   11765 
   11766 tree
   11767 build_optimization_node (struct gcc_options *opts,
   11768 			 struct gcc_options *opts_set)
   11769 {
   11770   tree t;
   11771 
   11772   /* Use the cache of optimization nodes.  */
   11773 
   11774   cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
   11775 			opts, opts_set);
   11776 
   11777   tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
   11778   t = *slot;
   11779   if (!t)
   11780     {
   11781       /* Insert this one into the hash table.  */
   11782       t = cl_optimization_node;
   11783       *slot = t;
   11784 
   11785       /* Make a new node for next time round.  */
   11786       cl_optimization_node = make_node (OPTIMIZATION_NODE);
   11787     }
   11788 
   11789   return t;
   11790 }
   11791 
   11792 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET.  */
   11793 
   11794 tree
   11795 build_target_option_node (struct gcc_options *opts,
   11796 			  struct gcc_options *opts_set)
   11797 {
   11798   tree t;
   11799 
   11800   /* Use the cache of optimization nodes.  */
   11801 
   11802   cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
   11803 			 opts, opts_set);
   11804 
   11805   tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
   11806   t = *slot;
   11807   if (!t)
   11808     {
   11809       /* Insert this one into the hash table.  */
   11810       t = cl_target_option_node;
   11811       *slot = t;
   11812 
   11813       /* Make a new node for next time round.  */
   11814       cl_target_option_node = make_node (TARGET_OPTION_NODE);
   11815     }
   11816 
   11817   return t;
   11818 }
   11819 
   11820 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
   11821    so that they aren't saved during PCH writing.  */
   11822 
   11823 void
   11824 prepare_target_option_nodes_for_pch (void)
   11825 {
   11826   hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
   11827   for (; iter != cl_option_hash_table->end (); ++iter)
   11828     if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
   11829       TREE_TARGET_GLOBALS (*iter) = NULL;
   11830 }
   11831 
   11832 /* Determine the "ultimate origin" of a block.  */
   11833 
   11834 tree
   11835 block_ultimate_origin (const_tree block)
   11836 {
   11837   tree origin = BLOCK_ABSTRACT_ORIGIN (block);
   11838 
   11839   if (origin == NULL_TREE)
   11840     return NULL_TREE;
   11841   else
   11842     {
   11843       gcc_checking_assert ((DECL_P (origin)
   11844 			    && DECL_ORIGIN (origin) == origin)
   11845 			   || BLOCK_ORIGIN (origin) == origin);
   11846       return origin;
   11847     }
   11848 }
   11849 
/* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
   no instruction.  */

bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      /* OUTER is a non-generic pointer: INNER must be a pointer into
	 the very same address space.  */
      if (!POINTER_TYPE_P (inner_type)
	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
	return false;
    }
  else if (POINTER_TYPE_P (inner_type)
	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
	 a non-generic address space.  */
      return false;
    }

  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
   11887 
   11888 /* Return true iff conversion in EXP generates no instruction.  Mark
   11889    it inline so that we fully inline into the stripping functions even
   11890    though we have two uses of this function.  */
   11891 
   11892 static inline bool
   11893 tree_nop_conversion (const_tree exp)
   11894 {
   11895   tree outer_type, inner_type;
   11896 
   11897   if (location_wrapper_p (exp))
   11898     return true;
   11899   if (!CONVERT_EXPR_P (exp)
   11900       && TREE_CODE (exp) != NON_LVALUE_EXPR)
   11901     return false;
   11902 
   11903   outer_type = TREE_TYPE (exp);
   11904   inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
   11905   if (!inner_type || inner_type == error_mark_node)
   11906     return false;
   11907 
   11908   return tree_nop_conversion_p (outer_type, inner_type);
   11909 }
   11910 
   11911 /* Return true iff conversion in EXP generates no instruction.  Don't
   11912    consider conversions changing the signedness.  */
   11913 
   11914 static bool
   11915 tree_sign_nop_conversion (const_tree exp)
   11916 {
   11917   tree outer_type, inner_type;
   11918 
   11919   if (!tree_nop_conversion (exp))
   11920     return false;
   11921 
   11922   outer_type = TREE_TYPE (exp);
   11923   inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
   11924 
   11925   return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
   11926 	  && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
   11927 }
   11928 
   11929 /* Strip conversions from EXP according to tree_nop_conversion and
   11930    return the resulting expression.  */
   11931 
   11932 tree
   11933 tree_strip_nop_conversions (tree exp)
   11934 {
   11935   while (tree_nop_conversion (exp))
   11936     exp = TREE_OPERAND (exp, 0);
   11937   return exp;
   11938 }
   11939 
   11940 /* Strip conversions from EXP according to tree_sign_nop_conversion
   11941    and return the resulting expression.  */
   11942 
   11943 tree
   11944 tree_strip_sign_nop_conversions (tree exp)
   11945 {
   11946   while (tree_sign_nop_conversion (exp))
   11947     exp = TREE_OPERAND (exp, 0);
   11948   return exp;
   11949 }
   11950 
/* Avoid any floating point extensions from EXP.  */
tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /*  For floating point constant look up the narrowest type that can hold
      it properly and handle it like (type)(narrowest_type)constant.
      This way we can optimize for instance a=a*2.0 where "a" is float
      but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      /* Try float first, then double; TYPE stays NULL if neither can
	 represent the constant exactly.  */
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real_truncate (type, orig);
    }

  /* Only conversion expressions can be stripped below.  */
  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  if (!FLOAT_TYPE_P (subt))
    return exp;

  /* Never strip across a binary/decimal floating point boundary.  */
  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  /* A narrowing conversion is not an extension: stop.  */
  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  /* Recurse to strip nested extensions as well.  */
  return strip_float_extensions (sub);
}
   11996 
   11997 /* Strip out all handled components that produce invariant
   11998    offsets.  */
   11999 
   12000 const_tree
   12001 strip_invariant_refs (const_tree op)
   12002 {
   12003   while (handled_component_p (op))
   12004     {
   12005       switch (TREE_CODE (op))
   12006 	{
   12007 	case ARRAY_REF:
   12008 	case ARRAY_RANGE_REF:
   12009 	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
   12010 	      || TREE_OPERAND (op, 2) != NULL_TREE
   12011 	      || TREE_OPERAND (op, 3) != NULL_TREE)
   12012 	    return NULL;
   12013 	  break;
   12014 
   12015 	case COMPONENT_REF:
   12016 	  if (TREE_OPERAND (op, 2) != NULL_TREE)
   12017 	    return NULL;
   12018 	  break;
   12019 
   12020 	default:;
   12021 	}
   12022       op = TREE_OPERAND (op, 0);
   12023     }
   12024 
   12025   return op;
   12026 }
   12027 
   12028 /* Strip handled components with zero offset from OP.  */
   12029 
   12030 tree
   12031 strip_zero_offset_components (tree op)
   12032 {
   12033   while (TREE_CODE (op) == COMPONENT_REF
   12034 	 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op, 1)))
   12035 	 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op, 1))))
   12036     op = TREE_OPERAND (op, 0);
   12037   return op;
   12038 }
   12039 
/* Cached "gcc" EH personality decl, GC-rooted via GTY.  */
static GTY(()) tree gcc_eh_personality_decl;

/* Return the GCC personality function decl.  */

tree
lhd_gcc_personality (void)
{
  /* Built lazily on first use and cached for later calls.  */
  if (!gcc_eh_personality_decl)
    gcc_eh_personality_decl = build_personality_function ("gcc");
  return gcc_eh_personality_decl;
}
   12051 
   12052 /* TARGET is a call target of GIMPLE call statement
   12053    (obtained by gimple_call_fn).  Return true if it is
   12054    OBJ_TYPE_REF representing an virtual call of C++ method.
   12055    (As opposed to OBJ_TYPE_REF representing objc calls
   12056    through a cast where middle-end devirtualization machinery
   12057    can't apply.)  FOR_DUMP_P is true when being called from
   12058    the dump routines.  */
   12059 
bool
virtual_method_call_p (const_tree target, bool for_dump_p)
{
  if (TREE_CODE (target) != OBJ_TYPE_REF)
    return false;
  /* TARGET is a pointer to the called function's type.  */
  tree t = TREE_TYPE (target);
  gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
  t = TREE_TYPE (t);
  /* A plain FUNCTION_TYPE (rather than METHOD_TYPE) indicates e.g. an
     objc-style call through a cast, not a C++ virtual call.  */
  if (TREE_CODE (t) == FUNCTION_TYPE)
    return false;
  gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
  /* If we do not have BINFO associated, it means that type was built
     without devirtualization enabled.  Do not consider this a virtual
     call.  */
  if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
    return false;
  return true;
}
   12078 
   12079 /* Lookup sub-BINFO of BINFO of TYPE at offset POS.  */
   12080 
   12081 static tree
   12082 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
   12083 {
   12084   unsigned int i;
   12085   tree base_binfo, b;
   12086 
   12087   for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
   12088     if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
   12089 	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
   12090       return base_binfo;
   12091     else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
   12092       return b;
   12093   return NULL;
   12094 }
   12095 
/* Try to find a base info of BINFO that would have its field decl at offset
   OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
   found, return it, otherwise return NULL_TREE.  */

tree
get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      /* Reached a binfo of the expected ODR type: done.  */
      if (types_same_for_odr (type, expected_type))
	  return binfo;
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      /* Find the artificial (base) field whose bit range contains
	 OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (known_in_range_p (offset, pos, size))
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (maybe_ne (offset, 0))
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.
	     NB: '/' binds tighter than '+', so despite the line break this
	     computes BINFO_OFFSET + pos / BITS_PER_UNIT.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			     / BITS_PER_UNIT);

	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  /* Use the matching direct base if any; otherwise search the
	     base hierarchy recursively.  */
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	 }

      /* Descend into the field's type with the remaining offset.  */
      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
   12157 
   12158 /* Returns true if X is a typedef decl.  */
   12159 
   12160 bool
   12161 is_typedef_decl (const_tree x)
   12162 {
   12163   return (x && TREE_CODE (x) == TYPE_DECL
   12164           && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
   12165 }
   12166 
   12167 /* Returns true iff TYPE is a type variant created for a typedef. */
   12168 
   12169 bool
   12170 typedef_variant_p (const_tree type)
   12171 {
   12172   return is_typedef_decl (TYPE_NAME (type));
   12173 }
   12174 
/* PR 84195: Replace control characters in "unescaped" with their
   escaped equivalents.  Allow newlines if -fmessage-length has
   been set to a non-zero value.  This is done here, rather than
   where the attribute is recorded as the message length can
   change between these two locations.  */

void
escaped_string::escape (const char *unescaped)
{
  char *escaped;
  size_t i, new_i, len;

  /* Release any buffer owned from a previous call.  */
  if (m_owned)
    free (m_str);

  /* Tentatively alias the input; a copy is made only if an escape
     turns out to be needed.  */
  m_str = const_cast<char *> (unescaped);
  m_owned = false;

  if (unescaped == NULL || *unescaped == 0)
    return;

  len = strlen (unescaped);
  escaped = NULL;
  new_i = 0;

  for (i = 0; i < len; i++)
    {
      char c = unescaped[i];

      if (!ISCNTRL (c))
	{
	  /* Ordinary character: copied through only once a new
	     buffer has been started.  */
	  if (escaped)
	    escaped[new_i++] = c;
	  continue;
	}

      /* Keep newlines verbatim while diagnostics are wrapping lines
	 (-fmessage-length non-zero).  */
      if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
	{
	  if (escaped == NULL)
	    {
	      /* We only allocate space for a new string if we
		 actually encounter a control character that
		 needs replacing.  Worst case every byte becomes a
		 two-character escape, hence len * 2 + 1.  */
	      escaped = (char *) xmalloc (len * 2 + 1);
	      strncpy (escaped, unescaped, i);
	      new_i = i;
	    }

	  escaped[new_i++] = '\\';

	  switch (c)
	    {
	    case '\a': escaped[new_i++] = 'a'; break;
	    case '\b': escaped[new_i++] = 'b'; break;
	    case '\f': escaped[new_i++] = 'f'; break;
	    case '\n': escaped[new_i++] = 'n'; break;
	    case '\r': escaped[new_i++] = 'r'; break;
	    case '\t': escaped[new_i++] = 't'; break;
	    case '\v': escaped[new_i++] = 'v'; break;
	    default:   escaped[new_i++] = '?'; break;
	    }
	}
      else if (escaped)
	escaped[new_i++] = c;
    }

  if (escaped)
    {
      escaped[new_i] = 0;
      m_str = escaped;
      m_owned = true;
    }
}
   12248 
/* Warn about a use of an identifier which was marked deprecated.  Returns
   whether a warning was given.  NODE is the decl or type being used;
   ATTR, when non-null, is the attribute list to search, otherwise it is
   looked up from NODE.  */

bool
warn_deprecated_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0 || !warn_deprecated_decl)
    return false;

  /* When the caller did not supply the attribute list, find it on NODE
     itself: directly for decls, via the stub decl for types.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
	  else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
		   != NULL_TREE)
	    {
	      /* Fall back to the main variant's stub decl and warn
		 about that type instead.  */
	      node = TREE_TYPE (decl);
	      attr = TYPE_ATTRIBUTES (node);
	    }
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* The attribute's argument, if present, is a custom message; escape
     any control characters in it before printing (PR 84195).  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  bool w = false;
  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, (const char *) msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      /* Prefer naming the type in the diagnostic; WHAT stays NULL_TREE
	 for anonymous types.  */
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated: %s", what, (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated", what);
	}
      else
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated: %s", (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated");
	}

      if (w && decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }

  return w;
}
   12337 
/* Error out with an identifier which was marked 'unavailable'.  NODE is
   the unavailable decl or type; ATTR, if non-null, is the attribute list
   to search, otherwise it is looked up from NODE.  Unlike the deprecation
   warning this emits a hard error and is not gated on any option.  */
void
error_unavailable_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0)
    return;

  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  /* For types the attribute list lives on the stub decl's type.  */
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("unavailable",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("unavailable", attr);

  /* The optional message is the string constant in the attribute's
     argument list.  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	error ("%qD is unavailable: %s", node, (const char *) msg);
      else
	error ("%qD is unavailable", node);
      inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      /* Prefer naming the type in the diagnostic when it has a name.  */
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    error ("%qE is unavailable: %s", what, (const char *) msg);
	  else
	    error ("%qE is unavailable", what);
	}
      else
	{
	  if (msg)
	    error ("type is unavailable: %s", (const char *) msg);
	  else
	    error ("type is unavailable");
	}

      if (decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }
}
   12409 
   12410 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
   12411    somewhere in it.  */
   12412 
   12413 bool
   12414 contains_bitfld_component_ref_p (const_tree ref)
   12415 {
   12416   while (handled_component_p (ref))
   12417     {
   12418       if (TREE_CODE (ref) == COMPONENT_REF
   12419           && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
   12420         return true;
   12421       ref = TREE_OPERAND (ref, 0);
   12422     }
   12423 
   12424   return false;
   12425 }
   12426 
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  STMT is the
   TRY_CATCH_EXPR; operand 0 is the try body and operand 1 the
   handler sequence.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  /* Otherwise dispatch on the form of the handler sequence; only its
     first statement determines which of the three cases applies.  */
  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
   12474 
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  /* Only the last statement of the block can prevent fallthrough;
     an empty block (null STMT) trivially falls through via the
     ERROR_MARK case.  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If there is a default: label or case labels cover all possible
	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
	 to some case label in all cases and all we care is whether the
	 SWITCH_BODY falls through.  */
      if (SWITCH_ALL_CASES_P (stmt))
	return block_may_fallthru (SWITCH_BODY (stmt));
      return true;

    case COND_EXPR:
      /* Falls through if either arm may fall through.  */
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      /* Transparent: look at the bound body.  */
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case EH_ELSE_EXPR:
      /* Consider only the normal (non-EH) path, operand 0.  */
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case MODIFY_EXPR:
      /* An assignment only matters if its RHS is a call that may not
	 return; otherwise it falls through.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      /* The initializer is operand 1.  */
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      /* Be conservative: an empty or erroneous block may fall through.  */
      return true;

    default:
      /* Let the front end decide for language-specific codes.  */
      return lang_hooks.block_may_fallthru (stmt);
    }
}
   12553 
/* True if we are using EH to handle cleanups.  Set once by a front end
   via using_eh_for_cleanups and queried via using_eh_for_cleanups_p.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  There is no way to unset it.  */
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}
   12564 
/* Query whether EH is used for cleanups.  Returns the flag set by
   using_eh_for_cleanups.  */
bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
   12571 
   12572 /* Wrapper for tree_code_name to ensure that tree code is valid */
   12573 const char *
   12574 get_tree_code_name (enum tree_code code)
   12575 {
   12576   const char *invalid = "<invalid tree code>";
   12577 
   12578   /* The tree_code enum promotes to signed, but we could be getting
   12579      invalid values, so force an unsigned comparison.  */
   12580   if (unsigned (code) >= MAX_TREE_CODES)
   12581     {
   12582       if ((unsigned)code == 0xa5a5)
   12583 	return "ggc_freed";
   12584       return invalid;
   12585     }
   12586 
   12587   return tree_code_name[code];
   12588 }
   12589 
/* Drops the TREE_OVERFLOW flag from T.  T must have the flag set;
   returns a node with the same value and the flag clear (possibly a
   fresh node, since constants may be shared).  */

tree
drop_tree_overflow (tree t)
{
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (poly_int_tree_p (t))
    return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));

  /* For VECTOR_CST, remove the overflow bits from the encoded elements
     and canonicalize the result.  */
  if (TREE_CODE (t) == VECTOR_CST)
    {
      tree_vector_builder builder;
      builder.new_unary_operation (TREE_TYPE (t), t, true);
      unsigned int count = builder.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elt = VECTOR_CST_ELT (t, i);
	  /* Recurse on each overflowed element.  */
	  if (TREE_OVERFLOW (elt))
	    elt = drop_tree_overflow (elt);
	  builder.quick_push (elt);
	}
      return builder.build ();
    }

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }

  return t;
}
   12635 
   12636 /* Given a memory reference expression T, return its base address.
   12637    The base address of a memory reference expression is the main
   12638    object being referenced.  For instance, the base address for
   12639    'array[i].fld[j]' is 'array'.  You can think of this as stripping
   12640    away the offset part from a memory address.
   12641 
   12642    This function calls handled_component_p to strip away all the inner
   12643    parts of the memory reference until it reaches the base object.  */
   12644 
   12645 tree
   12646 get_base_address (tree t)
   12647 {
   12648   if (TREE_CODE (t) == WITH_SIZE_EXPR)
   12649     t = TREE_OPERAND (t, 0);
   12650   while (handled_component_p (t))
   12651     t = TREE_OPERAND (t, 0);
   12652 
   12653   if ((TREE_CODE (t) == MEM_REF
   12654        || TREE_CODE (t) == TARGET_MEM_REF)
   12655       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
   12656     t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
   12657 
   12658   return t;
   12659 }
   12660 
   12661 /* Return a tree of sizetype representing the size, in bytes, of the element
   12662    of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
   12663 
   12664 tree
   12665 array_ref_element_size (tree exp)
   12666 {
   12667   tree aligned_size = TREE_OPERAND (exp, 3);
   12668   tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
   12669   location_t loc = EXPR_LOCATION (exp);
   12670 
   12671   /* If a size was specified in the ARRAY_REF, it's the size measured
   12672      in alignment units of the element type.  So multiply by that value.  */
   12673   if (aligned_size)
   12674     {
   12675       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
   12676 	 sizetype from another type of the same width and signedness.  */
   12677       if (TREE_TYPE (aligned_size) != sizetype)
   12678 	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
   12679       return size_binop_loc (loc, MULT_EXPR, aligned_size,
   12680 			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
   12681     }
   12682 
   12683   /* Otherwise, take the size from that of the element type.  Substitute
   12684      any PLACEHOLDER_EXPR that we have.  */
   12685   else
   12686     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
   12687 }
   12688 
   12689 /* Return a tree representing the lower bound of the array mentioned in
   12690    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
   12691 
   12692 tree
   12693 array_ref_low_bound (tree exp)
   12694 {
   12695   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
   12696 
   12697   /* If a lower bound is specified in EXP, use it.  */
   12698   if (TREE_OPERAND (exp, 2))
   12699     return TREE_OPERAND (exp, 2);
   12700 
   12701   /* Otherwise, if there is a domain type and it has a lower bound, use it,
   12702      substituting for a PLACEHOLDER_EXPR as needed.  */
   12703   if (domain_type && TYPE_MIN_VALUE (domain_type))
   12704     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
   12705 
   12706   /* Otherwise, return a zero of the appropriate type.  */
   12707   tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
   12708   return (idxtype == error_mark_node
   12709 	  ? integer_zero_node : build_int_cst (idxtype, 0));
   12710 }
   12711 
   12712 /* Return a tree representing the upper bound of the array mentioned in
   12713    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
   12714 
   12715 tree
   12716 array_ref_up_bound (tree exp)
   12717 {
   12718   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
   12719 
   12720   /* If there is a domain type and it has an upper bound, use it, substituting
   12721      for a PLACEHOLDER_EXPR as needed.  */
   12722   if (domain_type && TYPE_MAX_VALUE (domain_type))
   12723     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
   12724 
   12725   /* Otherwise fail.  */
   12726   return NULL_TREE;
   12727 }
   12728 
/* Returns true if REF is an array reference, component reference,
   or memory reference to an array at the end of a structure.
   If this is the case, the array may be allocated larger
   than its upper bound implies.  */

bool
array_at_struct_end_p (tree ref)
{
  /* ATYPE is the type of the array being tested.  First determine it
     from the form of REF; any other form of reference is rejected.  */
  tree atype;

  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    atype = TREE_TYPE (TREE_OPERAND (ref, 1));
  else if (TREE_CODE (ref) == MEM_REF)
    {
      /* For a MEM_REF, consider the last field of the pointed-to
	 record as the candidate trailing array.  */
      tree arg = TREE_OPERAND (ref, 0);
      if (TREE_CODE (arg) == ADDR_EXPR)
	arg = TREE_OPERAND (arg, 0);
      tree argtype = TREE_TYPE (arg);
      if (TREE_CODE (argtype) == RECORD_TYPE)
	{
	  if (tree fld = last_field (argtype))
	    {
	      atype = TREE_TYPE (fld);
	      if (TREE_CODE (atype) != ARRAY_TYPE)
		return false;
	      /* A declared variable with a sized last field cannot be
		 extended beyond that size.  */
	      if (VAR_P (arg) && DECL_SIZE (fld))
		return false;
	    }
	  else
	    return false;
	}
      else
	return false;
    }
  else
    return false;

  /* String literals have fixed extent.  */
  if (TREE_CODE (ref) == STRING_CST)
    return false;

  /* Remember the full reference for the offset computation below.  */
  tree ref_to_array = ref;
  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
         non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	    {
	      /* Skip non-FIELD_DECL chain entries when looking for a
		 following field.  */
	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
		nextf = DECL_CHAIN (nextf);
	      if (nextf)
		return false;
	    }
	}
      /* If we have a multi-dimensional array we do not consider
         a non-innermost dimension as flex array if the whole
	 multi-dimensional array is at struct end.
	 Same for an array of aggregates with a trailing array
	 member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
	return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
	;
      /* If we view an underlying object as sth else then what we
         gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	break;
      else
	gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  /* The array now is at struct end.  Treat flexible arrays as
     always subject to extend, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype)
      || ! TYPE_DOMAIN (atype)
      || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
    return true;

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons PR/69368).  */
  ref = get_base_address (ref);
  if (ref
      && DECL_P (ref)
      && !(flag_unconstrained_commons
	   && VAR_P (ref) && DECL_COMMON (ref))
      && DECL_SIZE_UNIT (ref)
      && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
    {
      /* Check whether the array domain covers all of the available
         padding.  */
      poly_int64 offset;
      if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
          || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
	return true;
      if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
	return true;

      /* If at least one extra element fits it is a flexarray.  */
      if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
		     + 2)
		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
	return true;

      return false;
    }

  return true;
}
   12853 
   12854 /* Return a tree representing the offset, in bytes, of the field referenced
   12855    by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
   12856 
   12857 tree
   12858 component_ref_field_offset (tree exp)
   12859 {
   12860   tree aligned_offset = TREE_OPERAND (exp, 2);
   12861   tree field = TREE_OPERAND (exp, 1);
   12862   location_t loc = EXPR_LOCATION (exp);
   12863 
   12864   /* If an offset was specified in the COMPONENT_REF, it's the offset measured
   12865      in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
   12866      value.  */
   12867   if (aligned_offset)
   12868     {
   12869       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
   12870 	 sizetype from another type of the same width and signedness.  */
   12871       if (TREE_TYPE (aligned_offset) != sizetype)
   12872 	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
   12873       return size_binop_loc (loc, MULT_EXPR, aligned_offset,
   12874 			     size_int (DECL_OFFSET_ALIGN (field)
   12875 				       / BITS_PER_UNIT));
   12876     }
   12877 
   12878   /* Otherwise, take the offset from that of the field.  Substitute
   12879      any PLACEHOLDER_EXPR that we have.  */
   12880   else
   12881     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
   12882 }
   12883 
/* Given the initializer INIT, return the initializer for the field
   DECL if it exists, otherwise null.  Used to obtain the initializer
   for a flexible array member and determine its size.  */

static tree
get_initializer_for (tree init, tree decl)
{
  /* Look through no-op conversions to reach the CONSTRUCTOR.  */
  STRIP_NOPS (init);

  /* Walk the (index, value) pairs of the CONSTRUCTOR; FLD is the
     index (a FIELD_DECL for records) and FLD_INIT its value.  */
  tree fld, fld_init;
  unsigned HOST_WIDE_INT i;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
    {
      if (decl == fld)
	return fld_init;

      /* NOTE(review): this tests the index FLD for being a CONSTRUCTOR
	 before recursing into the value FLD_INIT; confirm whether
	 FLD_INIT was intended here, since a record index is normally
	 a FIELD_DECL.  */
      if (TREE_CODE (fld) == CONSTRUCTOR)
	{
	  fld_init = get_initializer_for (fld_init, decl);
	  if (fld_init)
	    return fld_init;
	}
    }

  return NULL_TREE;
}
   12910 
/* Determines the size of the member referenced by the COMPONENT_REF
   REF, using its initializer expression if necessary in order to
   determine the size of an initialized flexible array member.
   If non-null, set *SAM when REF refers to an interior zero-length
   array or a trailing one-element array.
   Returns the size as sizetype (which might be zero for an object
   with an uninitialized flexible array member) or null if the size
   cannot be determined.  */

tree
component_ref_size (tree ref, special_array_member *sam /* = NULL */)
{
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);

  /* Use a local buffer when the caller does not care about *SAM so
     the rest of the function can set it unconditionally.  */
  special_array_member sambuf;
  if (!sam)
    sam = &sambuf;
  *sam = special_array_member::none;

  /* The object/argument referenced by the COMPONENT_REF and its type.  */
  tree arg = TREE_OPERAND (ref, 0);
  tree argtype = TREE_TYPE (arg);
  /* The referenced member.  */
  tree member = TREE_OPERAND (ref, 1);

  tree memsize = DECL_SIZE_UNIT (member);
  if (memsize)
    {
      tree memtype = TREE_TYPE (member);
      if (TREE_CODE (memtype) != ARRAY_TYPE)
	/* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
	   to the type of a class with a virtual base which doesn't
	   reflect the size of the virtual's members (see pr97595).
	   If that's the case fail for now and implement something
	   more robust in the future.  */
	return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
		? memsize : NULL_TREE);

      bool trailing = array_at_struct_end_p (ref);
      bool zero_length = integer_zerop (memsize);
      if (!trailing && !zero_length)
	/* MEMBER is either an interior array or is an array with
	   more than one element.  */
	return memsize;

      /* Classify zero-length arrays: trailing ones are flagged,
	 interior ones additionally lose their (zero) size.  */
      if (zero_length)
	{
	  if (trailing)
	    *sam = special_array_member::trail_0;
	  else
	    {
	      *sam = special_array_member::int_0;
	      memsize = NULL_TREE;
	    }
	}

      /* For a non-zero-length trailing array with a constant domain,
	 either return its size (more than one element) or flag it as
	 a trailing one-element array.  */
      if (!zero_length)
	if (tree dom = TYPE_DOMAIN (memtype))
	  if (tree min = TYPE_MIN_VALUE (dom))
	    if (tree max = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (min) == INTEGER_CST
		  && TREE_CODE (max) == INTEGER_CST)
		{
		  offset_int minidx = wi::to_offset (min);
		  offset_int maxidx = wi::to_offset (max);
		  offset_int neltsm1 = maxidx - minidx;
		  if (neltsm1 > 0)
		    /* MEMBER is an array with more than one element.  */
		    return memsize;

		  if (neltsm1 == 0)
		    *sam = special_array_member::trail_1;
		}

      /* For a reference to a zero- or one-element array member of a union
	 use the size of the union instead of the size of the member.  */
      if (TREE_CODE (argtype) == UNION_TYPE)
	memsize = TYPE_SIZE_UNIT (argtype);
    }

  /* MEMBER is either a bona fide flexible array member, or a zero-length
     array member, or an array of length one treated as such.  */

  /* If the reference is to a declared object and the member a true
     flexible array, try to determine its size from its initializer.  */
  poly_int64 baseoff = 0;
  tree base = get_addr_base_and_unit_offset (ref, &baseoff);
  if (!base || !VAR_P (base))
    {
      if (*sam != special_array_member::int_0)
	return NULL_TREE;

      if (TREE_CODE (arg) != COMPONENT_REF)
	return NULL_TREE;

      /* For an interior zero-length array, use the outermost
	 enclosing object as the base.  */
      base = arg;
      while (TREE_CODE (base) == COMPONENT_REF)
	base = TREE_OPERAND (base, 0);
      baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
    }

  /* BASE is the declared object of which MEMBER is either a member
     or that is cast to ARGTYPE (e.g., a char buffer used to store
     an ARGTYPE object).  */
  tree basetype = TREE_TYPE (base);

  /* Determine the base type of the referenced object.  If it's
     the same as ARGTYPE and MEMBER has a known size, return it.  */
  tree bt = basetype;
  if (*sam != special_array_member::int_0)
    while (TREE_CODE (bt) == ARRAY_TYPE)
      bt = TREE_TYPE (bt);
  bool typematch = useless_type_conversion_p (argtype, bt);
  if (memsize && typematch)
    return memsize;

  memsize = NULL_TREE;

  if (typematch)
    /* MEMBER is a true flexible array member.  Compute its size from
       the initializer of the BASE object if it has one.  */
    if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
      if (init != error_mark_node)
	{
	  init = get_initializer_for (init, member);
	  if (init)
	    {
	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
	      if (tree refsize = TYPE_SIZE_UNIT (argtype))
		{
		  /* Use the larger of the initializer size and the tail
		     padding in the enclosing struct.  */
		  poly_int64 rsz = tree_to_poly_int64 (refsize);
		  rsz -= baseoff;
		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
		}

	      /* The initializer gives the full size; no offset
		 adjustment remains.  */
	      baseoff = 0;
	    }
	}

  if (!memsize)
    {
      if (typematch)
	{
	  if (DECL_P (base)
	      && DECL_EXTERNAL (base)
	      && bt == basetype
	      && *sam != special_array_member::int_0)
	    /* The size of a flexible array member of an extern struct
	       with no initializer cannot be determined (it's defined
	       in another translation unit and can have an initializer
	       with an arbitrary number of elements).  */
	    return NULL_TREE;

	  /* Use the size of the base struct or, for interior zero-length
	     arrays, the size of the enclosing type.  */
	  memsize = TYPE_SIZE_UNIT (bt);
	}
      else if (DECL_P (base))
	/* Use the size of the BASE object (possibly an array of some
	   other type such as char used to store the struct).  */
	memsize = DECL_SIZE_UNIT (base);
      else
	return NULL_TREE;
    }

  /* If the flexible array member has a known size use the greater
     of it and the tail padding in the enclosing struct.
     Otherwise, when the size of the flexible array member is unknown
     and the referenced object is not a struct, use the size of its
     type when known.  This detects sizes of array buffers when cast
     to struct types with flexible array members.  */
  if (memsize)
    {
      if (!tree_fits_poly_int64_p (memsize))
	return NULL_TREE;
      poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
      if (known_lt (baseoff, memsz64))
	{
	  /* Return the size remaining past BASEOFF.  */
	  memsz64 -= baseoff;
	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
	}
      return size_zero_node;
    }

  /* Return "don't know" for an external non-array object since its
     flexible array member can be initialized to have any number of
     elements.  Otherwise, return zero because the flexible array
     member has no elements.  */
  return (DECL_P (base)
	  && DECL_EXTERNAL (base)
	  && (!typematch
	      || TREE_CODE (basetype) != ARRAY_TYPE)
	  ? NULL_TREE : size_zero_node);
}
   13108 
   13109 /* Return the machine mode of T.  For vectors, returns the mode of the
   13110    inner type.  The main use case is to feed the result to HONOR_NANS,
   13111    avoiding the BLKmode that a direct TYPE_MODE (T) might return.  */
   13112 
   13113 machine_mode
   13114 element_mode (const_tree t)
   13115 {
   13116   if (!TYPE_P (t))
   13117     t = TREE_TYPE (t);
   13118   if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
   13119     t = TREE_TYPE (t);
   13120   return TYPE_MODE (t);
   13121 }
   13122 
   13123 /* Vector types need to re-check the target flags each time we report
   13124    the machine mode.  We need to do this because attribute target can
   13125    change the result of vector_mode_supported_p and have_regs_of_mode
   13126    on a per-function basis.  Thus the TYPE_MODE of a VECTOR_TYPE can
   13127    change on a per-function basis.  */
   13128 /* ??? Possibly a better solution is to run through all the types
   13129    referenced by a function and re-compute the TYPE_MODE once, rather
   13130    than make the TYPE_MODE macro call a function.  */
   13131 
   13132 machine_mode
   13133 vector_type_mode (const_tree t)
   13134 {
   13135   machine_mode mode;
   13136 
   13137   gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
   13138 
   13139   mode = t->type_common.mode;
   13140   if (VECTOR_MODE_P (mode)
   13141       && (!targetm.vector_mode_supported_p (mode)
   13142 	  || !have_regs_of_mode[mode]))
   13143     {
   13144       scalar_int_mode innermode;
   13145 
   13146       /* For integers, try mapping it to a same-sized scalar mode.  */
   13147       if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
   13148 	{
   13149 	  poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
   13150 			     * GET_MODE_BITSIZE (innermode));
   13151 	  scalar_int_mode mode;
   13152 	  if (int_mode_for_size (size, 0).exists (&mode)
   13153 	      && have_regs_of_mode[mode])
   13154 	    return mode;
   13155 	}
   13156 
   13157       return BLKmode;
   13158     }
   13159 
   13160   return mode;
   13161 }
   13162 
   13163 /* Return the size in bits of each element of vector type TYPE.  */
   13164 
   13165 unsigned int
   13166 vector_element_bits (const_tree type)
   13167 {
   13168   gcc_checking_assert (VECTOR_TYPE_P (type));
   13169   if (VECTOR_BOOLEAN_TYPE_P (type))
   13170     return TYPE_PRECISION (TREE_TYPE (type));
   13171   return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
   13172 }
   13173 
   13174 /* Calculate the size in bits of each element of vector type TYPE
   13175    and return the result as a tree of type bitsizetype.  */
   13176 
   13177 tree
   13178 vector_element_bits_tree (const_tree type)
   13179 {
   13180   gcc_checking_assert (VECTOR_TYPE_P (type));
   13181   if (VECTOR_BOOLEAN_TYPE_P (type))
   13182     return bitsize_int (vector_element_bits (type));
   13183   return TYPE_SIZE (TREE_TYPE (type));
   13184 }
   13185 
/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specified variant (i.e. the main variant).
   Returns false (after emitting diagnostics via error/debug_tree) on the
   first mismatch found; returns true if all checks pass.  */

static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variant can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
                   ENCODE_QUAL_ADDR_SPACE.
     - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
       in this case some values may not be set in the variant types
       (see TYPE_COMPLETE_P checks).
     - it is possible to have TYPE_ARTIFICIAL variant of non-artificial type
     - by TYPE_NAME and attributes (i.e. when variant originates by typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
       this is necessary to make it possible to merge types from different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have new TYPE_FIELDS list that lists variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
   */

  /* Convenience macro for matching individual fields: emit an error and
     bail out if the accessor FLAG disagrees between T and TV.  */
#define verify_variant_match(flag)					    \
  do {									    \
    if (flag (tv) != flag (t))						    \
      {									    \
	error ("type variant differs by %s", #flag);			    \
	debug_tree (tv);						    \
	return false;							    \
      }									    \
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
#if 0
  if (TYPE_ARTIFICIAL (tv))
    verify_variant_match (TYPE_ARTIFICIAL);
#endif
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada build.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check triggers during libstdc++ build.  */
#if 0
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
    verify_variant_match (TYPE_FINAL_P);
#endif

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_MODE);
      /* PLACEHOLDER_EXPR sizes (Ada variable-sized types) are exempt from
	 the pointer-equality size checks below.  */
      if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
	verify_variant_match (TYPE_SIZE);
      if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
	{
	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
					TYPE_SIZE_UNIT (tv), 0));
	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
	  debug_tree (tv);
	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (tv));
	  error ("type%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (t));
	  return false;
	}
      verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
    }
  verify_variant_match (TYPE_PRECISION);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from different translation units
     that may differ by TYPE_CONTEXT that in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.   */
#if 0
  if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
    verify_variant_match (TYPE_CONTEXT);
#endif
  if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
    verify_variant_match (TYPE_STRING_FLAG);
  if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
    verify_variant_match (TYPE_CXX_ODR_P);
  /* A variant must never have its own alias set; the alias set lives on
     the main variant only.  */
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangle the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different %<TYPE_VFIELD%>");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
       || TREE_CODE (t) == INTEGER_TYPE
       || TREE_CODE (t) == BOOLEAN_TYPE
       || TREE_CODE (t) == REAL_TYPE
       || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with main variant.

     Also disable the check for Java for now because of parser hack that builds
     first a dummy BINFO and then sometimes replaces it by a real BINFO in some
     of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
	 at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different %<TYPE_BINFO%>");
      debug_tree (tv);
      error ("type variant%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (tv));
      error ("type%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE
      && TYPE_VALUES (t))
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
	   && COMPLETE_TYPE_P (t)
	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
	 qualified type. Verify that they look the same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
	   f1 && f2;
	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
		/* FIXME: gfc_nonrestricted_type builds all types as variants
		   with exception of pointer types.  It deeply copies the type
		   which means that we may end up with a variant type
		   referring non-variant pointer.  We may change it to
		   produce types as variants, too, like
		   objc_get_protocol_qualified_type does.  */
		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
	  break;
      /* If either list has leftover fields, the walk above stopped at a
	 mismatch (or the lists have different lengths).  */
      if (f1 || f2)
	{
	  error ("type variant has different %<TYPE_FIELDS%>");
	  debug_tree (tv);
	  error ("first mismatch is field");
	  debug_tree (f1);
	  error ("and field");
	  debug_tree (f2);
          return false;
	}
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of array type is really an array type
     of qualified TREE_TYPE.
     objc builds variants of pointer where pointer to type is a variant, too
     in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
	   && !POINTER_TYPE_P (t))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different %<TREE_TYPE%>");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its variant");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}
   13426 
   13427 
   13428 /* The TYPE_CANONICAL merging machinery.  It should closely resemble
   13429    the middle-end types_compatible_p function.  It needs to avoid
   13430    claiming types are different for types that should be treated
   13431    the same with respect to TBAA.  Canonical types are also used
   13432    for IL consistency checks via the useless_type_conversion_p
   13433    predicate which does not handle all type kinds itself but falls
   13434    back to pointer-comparison of TYPE_CANONICAL for aggregates
   13435    for example.  */
   13436 
   13437 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
   13438    type calculation because we need to allow inter-operability between signed
   13439    and unsigned variants.  */
   13440 
   13441 bool
   13442 type_with_interoperable_signedness (const_tree type)
   13443 {
   13444   /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
   13445      signed char and unsigned char.  Similarly fortran FE builds
   13446      C_SIZE_T as signed type, while C defines it unsigned.  */
   13447 
   13448   return tree_code_for_canonical_type_merging (TREE_CODE (type))
   13449 	   == INTEGER_TYPE
   13450          && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
   13451 	     || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
   13452 }
   13453 
/* Return true iff T1 and T2 are structurally identical for what
   TBAA is concerned.
   This function is used both by lto.cc canonical type merging and by the
   verifier.  If TRUST_TYPE_CANONICAL we do not look into structure of types
   that have TYPE_CANONICAL defined and assume them equivalent.  This is useful
   only for LTO because only in these cases TYPE_CANONICAL equivalence
   corresponds to the one defined by gimple_canonical_types_compatible_p.  */

bool
gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
				     bool trust_type_canonical)
{
  /* Type variants should be same as the main variant.  When not doing sanity
     checking to verify this fact, go to main variants and save some work.  */
  if (trust_type_canonical)
    {
      t1 = TYPE_MAIN_VARIANT (t1);
      t2 = TYPE_MAIN_VARIANT (t2);
    }

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* We consider complete types always compatible with incomplete type.
     This does not make sense for canonical type calculation and thus we
     need to ensure that we are never called on it.

     FIXME: For more correctness the function probably should have three modes
	1) mode assuming that types are complete matching their structure
	2) mode allowing incomplete types but producing equivalence classes
	   and thus ignoring all info from complete types
	3) mode allowing incomplete types to match complete but checking
	   compatibility between complete types.

     1 and 2 can be used for canonical type calculation. 3 is the real
     definition of type compatibility that can be used i.e. for warnings during
     declaration merging.  */

  gcc_assert (!trust_type_canonical
	      || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));

  /* If the types have been previously registered and found equal
     they still are.  */

  if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
      && trust_type_canonical)
    {
      /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
	 they are always NULL, but they are set to non-NULL for types
	 constructed by build_pointer_type and variants.  In this case the
	 TYPE_CANONICAL is more fine grained than the equivalence we test
	 (where all pointers are considered equal).  Be sure to not return
	 false negatives.  */
      gcc_checking_assert (canonical_type_used_p (t1)
			   && canonical_type_used_p (t2));
      return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
    }

  /* For types where we do ODR based TBAA the canonical type is always
     set correctly, so we know that types are different if their
     canonical types do not match.  */
  if (trust_type_canonical
      && (odr_type_p (t1) && odr_based_tbaa_p (t1))
	  != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
  if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different mode.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	return false;

      /* In some cases the signed and unsigned types are required to be
	 inter-operable.  */
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
	  && !type_with_interoperable_signedness (t1))
	return false;

      /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
	 interoperable with "signed char".  Unless all frontends are revisited
	 to agree on these types, we must ignore the flag completely.  */

      /* The Fortran standard defines C_PTR type that is compatible with every
 	 C pointer.  For this reason we need to glob all pointers into one.
	 Still pointers in different address spaces are not compatible.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2),
						    trust_type_canonical);

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
 	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  Two
		 PLACEHOLDER_EXPRs are considered equal without looking
		 inside them.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
		           || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  /* Walk both argument lists in lockstep; any length or element
	     mismatch means incompatibility.  */
	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
		      trust_type_canonical))
		return false;
	    }

	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* Don't try to compare variants of an incomplete type, before
	   TYPE_FIELDS has been copied around.  */
	if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
	  return true;


	if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
	  return false;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields and zero-sized fields.  */
	    while (f1 && (TREE_CODE (f1) != FIELD_DECL
			  || (DECL_SIZE (f1)
			      && integer_zerop (DECL_SIZE (f1)))))
	      f1 = TREE_CHAIN (f1);
	    while (f2 && (TREE_CODE (f2) != FIELD_DECL
			  || (DECL_SIZE (f2)
			      && integer_zerop (DECL_SIZE (f2)))))
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		      (TREE_TYPE (f1), TREE_TYPE (f2),
		       trust_type_canonical))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      /* Consider all types with language specific trees in them mutually
	 compatible.  This is executed only from verify_type and false
         positives can be tolerated.  */
      gcc_assert (!in_lto_p);
      return true;
    }
}
   13717 
   13718 /* Verify type T.  */
   13719 
   13720 void
   13721 verify_type (const_tree t)
   13722 {
   13723   bool error_found = false;
   13724   tree mv = TYPE_MAIN_VARIANT (t);
   13725   if (!mv)
   13726     {
   13727       error ("main variant is not defined");
   13728       error_found = true;
   13729     }
   13730   else if (mv != TYPE_MAIN_VARIANT (mv))
   13731     {
   13732       error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
   13733       debug_tree (mv);
   13734       error_found = true;
   13735     }
   13736   else if (t != mv && !verify_type_variant (t, mv))
   13737     error_found = true;
   13738 
   13739   tree ct = TYPE_CANONICAL (t);
   13740   if (!ct)
   13741     ;
   13742   else if (TYPE_CANONICAL (ct) != ct)
   13743     {
   13744       error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
   13745       debug_tree (ct);
   13746       error_found = true;
   13747     }
   13748   /* Method and function types cannot be used to address memory and thus
   13749      TYPE_CANONICAL really matters only for determining useless conversions.
   13750 
   13751      FIXME: C++ FE produce declarations of builtin functions that are not
   13752      compatible with main variants.  */
   13753   else if (TREE_CODE (t) == FUNCTION_TYPE)
   13754     ;
   13755   else if (t != ct
   13756 	   /* FIXME: gimple_canonical_types_compatible_p cannot compare types
   13757 	      with variably sized arrays because their sizes possibly
   13758 	      gimplified to different variables.  */
   13759 	   && !variably_modified_type_p (ct, NULL)
   13760 	   && !gimple_canonical_types_compatible_p (t, ct, false)
   13761 	   && COMPLETE_TYPE_P (t))
   13762     {
   13763       error ("%<TYPE_CANONICAL%> is not compatible");
   13764       debug_tree (ct);
   13765       error_found = true;
   13766     }
   13767 
   13768   if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
   13769       && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
   13770     {
   13771       error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
   13772       debug_tree (ct);
   13773       error_found = true;
   13774     }
   13775   if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
   13776    {
   13777       error ("%<TYPE_CANONICAL%> of main variant is not main variant");
   13778       debug_tree (ct);
   13779       debug_tree (TYPE_MAIN_VARIANT (ct));
   13780       error_found = true;
   13781    }
   13782 
   13783 
   13784   /* Check various uses of TYPE_MIN_VALUE_RAW.  */
   13785   if (RECORD_OR_UNION_TYPE_P (t))
   13786     {
   13787       /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
   13788 	 and danagle the pointer from time to time.  */
   13789       if (TYPE_VFIELD (t)
   13790 	  && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
   13791 	  && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
   13792 	{
   13793 	  error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
   13794 	  debug_tree (TYPE_VFIELD (t));
   13795 	  error_found = true;
   13796 	}
   13797     }
   13798   else if (TREE_CODE (t) == POINTER_TYPE)
   13799     {
   13800       if (TYPE_NEXT_PTR_TO (t)
   13801 	  && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
   13802 	{
   13803 	  error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
   13804 	  debug_tree (TYPE_NEXT_PTR_TO (t));
   13805 	  error_found = true;
   13806 	}
   13807     }
   13808   else if (TREE_CODE (t) == REFERENCE_TYPE)
   13809     {
   13810       if (TYPE_NEXT_REF_TO (t)
   13811 	  && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
   13812 	{
   13813 	  error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
   13814 	  debug_tree (TYPE_NEXT_REF_TO (t));
   13815 	  error_found = true;
   13816 	}
   13817     }
   13818   else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
   13819 	   || TREE_CODE (t) == FIXED_POINT_TYPE)
   13820     {
   13821       /* FIXME: The following check should pass:
   13822 	  useless_type_conversion_p (const_cast <tree> (t),
   13823 				     TREE_TYPE (TYPE_MIN_VALUE (t))
   13824 	 but does not for C sizetypes in LTO.  */
   13825     }
   13826 
   13827   /* Check various uses of TYPE_MAXVAL_RAW.  */
   13828   if (RECORD_OR_UNION_TYPE_P (t))
   13829     {
   13830       if (!TYPE_BINFO (t))
   13831 	;
   13832       else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
   13833 	{
   13834 	  error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
   13835 	  debug_tree (TYPE_BINFO (t));
   13836 	  error_found = true;
   13837 	}
   13838       else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
   13839 	{
   13840 	  error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
   13841 	  debug_tree (TREE_TYPE (TYPE_BINFO (t)));
   13842 	  error_found = true;
   13843 	}
   13844     }
   13845   else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
   13846     {
   13847       if (TYPE_METHOD_BASETYPE (t)
   13848 	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
   13849 	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
   13850 	{
   13851 	  error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
   13852 	  debug_tree (TYPE_METHOD_BASETYPE (t));
   13853 	  error_found = true;
   13854 	}
   13855     }
   13856   else if (TREE_CODE (t) == OFFSET_TYPE)
   13857     {
   13858       if (TYPE_OFFSET_BASETYPE (t)
   13859 	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
   13860 	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
   13861 	{
   13862 	  error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
   13863 	  debug_tree (TYPE_OFFSET_BASETYPE (t));
   13864 	  error_found = true;
   13865 	}
   13866     }
   13867   else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
   13868 	   || TREE_CODE (t) == FIXED_POINT_TYPE)
   13869     {
   13870       /* FIXME: The following check should pass:
   13871 	  useless_type_conversion_p (const_cast <tree> (t),
   13872 				     TREE_TYPE (TYPE_MAX_VALUE (t))
   13873 	 but does not for C sizetypes in LTO.  */
   13874     }
   13875   else if (TREE_CODE (t) == ARRAY_TYPE)
   13876     {
   13877       if (TYPE_ARRAY_MAX_SIZE (t)
   13878 	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
   13879         {
   13880 	  error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
   13881 	  debug_tree (TYPE_ARRAY_MAX_SIZE (t));
   13882 	  error_found = true;
   13883         }
   13884     }
   13885   else if (TYPE_MAX_VALUE_RAW (t))
   13886     {
   13887       error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
   13888       debug_tree (TYPE_MAX_VALUE_RAW (t));
   13889       error_found = true;
   13890     }
   13891 
   13892   if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
   13893     {
   13894       error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
   13895       debug_tree (TYPE_LANG_SLOT_1 (t));
   13896       error_found = true;
   13897     }
   13898 
   13899   /* Check various uses of TYPE_VALUES_RAW.  */
   13900   if (TREE_CODE (t) == ENUMERAL_TYPE)
   13901     for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
   13902       {
   13903 	tree value = TREE_VALUE (l);
   13904 	tree name = TREE_PURPOSE (l);
   13905 
   13906 	/* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
   13907  	   CONST_DECL of ENUMERAL TYPE.  */
   13908 	if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
   13909 	  {
   13910 	    error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
   13911 	    debug_tree (value);
   13912 	    debug_tree (name);
   13913 	    error_found = true;
   13914 	  }
   13915 	if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
   13916 	    && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
   13917 	    && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
   13918 	  {
   13919 	    error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
   13920 		   "to the enum");
   13921 	    debug_tree (value);
   13922 	    debug_tree (name);
   13923 	    error_found = true;
   13924 	  }
   13925 	if (TREE_CODE (name) != IDENTIFIER_NODE)
   13926 	  {
   13927 	    error ("enum value name is not %<IDENTIFIER_NODE%>");
   13928 	    debug_tree (value);
   13929 	    debug_tree (name);
   13930 	    error_found = true;
   13931 	  }
   13932       }
   13933   else if (TREE_CODE (t) == ARRAY_TYPE)
   13934     {
   13935       if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
   13936 	{
   13937 	  error ("array %<TYPE_DOMAIN%> is not integer type");
   13938 	  debug_tree (TYPE_DOMAIN (t));
   13939 	  error_found = true;
   13940 	}
   13941     }
   13942   else if (RECORD_OR_UNION_TYPE_P (t))
   13943     {
   13944       if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
   13945 	{
   13946 	  error ("%<TYPE_FIELDS%> defined in incomplete type");
   13947 	  error_found = true;
   13948 	}
   13949       for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
   13950 	{
   13951 	  /* TODO: verify properties of decls.  */
   13952 	  if (TREE_CODE (fld) == FIELD_DECL)
   13953 	    ;
   13954 	  else if (TREE_CODE (fld) == TYPE_DECL)
   13955 	    ;
   13956 	  else if (TREE_CODE (fld) == CONST_DECL)
   13957 	    ;
   13958 	  else if (VAR_P (fld))
   13959 	    ;
   13960 	  else if (TREE_CODE (fld) == TEMPLATE_DECL)
   13961 	    ;
   13962 	  else if (TREE_CODE (fld) == USING_DECL)
   13963 	    ;
   13964 	  else if (TREE_CODE (fld) == FUNCTION_DECL)
   13965 	    ;
   13966 	  else
   13967 	    {
   13968 	      error ("wrong tree in %<TYPE_FIELDS%> list");
   13969 	      debug_tree (fld);
   13970 	      error_found = true;
   13971 	    }
   13972 	}
   13973     }
   13974   else if (TREE_CODE (t) == INTEGER_TYPE
   13975 	   || TREE_CODE (t) == BOOLEAN_TYPE
   13976 	   || TREE_CODE (t) == OFFSET_TYPE
   13977 	   || TREE_CODE (t) == REFERENCE_TYPE
   13978 	   || TREE_CODE (t) == NULLPTR_TYPE
   13979 	   || TREE_CODE (t) == POINTER_TYPE)
   13980     {
   13981       if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
   13982 	{
   13983 	  error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
   13984 		 "is %p",
   13985 		 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
   13986 	  error_found = true;
   13987 	}
   13988       else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
   13989 	{
   13990 	  error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
   13991 	  debug_tree (TYPE_CACHED_VALUES (t));
   13992 	  error_found = true;
   13993 	}
   13994       /* Verify just enough of cache to ensure that no one copied it to new type.
   13995  	 All copying should go by copy_node that should clear it.  */
   13996       else if (TYPE_CACHED_VALUES_P (t))
   13997 	{
   13998 	  int i;
   13999 	  for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
   14000 	    if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
   14001 		&& TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
   14002 	      {
   14003 		error ("wrong %<TYPE_CACHED_VALUES%> entry");
   14004 		debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
   14005 		error_found = true;
   14006 		break;
   14007 	      }
   14008 	}
   14009     }
   14010   else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
   14011     for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
   14012       {
   14013 	/* C++ FE uses TREE_PURPOSE to store initial values.  */
   14014 	if (TREE_PURPOSE (l) && in_lto_p)
   14015 	  {
   14016 	    error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
   14017 	    debug_tree (l);
   14018 	    error_found = true;
   14019 	  }
   14020 	if (!TYPE_P (TREE_VALUE (l)))
   14021 	  {
   14022 	    error ("wrong entry in %<TYPE_ARG_TYPES%> list");
   14023 	    debug_tree (l);
   14024 	    error_found = true;
   14025 	  }
   14026       }
   14027   else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
   14028     {
   14029       error ("%<TYPE_VALUES_RAW%> field is non-NULL");
   14030       debug_tree (TYPE_VALUES_RAW (t));
   14031       error_found = true;
   14032     }
   14033   if (TREE_CODE (t) != INTEGER_TYPE
   14034       && TREE_CODE (t) != BOOLEAN_TYPE
   14035       && TREE_CODE (t) != OFFSET_TYPE
   14036       && TREE_CODE (t) != REFERENCE_TYPE
   14037       && TREE_CODE (t) != NULLPTR_TYPE
   14038       && TREE_CODE (t) != POINTER_TYPE
   14039       && TYPE_CACHED_VALUES_P (t))
   14040     {
   14041       error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
   14042       error_found = true;
   14043     }
   14044 
   14045   /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
   14046      TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
   14047      of a type. */
   14048   if (TREE_CODE (t) == METHOD_TYPE
   14049       && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
   14050     {
   14051 	error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
   14052 	error_found = true;
   14053     }
   14054 
   14055   if (error_found)
   14056     {
   14057       debug_tree (const_cast <tree> (t));
   14058       internal_error ("%qs failed", __func__);
   14059     }
   14060 }
   14061 
   14062 
   14063 /* Return 1 if ARG interpreted as signed in its precision is known to be
   14064    always positive or 2 if ARG is known to be always negative, or 3 if
   14065    ARG may be positive or negative.  */
   14066 
int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      /* For constants the answer follows directly from the sign bit
	 in ARG's precision.  */
      wide_int w = wi::sext (wi::to_wide (arg), prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  /* Look through a chain of integral conversions that do not widen
     beyond PREC, so the sign can be deduced from the inner value.  */
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      /* Give up after looking through 30 conversions.  */
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  /* Ask the global range query for ARG; while no usable range is
     known, keep looking through defining conversion statements the
     same way as above.  */
  value_range r;
  while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      /* Zero extension from a narrower type means the value
		 is known positive.  */
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 2;
    }
  else
    {
      /* For signed values a nonnegative lower bound means always
	 positive, a negative upper bound means always negative.  */
      if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 2;
    }
  return 3;
}
   14141 
   14142 
   14143 
   14144 
   14145 /* Return true if ARG is marked with the nonnull attribute in the
   14146    current function signature.  */
   14147 
bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  /* Only pointer-like PARM_DECLs can meaningfully be nonnull.  */
  gcc_assert (TREE_CODE (arg) == PARM_DECL
	      && (POINTER_TYPE_P (TREE_TYPE (arg))
		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* THIS argument of method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  /* A function type may carry several "nonnull" attributes, each with
     its own argument list; examine each one in turn.  */
  fntype = TREE_TYPE (cfun->decl);
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
	return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
	return true;

      /* Get the position number for ARG in the function signature.  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
	   t;
	   t = DECL_CHAIN (t), arg_num++)
	{
	  if (t == arg)
	    break;
	}

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	{
	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
	    return true;
	}
    }

  return false;
}
   14207 
   14208 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
   14209    information.  */
   14210 
   14211 location_t
   14212 set_block (location_t loc, tree block)
   14213 {
   14214   location_t pure_loc = get_pure_location (loc);
   14215   source_range src_range = get_range_from_loc (line_table, loc);
   14216   return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
   14217 }
   14218 
   14219 location_t
   14220 set_source_range (tree expr, location_t start, location_t finish)
   14221 {
   14222   source_range src_range;
   14223   src_range.m_start = start;
   14224   src_range.m_finish = finish;
   14225   return set_source_range (expr, src_range);
   14226 }
   14227 
   14228 location_t
   14229 set_source_range (tree expr, source_range src_range)
   14230 {
   14231   if (!EXPR_P (expr))
   14232     return UNKNOWN_LOCATION;
   14233 
   14234   location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
   14235   location_t adhoc = COMBINE_LOCATION_DATA (line_table,
   14236 					    pure_loc,
   14237 					    src_range,
   14238 					    NULL);
   14239   SET_EXPR_LOCATION (expr, adhoc);
   14240   return adhoc;
   14241 }
   14242 
   14243 /* Return EXPR, potentially wrapped with a node expression LOC,
   14244    if !CAN_HAVE_LOCATION_P (expr).
   14245 
   14246    NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
   14247    VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
   14248 
   14249    Wrapper nodes can be identified using location_wrapper_p.  */
   14250 
   14251 tree
   14252 maybe_wrap_with_location (tree expr, location_t loc)
   14253 {
   14254   if (expr == NULL)
   14255     return NULL;
   14256   if (loc == UNKNOWN_LOCATION)
   14257     return expr;
   14258   if (CAN_HAVE_LOCATION_P (expr))
   14259     return expr;
   14260   /* We should only be adding wrappers for constants and for decls,
   14261      or for some exceptional tree nodes (e.g. BASELINK in the C++ FE).  */
   14262   gcc_assert (CONSTANT_CLASS_P (expr)
   14263 	      || DECL_P (expr)
   14264 	      || EXCEPTIONAL_CLASS_P (expr));
   14265 
   14266   /* For now, don't add wrappers to exceptional tree nodes, to minimize
   14267      any impact of the wrapper nodes.  */
   14268   if (EXCEPTIONAL_CLASS_P (expr))
   14269     return expr;
   14270 
   14271   /* Compiler-generated temporary variables don't need a wrapper.  */
   14272   if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
   14273     return expr;
   14274 
   14275   /* If any auto_suppress_location_wrappers are active, don't create
   14276      wrappers.  */
   14277   if (suppress_location_wrappers > 0)
   14278     return expr;
   14279 
   14280   tree_code code
   14281     = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
   14282 	|| (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
   14283        ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
   14284   tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
   14285   /* Mark this node as being a wrapper.  */
   14286   EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
   14287   return wrapper;
   14288 }
   14289 
/* While positive, maybe_wrap_with_location returns EXPR without
   creating a wrapper node.  */
int suppress_location_wrappers;
   14291 
   14292 /* Return the name of combined function FN, for debugging purposes.  */
   14293 
   14294 const char *
   14295 combined_fn_name (combined_fn fn)
   14296 {
   14297   if (builtin_fn_p (fn))
   14298     {
   14299       tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
   14300       return IDENTIFIER_POINTER (DECL_NAME (fndecl));
   14301     }
   14302   else
   14303     return internal_fn_name (as_internal_fn (fn));
   14304 }
   14305 
   14306 /* Return a bitmap with a bit set corresponding to each argument in
   14307    a function call type FNTYPE declared with attribute nonnull,
   14308    or null if none of the function's argument are nonnull.  The caller
   14309    must free the bitmap.  */
   14310 
bitmap
get_nonnull_args (const_tree fntype)
{
  if (fntype == NULL_TREE)
    return NULL;

  bitmap argmap = NULL;
  if (TREE_CODE (fntype) == METHOD_TYPE)
    {
      /* The this pointer in C++ non-static member functions is
	 implicitly nonnull whether or not it's declared as such.  */
      argmap = BITMAP_ALLOC (NULL);
      bitmap_set_bit (argmap, 0);
    }

  tree attrs = TYPE_ATTRIBUTES (fntype);
  if (!attrs)
    return argmap;

  /* A function declaration can specify multiple attribute nonnull,
     each with zero or more arguments.  The loop below creates a bitmap
     representing a union of all the arguments.  An empty (but non-null)
     bitmap means that all arguments have been declared nonnull.  */
  for ( ; attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);
      if (!attrs)
	break;

      if (!argmap)
	argmap = BITMAP_ALLOC (NULL);

      if (!TREE_VALUE (attrs))
	{
	  /* Clear the bitmap in case a previous attribute nonnull
	     set it and this one overrides it for all arguments.  */
	  bitmap_clear (argmap);
	  return argmap;
	}

      /* Iterate over the indices of the format arguments declared nonnull
	 and set a bit for each.  */
      for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
	{
	  /* Attribute argument positions are 1-based; the bitmap is
	     zero-based.  */
	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
	  bitmap_set_bit (argmap, val);
	}
    }

  return argmap;
}
   14362 
   14363 /* Returns true if TYPE is a type where it and all of its subobjects
   14364    (recursively) are of structure, union, or array type.  */
   14365 
   14366 bool
   14367 is_empty_type (const_tree type)
   14368 {
   14369   if (RECORD_OR_UNION_TYPE_P (type))
   14370     {
   14371       for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
   14372 	if (TREE_CODE (field) == FIELD_DECL
   14373 	    && !DECL_PADDING_P (field)
   14374 	    && !is_empty_type (TREE_TYPE (field)))
   14375 	  return false;
   14376       return true;
   14377     }
   14378   else if (TREE_CODE (type) == ARRAY_TYPE)
   14379     return (integer_minus_onep (array_type_nelts (type))
   14380 	    || TYPE_DOMAIN (type) == NULL_TREE
   14381 	    || is_empty_type (TREE_TYPE (type)));
   14382   return false;
   14383 }
   14384 
   14385 /* Implement TARGET_EMPTY_RECORD_P.  Return true if TYPE is an empty type
   14386    that shouldn't be passed via stack.  */
   14387 
   14388 bool
   14389 default_is_empty_record (const_tree type)
   14390 {
   14391   if (!abi_version_at_least (12))
   14392     return false;
   14393 
   14394   if (type == error_mark_node)
   14395     return false;
   14396 
   14397   if (TREE_ADDRESSABLE (type))
   14398     return false;
   14399 
   14400   return is_empty_type (TYPE_MAIN_VARIANT (type));
   14401 }
   14402 
   14403 /* Determine whether TYPE is a structure with a flexible array member,
   14404    or a union containing such a structure (possibly recursively).  */
   14405 
   14406 bool
   14407 flexible_array_type_p (const_tree type)
   14408 {
   14409   tree x, last;
   14410   switch (TREE_CODE (type))
   14411     {
   14412     case RECORD_TYPE:
   14413       last = NULL_TREE;
   14414       for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
   14415 	if (TREE_CODE (x) == FIELD_DECL)
   14416 	  last = x;
   14417       if (last == NULL_TREE)
   14418 	return false;
   14419       if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
   14420 	  && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
   14421 	  && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
   14422 	  && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
   14423 	return true;
   14424       return false;
   14425     case UNION_TYPE:
   14426       for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
   14427 	{
   14428 	  if (TREE_CODE (x) == FIELD_DECL
   14429 	      && flexible_array_type_p (TREE_TYPE (x)))
   14430 	    return true;
   14431 	}
   14432       return false;
   14433     default:
   14434       return false;
   14435   }
   14436 }
   14437 
   14438 /* Like int_size_in_bytes, but handle empty records specially.  */
   14439 
   14440 HOST_WIDE_INT
   14441 arg_int_size_in_bytes (const_tree type)
   14442 {
   14443   return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
   14444 }
   14445 
   14446 /* Like size_in_bytes, but handle empty records specially.  */
   14447 
   14448 tree
   14449 arg_size_in_bytes (const_tree type)
   14450 {
   14451   return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
   14452 }
   14453 
   14454 /* Return true if an expression with CODE has to have the same result type as
   14455    its first operand.  */
   14456 
bool
expr_type_first_operand_type_p (tree_code code)
{
  switch (code)
    {
    /* Unary operations.  */
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case CONJ_EXPR:

    /* Binary arithmetic, division and bitwise operations.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:

    /* Shifts and rotates (the second operand may differ in type).  */
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      return false;
    }
}
   14497 
   14498 /* Return a typenode for the "standard" C type with a given name.  */
   14499 tree
   14500 get_typenode_from_name (const char *name)
   14501 {
   14502   if (name == NULL || *name == '\0')
   14503     return NULL_TREE;
   14504 
   14505   if (strcmp (name, "char") == 0)
   14506     return char_type_node;
   14507   if (strcmp (name, "unsigned char") == 0)
   14508     return unsigned_char_type_node;
   14509   if (strcmp (name, "signed char") == 0)
   14510     return signed_char_type_node;
   14511 
   14512   if (strcmp (name, "short int") == 0)
   14513     return short_integer_type_node;
   14514   if (strcmp (name, "short unsigned int") == 0)
   14515     return short_unsigned_type_node;
   14516 
   14517   if (strcmp (name, "int") == 0)
   14518     return integer_type_node;
   14519   if (strcmp (name, "unsigned int") == 0)
   14520     return unsigned_type_node;
   14521 
   14522   if (strcmp (name, "long int") == 0)
   14523     return long_integer_type_node;
   14524   if (strcmp (name, "long unsigned int") == 0)
   14525     return long_unsigned_type_node;
   14526 
   14527   if (strcmp (name, "long long int") == 0)
   14528     return long_long_integer_type_node;
   14529   if (strcmp (name, "long long unsigned int") == 0)
   14530     return long_long_unsigned_type_node;
   14531 
   14532   gcc_unreachable ();
   14533 }
   14534 
   14535 /* List of pointer types used to declare builtins before we have seen their
   14536    real declaration.
   14537 
   14538    Keep the size up to date in tree.h !  */
/* Each entry pairs a builtin struct-pointer type node with the plain
   (const) pointer type used in its place and the struct tag name.  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};
   14548 
   14549 /* Return the maximum object size.  */
   14550 
   14551 tree
   14552 max_object_size (void)
   14553 {
   14554   /* To do: Make this a configurable parameter.  */
   14555   return TYPE_MAX_VALUE (ptrdiff_type_node);
   14556 }
   14557 
   14558 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
   14559    parameter default to false and that weeds out error_mark_node.  */
   14560 
   14561 bool
   14562 verify_type_context (location_t loc, type_context_kind context,
   14563 		     const_tree type, bool silent_p)
   14564 {
   14565   if (type == error_mark_node)
   14566     return true;
   14567 
   14568   gcc_assert (TYPE_P (type));
   14569   return (!targetm.verify_type_context
   14570 	  || targetm.verify_type_context (loc, context, type, silent_p));
   14571 }
   14572 
   14573 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
   14574    delete operators.  Return false if they may or may not name such
   14575    a pair and, when nonnull, set *PCERTAIN to true if they certainly
   14576    do not.  */
   14577 
bool
valid_new_delete_pair_p (tree new_asm, tree delete_asm,
			 bool *pcertain /* = NULL */)
{
  bool certain;
  if (!pcertain)
    pcertain = &certain;

  const char *new_name = IDENTIFIER_POINTER (new_asm);
  const char *delete_name = IDENTIFIER_POINTER (delete_asm);
  unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
  unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);

  /* The following failures are due to invalid names so they're not
     considered certain mismatches.  */
  *pcertain = false;

  /* Too short to be mangled operator new / operator delete names.  */
  if (new_len < 5 || delete_len < 6)
    return false;
  /* Strip up to two leading underscores from each name.  */
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (new_len < 4 || delete_len < 5)
    return false;

  /* The following failures are due to names of user-defined operators
     so they're also not considered certain mismatches.  */

  /* *_len is now just the length after initial underscores.  */
  if (new_name[0] != 'Z' || new_name[1] != 'n')
    return false;
  if (delete_name[0] != 'Z' || delete_name[1] != 'd')
    return false;

  /* The following failures are certain mismatches.  */
  *pcertain = true;

  /* _Znw must match _Zdl, _Zna must match _Zda.  */
  if ((new_name[2] != 'w' || delete_name[2] != 'l')
      && (new_name[2] != 'a' || delete_name[2] != 'a'))
    return false;
  /* 'j', 'm' and 'y' correspond to size_t.  */
  if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
    return false;
  /* The delete operator's first parameter must be "Pv" (void *).  */
  if (delete_name[3] != 'P' || delete_name[4] != 'v')
    return false;
  if (new_len == 4
      || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
    {
      /* _ZnXY or _ZnXYRKSt9nothrow_t matches
	 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
      if (delete_len == 5)
	return true;
      if (delete_len == 6 && delete_name[5] == new_name[3])
	return true;
      if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
	return true;
    }
  else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
	   || (new_len == 33
	       && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
    {
      /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
	 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or  or
	 _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
      if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
	return true;
      if (delete_len == 21
	  && delete_name[5] == new_name[3]
	  && !memcmp (delete_name + 6, "St11align_val_t", 15))
	return true;
      if (delete_len == 34
	  && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
	return true;
    }

  /* The negative result is conservative.  */
  *pcertain = false;
  return false;
}
   14663 
   14664 /* Return the zero-based number corresponding to the argument being
   14665    deallocated if FNDECL is a deallocation function or an out-of-bounds
   14666    value if it isn't.  */
   14667 
unsigned
fndecl_dealloc_argno (tree fndecl)
{
  /* A call to operator delete isn't recognized as one to a built-in.  */
  if (DECL_IS_OPERATOR_DELETE_P (fndecl))
    {
      /* Replaceable operator delete releases its first argument.  */
      if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
	return 0;

      /* Avoid placement delete that's not been inlined.  */
      tree fname = DECL_ASSEMBLER_NAME (fndecl);
      if (id_equal (fname, "_ZdlPvS_")       // ordinary form
	  || id_equal (fname, "_ZdaPvS_"))   // array form
	return UINT_MAX;
      return 0;
    }

  /* TODO: Handle user-defined functions with attribute malloc?  Handle
     known non-built-ins like fopen?  */
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    {
      /* free and realloc deallocate their first argument; all other
	 built-ins are not deallocators.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_FREE:
	case BUILT_IN_REALLOC:
	  return 0;
	default:
	  break;
	}
      return UINT_MAX;
    }

  /* Without attributes there is nothing more to look at.  */
  tree attrs = DECL_ATTRIBUTES (fndecl);
  if (!attrs)
    return UINT_MAX;

  /* Scan any "*dealloc" attributes for an associated argument
     position.  */
  for (tree atfree = attrs;
       (atfree = lookup_attribute ("*dealloc", atfree));
       atfree = TREE_CHAIN (atfree))
    {
      tree alloc = TREE_VALUE (atfree);
      if (!alloc)
	continue;

      /* With no explicit position recorded, the deallocated pointer
	 is the first argument.  */
      tree pos = TREE_CHAIN (alloc);
      if (!pos)
	return 0;

      /* Recorded positions are 1-based; convert to zero-based.  */
      pos = TREE_VALUE (pos);
      return TREE_INT_CST_LOW (pos) - 1;
    }

  return UINT_MAX;
}
   14722 
   14723 /* If EXPR refers to a character array or pointer declared attribute
   14724    nonstring, return a decl for that array or pointer and set *REF
   14725    to the referenced enclosing object or pointer.  Otherwise return
   14726    null.  */
   14727 
   14728 tree
   14729 get_attr_nonstring_decl (tree expr, tree *ref)
   14730 {
   14731   tree decl = expr;
   14732   tree var = NULL_TREE;
   14733   if (TREE_CODE (decl) == SSA_NAME)
   14734     {
   14735       gimple *def = SSA_NAME_DEF_STMT (decl);
   14736 
   14737       if (is_gimple_assign (def))
   14738 	{
   14739 	  tree_code code = gimple_assign_rhs_code (def);
   14740 	  if (code == ADDR_EXPR
   14741 	      || code == COMPONENT_REF
   14742 	      || code == VAR_DECL)
   14743 	    decl = gimple_assign_rhs1 (def);
   14744 	}
   14745       else
   14746 	var = SSA_NAME_VAR (decl);
   14747     }
   14748 
   14749   if (TREE_CODE (decl) == ADDR_EXPR)
   14750     decl = TREE_OPERAND (decl, 0);
   14751 
   14752   /* To simplify calling code, store the referenced DECL regardless of
   14753      the attribute determined below, but avoid storing the SSA_NAME_VAR
   14754      obtained above (it's not useful for dataflow purposes).  */
   14755   if (ref)
   14756     *ref = decl;
   14757 
   14758   /* Use the SSA_NAME_VAR that was determined above to see if it's
   14759      declared nonstring.  Otherwise drill down into the referenced
   14760      DECL.  */
   14761   if (var)
   14762     decl = var;
   14763   else if (TREE_CODE (decl) == ARRAY_REF)
   14764     decl = TREE_OPERAND (decl, 0);
   14765   else if (TREE_CODE (decl) == COMPONENT_REF)
   14766     decl = TREE_OPERAND (decl, 1);
   14767   else if (TREE_CODE (decl) == MEM_REF)
   14768     return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
   14769 
   14770   if (DECL_P (decl)
   14771       && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
   14772     return decl;
   14773 
   14774   return NULL_TREE;
   14775 }
   14776 
   14777 /* Return length of attribute names string,
   14778    if arglist chain > 1, -1 otherwise.  */
   14779 
   14780 int
   14781 get_target_clone_attr_len (tree arglist)
   14782 {
   14783   tree arg;
   14784   int str_len_sum = 0;
   14785   int argnum = 0;
   14786 
   14787   for (arg = arglist; arg; arg = TREE_CHAIN (arg))
   14788     {
   14789       const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
   14790       size_t len = strlen (str);
   14791       str_len_sum += len + 1;
   14792       for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
   14793 	argnum++;
   14794       argnum++;
   14795     }
   14796   if (argnum <= 1)
   14797     return -1;
   14798   return str_len_sum;
   14799 }
   14800 
/* Reset file-scope caches kept by tree.cc; currently only the cache
   of nonstandard integer types.  */

void
tree_cc_finalize (void)
{
  clear_nonstandard_integer_type_cache ();
}
   14806 
   14807 #if CHECKING_P
   14808 
   14809 namespace selftest {
   14810 
   14811 /* Selftests for tree.  */
   14812 
   14813 /* Verify that integer constants are sane.  */
   14814 
   14815 static void
   14816 test_integer_constants ()
   14817 {
   14818   ASSERT_TRUE (integer_type_node != NULL);
   14819   ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
   14820 
   14821   tree type = integer_type_node;
   14822 
   14823   tree zero = build_zero_cst (type);
   14824   ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
   14825   ASSERT_EQ (type, TREE_TYPE (zero));
   14826 
   14827   tree one = build_int_cst (type, 1);
   14828   ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
   14829   ASSERT_EQ (type, TREE_TYPE (zero));
   14830 }
   14831 
   14832 /* Verify identifiers.  */
   14833 
   14834 static void
   14835 test_identifiers ()
   14836 {
   14837   tree identifier = get_identifier ("foo");
   14838   ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
   14839   ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
   14840 }
   14841 
   14842 /* Verify LABEL_DECL.  */
   14843 
   14844 static void
   14845 test_labels ()
   14846 {
   14847   tree identifier = get_identifier ("err");
   14848   tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
   14849 				identifier, void_type_node);
   14850   ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
   14851   ASSERT_FALSE (FORCED_LABEL (label_decl));
   14852 }
   14853 
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are given by VALS.  VALS must have exactly as many elements as
   TYPE has subparts (enforced by the assert below).  */

static tree
build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
{
  gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
  /* Encode one element per pattern so the builder's own compression
     logic decides the final encoding.  */
  tree_vector_builder builder (type, vals.length (), 1);
  builder.splice (vals);
  return builder.build ();
}
   14865 
   14866 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED.  */
   14867 
   14868 static void
   14869 check_vector_cst (const vec<tree> &expected, tree actual)
   14870 {
   14871   ASSERT_KNOWN_EQ (expected.length (),
   14872 		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
   14873   for (unsigned int i = 0; i < expected.length (); ++i)
   14874     ASSERT_EQ (wi::to_wide (expected[i]),
   14875 	       wi::to_wide (vector_cst_elt (actual, i)));
   14876 }
   14877 
   14878 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
   14879    and that its elements match EXPECTED.  */
   14880 
   14881 static void
   14882 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
   14883 			    unsigned int npatterns)
   14884 {
   14885   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
   14886   ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
   14887   ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
   14888   ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
   14889   ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
   14890   check_vector_cst (expected, actual);
   14891 }
   14892 
   14893 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
   14894    and NPATTERNS background elements, and that its elements match
   14895    EXPECTED.  */
   14896 
   14897 static void
   14898 check_vector_cst_fill (const vec<tree> &expected, tree actual,
   14899 		       unsigned int npatterns)
   14900 {
   14901   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
   14902   ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
   14903   ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
   14904   ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
   14905   ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
   14906   check_vector_cst (expected, actual);
   14907 }
   14908 
   14909 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
   14910    and that its elements match EXPECTED.  */
   14911 
   14912 static void
   14913 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
   14914 			  unsigned int npatterns)
   14915 {
   14916   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
   14917   ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
   14918   ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
   14919   ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
   14920   ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
   14921   check_vector_cst (expected, actual);
   14922 }
   14923 
/* Test the creation of VECTOR_CSTs.  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around.
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series (80 - i for i in 1..7):
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
   15007 
   15008 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
   15009    Helper function for test_location_wrappers, to deal with STRIP_NOPS
   15010    modifying its argument in-place.  */
   15011 
   15012 static void
   15013 check_strip_nops (tree node, tree expected)
   15014 {
   15015   STRIP_NOPS (node);
   15016   ASSERT_EQ (expected, node);
   15017 }
   15018 
/* Verify location wrappers.  */

static void
test_location_wrappers ()
{
  /* Use a location other than UNKNOWN_LOCATION so that wrappers are
     actually created (see the UNKNOWN_LOCATION check below).  */
  location_t loc = BUILTINS_LOCATION;

  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));


  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
   15081 
/* Test various tree predicates.  Verify that location wrappers don't
   affect the results.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.  */

  location_t loc = BUILTINS_LOCATION;

  /* Naming scheme: i_* are INTEGER_CSTs, f_* are REAL_CSTs, c_i_*
     and c_f_* are COMPLEX_CSTs over them, and wr_* are location
     wrappers around the corresponding constant.  */

  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */

  /* Test integer_onep.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
   15398 
   15399 /* Check that string escaping works correctly.  */
   15400 
   15401 static void
   15402 test_escaped_strings (void)
   15403 {
   15404   int saved_cutoff;
   15405   escaped_string msg;
   15406 
   15407   msg.escape (NULL);
   15408   /* ASSERT_STREQ does not accept NULL as a valid test
   15409      result, so we have to use ASSERT_EQ instead.  */
   15410   ASSERT_EQ (NULL, (const char *) msg);
   15411 
   15412   msg.escape ("");
   15413   ASSERT_STREQ ("", (const char *) msg);
   15414 
   15415   msg.escape ("foobar");
   15416   ASSERT_STREQ ("foobar", (const char *) msg);
   15417 
   15418   /* Ensure that we have -fmessage-length set to 0.  */
   15419   saved_cutoff = pp_line_cutoff (global_dc->printer);
   15420   pp_line_cutoff (global_dc->printer) = 0;
   15421 
   15422   msg.escape ("foo\nbar");
   15423   ASSERT_STREQ ("foo\\nbar", (const char *) msg);
   15424 
   15425   msg.escape ("\a\b\f\n\r\t\v");
   15426   ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
   15427 
   15428   /* Now repeat the tests with -fmessage-length set to 5.  */
   15429   pp_line_cutoff (global_dc->printer) = 5;
   15430 
   15431   /* Note that the newline is not translated into an escape.  */
   15432   msg.escape ("foo\nbar");
   15433   ASSERT_STREQ ("foo\nbar", (const char *) msg);
   15434 
   15435   msg.escape ("\a\b\f\n\r\t\v");
   15436   ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
   15437 
   15438   /* Restore the original message length setting.  */
   15439   pp_line_cutoff (global_dc->printer) = saved_cutoff;
   15440 }
   15441 
/* Run all of the selftests within this file, in the same order as the
   test definitions above.  */

void
tree_cc_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}
   15455 
   15456 } // namespace selftest
   15457 
   15458 #endif /* CHECKING_P */
   15459 
   15460 #include "gt-tree.h"
   15461