1 1.1 mrg /* SSA operands management for trees. 2 1.1 mrg Copyright (C) 2003-2022 Free Software Foundation, Inc. 3 1.1 mrg 4 1.1 mrg This file is part of GCC. 5 1.1 mrg 6 1.1 mrg GCC is free software; you can redistribute it and/or modify 7 1.1 mrg it under the terms of the GNU General Public License as published by 8 1.1 mrg the Free Software Foundation; either version 3, or (at your option) 9 1.1 mrg any later version. 10 1.1 mrg 11 1.1 mrg GCC is distributed in the hope that it will be useful, 12 1.1 mrg but WITHOUT ANY WARRANTY; without even the implied warranty of 13 1.1 mrg MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 1.1 mrg GNU General Public License for more details. 15 1.1 mrg 16 1.1 mrg You should have received a copy of the GNU General Public License 17 1.1 mrg along with GCC; see the file COPYING3. If not see 18 1.1 mrg <http://www.gnu.org/licenses/>. */ 19 1.1 mrg 20 1.1 mrg #include "config.h" 21 1.1 mrg #include "system.h" 22 1.1 mrg #include "coretypes.h" 23 1.1 mrg #include "backend.h" 24 1.1 mrg #include "tree.h" 25 1.1 mrg #include "gimple.h" 26 1.1 mrg #include "timevar.h" 27 1.1 mrg #include "ssa.h" 28 1.1 mrg #include "gimple-pretty-print.h" 29 1.1 mrg #include "diagnostic-core.h" 30 1.1 mrg #include "stmt.h" 31 1.1 mrg #include "print-tree.h" 32 1.1 mrg #include "dumpfile.h" 33 1.1 mrg 34 1.1 mrg 35 1.1 mrg /* This file contains the code required to manage the operands cache of the 36 1.1 mrg SSA optimizer. For every stmt, we maintain an operand cache in the stmt 37 1.1 mrg annotation. This cache contains operands that will be of interest to 38 1.1 mrg optimizers and other passes wishing to manipulate the IL. 39 1.1 mrg 40 1.1 mrg The operand type are broken up into REAL and VIRTUAL operands. The real 41 1.1 mrg operands are represented as pointers into the stmt's operand tree. Thus 42 1.1 mrg any manipulation of the real operands will be reflected in the actual tree. 
43 1.1 mrg Virtual operands are represented solely in the cache, although the base 44 1.1 mrg variable for the SSA_NAME may, or may not occur in the stmt's tree. 45 1.1 mrg Manipulation of the virtual operands will not be reflected in the stmt tree. 46 1.1 mrg 47 1.1 mrg The routines in this file are concerned with creating this operand cache 48 1.1 mrg from a stmt tree. 49 1.1 mrg 50 1.1 mrg The operand tree is the parsed by the various get_* routines which look 51 1.1 mrg through the stmt tree for the occurrence of operands which may be of 52 1.1 mrg interest, and calls are made to the append_* routines whenever one is 53 1.1 mrg found. There are 4 of these routines, each representing one of the 54 1.1 mrg 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs. 55 1.1 mrg 56 1.1 mrg The append_* routines check for duplication, and simply keep a list of 57 1.1 mrg unique objects for each operand type in the build_* extendable vectors. 58 1.1 mrg 59 1.1 mrg Once the stmt tree is completely parsed, the finalize_ssa_operands() 60 1.1 mrg routine is called, which proceeds to perform the finalization routine 61 1.1 mrg on each of the 4 operand vectors which have been built up. 62 1.1 mrg 63 1.1 mrg If the stmt had a previous operand cache, the finalization routines 64 1.1 mrg attempt to match up the new operands with the old ones. If it's a perfect 65 1.1 mrg match, the old vector is simply reused. If it isn't a perfect match, then 66 1.1 mrg a new vector is created and the new operands are placed there. For 67 1.1 mrg virtual operands, if the previous cache had SSA_NAME version of a 68 1.1 mrg variable, and that same variable occurs in the same operands cache, then 69 1.1 mrg the new cache vector will also get the same SSA_NAME. 70 1.1 mrg 71 1.1 mrg i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new 72 1.1 mrg operand vector for VUSE, then the new vector will also be modified 73 1.1 mrg such that it contains 'a_5' rather than 'a'. 
*/

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* Operand is in a place where address-taken does not imply addressable.
   Note: bit (1 << 2) is currently unused.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)

/* Class containing temporary per-stmt state.  A scanner is constructed
   per statement, fills the build_* members by walking the statement, and
   commits them to the stmt's operand cache in the finalize_* members.  */

class operands_scanner
{
  public:
    operands_scanner (struct function *fun, gimple *statement)
      {
	build_vuse = NULL_TREE;
	build_vdef = NULL_TREE;
	fn = fun;
	stmt = statement;
      }

    /* Create an operands cache for STMT.  */
    void build_ssa_operands ();

    /* Verifies SSA statement operands.  */
    DEBUG_FUNCTION bool verify_ssa_operands ();

  private:
    /* Disable copy and assign of this class, as it may have problems with
       build_uses vec.  */
    DISABLE_COPY_AND_ASSIGN (operands_scanner);

    /* Array for building all the use operands.  */
    auto_vec<tree *, 16> build_uses;

    /* The built VDEF operand.  */
    tree build_vdef;

    /* The built VUSE operand.  */
    tree build_vuse;

    /* Function which STMT belongs to.  */
    struct function *fn;

    /* Statement to work on.  */
    gimple *stmt;

    /* Takes elements from build_uses and turns them into use operands of
       STMT.  */
    void finalize_ssa_uses ();

    /* Clear the in_list bits and empty the build array for VDEFs and
       VUSEs.  */
    void cleanup_build_arrays ();

    /* Finalize all the build vectors, fill the new ones into INFO.  */
    void finalize_ssa_stmt_operands ();

    /* Start the process of building up operands vectors in INFO.  */
    void start_ssa_stmt_operands ();

    /* Add USE_P to the list of pointers to operands.  */
    void append_use (tree *use_p);

    /* Add VAR to the set of variables that require a VDEF operator.  */
    void append_vdef (tree var);

    /* Add VAR to the set of variables that require a VUSE operator.  */
    void append_vuse (tree var);

    /* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */
    void add_virtual_operand (int flags);

    /* Add *VAR_P to the appropriate operand array for statement STMT.
       FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
       it will be added to the statement's real operands, otherwise it is
       added to virtual operands.  */
    void add_stmt_operand (tree *var_p, int flags);

    /* A subroutine of get_expr_operands to handle MEM_REF.

       STMT is the statement being processed, EXPR is the MEM_REF
       that got us here.

       FLAGS is as in get_expr_operands.  */
    void get_mem_ref_operands (tree expr, int flags);

    /* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */
    void get_tmr_operands (tree expr, int flags);

    /* If STMT is a call that may clobber globals and other symbols that
       escape, add them to the VDEF/VUSE lists for it.  */
    void maybe_add_call_vops (gcall *stmt);

    /* Scan operands in the ASM_EXPR stmt referred to in INFO.  */
    void get_asm_stmt_operands (gasm *stmt);

    /* Recursively scan the expression pointed to by EXPR_P in statement
       STMT.  FLAGS is one of the OPF_* constants modifying how to
       interpret the operands found.  */
    void get_expr_operands (tree *expr_p, int flags);

    /* Parse STMT looking for operands.  When finished, the various
       build_* operand vectors will have potential operands in them.  */
    void parse_ssa_operands ();

    /* Takes elements from build_defs and turns them into def operands of
       STMT.  TODO -- Make build_defs vec of tree *.  */
    void finalize_ssa_defs ();
};

/* Accessor to tree-ssa-operands.cc caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}


/* Return true if the SSA operands cache is active.
*/ 217 1.1 mrg 218 1.1 mrg bool 219 1.1 mrg ssa_operands_active (struct function *fun) 220 1.1 mrg { 221 1.1 mrg if (fun == NULL) 222 1.1 mrg return false; 223 1.1 mrg 224 1.1 mrg return fun->gimple_df && gimple_ssa_operands (fun)->ops_active; 225 1.1 mrg } 226 1.1 mrg 227 1.1 mrg 228 1.1 mrg /* Create the VOP variable, an artificial global variable to act as a 229 1.1 mrg representative of all of the virtual operands FUD chain. */ 230 1.1 mrg 231 1.1 mrg static void 232 1.1 mrg create_vop_var (struct function *fn) 233 1.1 mrg { 234 1.1 mrg tree global_var; 235 1.1 mrg 236 1.1 mrg gcc_assert (fn->gimple_df->vop == NULL_TREE); 237 1.1 mrg 238 1.1 mrg global_var = build_decl (BUILTINS_LOCATION, VAR_DECL, 239 1.1 mrg get_identifier (".MEM"), 240 1.1 mrg void_type_node); 241 1.1 mrg DECL_ARTIFICIAL (global_var) = 1; 242 1.1 mrg DECL_IGNORED_P (global_var) = 1; 243 1.1 mrg TREE_READONLY (global_var) = 0; 244 1.1 mrg DECL_EXTERNAL (global_var) = 1; 245 1.1 mrg TREE_STATIC (global_var) = 1; 246 1.1 mrg TREE_USED (global_var) = 1; 247 1.1 mrg DECL_CONTEXT (global_var) = NULL_TREE; 248 1.1 mrg TREE_THIS_VOLATILE (global_var) = 0; 249 1.1 mrg TREE_ADDRESSABLE (global_var) = 0; 250 1.1 mrg VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1; 251 1.1 mrg 252 1.1 mrg fn->gimple_df->vop = global_var; 253 1.1 mrg } 254 1.1 mrg 255 1.1 mrg /* These are the sizes of the operand memory buffer in bytes which gets 256 1.1 mrg allocated each time more operands space is required. The final value is 257 1.1 mrg the amount that is allocated every time after that. 258 1.1 mrg In 1k we can fit 25 use operands (or 63 def operands) on a host with 259 1.1 mrg 8 byte pointers, that would be 10 statements each with 1 def and 2 260 1.1 mrg uses. 
*/

#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))

/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  /* Point the index at the (stale) chunk-size value so that the very
     first ssa_operand_alloc call sees an exhausted chunk and allocates
     a fresh one.  */
  gimple_ssa_operands (fn)->operand_memory_index
     = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (struct function *fn)
{
  struct ssa_operand_memory_d *ptr;

  /* Drop the free list; its nodes live inside the chunks released
     below.  */
  gimple_ssa_operands (fn)->free_uses = NULL;

  /* Walk the chunk list, returning each chunk to the GC allocator.  */
  while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
    {
      gimple_ssa_operands (fn)->operand_memory
	= gimple_ssa_operands (fn)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (fn)->ops_active = false;

  fn->gimple_df->vop = NULL_TREE;
}


/* Return memory for an operand of size SIZE.
*/

static inline void *
ssa_operand_alloc (struct function *fn, unsigned size)
{
  char *ptr;

  /* Use operands are the only kind allocated from this pool.  */
  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (fn)->operand_memory_index + size
      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      /* Current chunk is exhausted; grow the chunk size along the
	 schedule OP_SIZE_INIT -> OP_SIZE_1 -> OP_SIZE_2 -> OP_SIZE_3,
	 staying at OP_SIZE_3 thereafter.  */
      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Allocate the new chunk (+ room for the 'next' link) and push it
	 onto the chunk list.  */
      ptr = (ssa_operand_memory_d *) ggc_internal_alloc
	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (fn)->operand_memory;
      gimple_ssa_operands (fn)->operand_memory = ptr;
      gimple_ssa_operands (fn)->operand_memory_index = 0;
    }

  /* Carve SIZE bytes out of the head chunk.  */
  ptr = &(gimple_ssa_operands (fn)->operand_memory
	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
  gimple_ssa_operands (fn)->operand_memory_index += size;
  return ptr;
}


/* Allocate a USE operand.
*/

static inline struct use_optype_d *
alloc_use (struct function *fn)
{
  struct use_optype_d *ret;
  /* Prefer recycling a node from the free list over carving fresh pool
     memory.  */
  if (gimple_ssa_operands (fn)->free_uses)
    {
      ret = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses
	= gimple_ssa_operands (fn)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (fn, sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use (fn);
  USE_OP_PTR (new_use)->use = op;
  /* Register the use on *OP's immediate-use chain.  */
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}



/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

inline void
operands_scanner::finalize_ssa_defs ()
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      /* Compare against the underlying variable of an existing SSA
	 name so a matching VDEF is left untouched.  */
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}


/* Takes elements from build_uses and turns them into use operands of STMT.  */

inline void
operands_scanner::finalize_ssa_uses ()
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      /* The VUSE pointer slot always becomes the first entry of the
	 use list.  */
      build_uses.safe_insert (0, gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      /* Delink every node from its immediate-use chain, then splice the
	 whole old list onto the free list for reuse.  */
      for (ptr = old_ops; ptr->next; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      delink_imm_use (USE_OP_PTR (ptr));
      ptr->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (fn));
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = build_uses[new_i];
      last = add_use_op (fn, stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}


/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

inline void
operands_scanner::cleanup_build_arrays ()
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}


/* Finalize all the build vectors, fill the new ones into INFO.  */

inline void
operands_scanner::finalize_ssa_stmt_operands ()
{
  finalize_ssa_defs ();
  finalize_ssa_uses ();
  cleanup_build_arrays ();
}


/* Start the process of building up operands vectors in INFO.
*/ 512 1.1 mrg 513 1.1 mrg inline void 514 1.1 mrg operands_scanner::start_ssa_stmt_operands () 515 1.1 mrg { 516 1.1 mrg gcc_assert (build_uses.length () == 0); 517 1.1 mrg gcc_assert (build_vuse == NULL_TREE); 518 1.1 mrg gcc_assert (build_vdef == NULL_TREE); 519 1.1 mrg } 520 1.1 mrg 521 1.1 mrg 522 1.1 mrg /* Add USE_P to the list of pointers to operands. */ 523 1.1 mrg 524 1.1 mrg inline void 525 1.1 mrg operands_scanner::append_use (tree *use_p) 526 1.1 mrg { 527 1.1 mrg build_uses.safe_push (use_p); 528 1.1 mrg } 529 1.1 mrg 530 1.1 mrg 531 1.1 mrg /* Add VAR to the set of variables that require a VDEF operator. */ 532 1.1 mrg 533 1.1 mrg inline void 534 1.1 mrg operands_scanner::append_vdef (tree var) 535 1.1 mrg { 536 1.1 mrg gcc_assert ((build_vdef == NULL_TREE 537 1.1 mrg || build_vdef == var) 538 1.1 mrg && (build_vuse == NULL_TREE 539 1.1 mrg || build_vuse == var)); 540 1.1 mrg 541 1.1 mrg build_vdef = var; 542 1.1 mrg build_vuse = var; 543 1.1 mrg } 544 1.1 mrg 545 1.1 mrg 546 1.1 mrg /* Add VAR to the set of variables that require a VUSE operator. */ 547 1.1 mrg 548 1.1 mrg inline void 549 1.1 mrg operands_scanner::append_vuse (tree var) 550 1.1 mrg { 551 1.1 mrg gcc_assert (build_vuse == NULL_TREE 552 1.1 mrg || build_vuse == var); 553 1.1 mrg 554 1.1 mrg build_vuse = var; 555 1.1 mrg } 556 1.1 mrg 557 1.1 mrg /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */ 558 1.1 mrg 559 1.1 mrg void 560 1.1 mrg operands_scanner::add_virtual_operand (int flags) 561 1.1 mrg { 562 1.1 mrg /* Add virtual operands to the stmt, unless the caller has specifically 563 1.1 mrg requested not to do that (used when adding operands inside an 564 1.1 mrg ADDR_EXPR expression). 
*/ 565 1.1 mrg if (flags & opf_no_vops) 566 1.1 mrg return; 567 1.1 mrg 568 1.1 mrg gcc_assert (!is_gimple_debug (stmt)); 569 1.1 mrg 570 1.1 mrg if (flags & opf_def) 571 1.1 mrg append_vdef (gimple_vop (fn)); 572 1.1 mrg else 573 1.1 mrg append_vuse (gimple_vop (fn)); 574 1.1 mrg } 575 1.1 mrg 576 1.1 mrg 577 1.1 mrg /* Add *VAR_P to the appropriate operand array for statement STMT. 578 1.1 mrg FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register, 579 1.1 mrg it will be added to the statement's real operands, otherwise it is 580 1.1 mrg added to virtual operands. */ 581 1.1 mrg 582 1.1 mrg void 583 1.1 mrg operands_scanner::add_stmt_operand (tree *var_p, int flags) 584 1.1 mrg { 585 1.1 mrg tree var = *var_p; 586 1.1 mrg 587 1.1 mrg gcc_assert (SSA_VAR_P (*var_p) 588 1.1 mrg || TREE_CODE (*var_p) == STRING_CST 589 1.1 mrg || TREE_CODE (*var_p) == CONST_DECL); 590 1.1 mrg 591 1.1 mrg if (is_gimple_reg (var)) 592 1.1 mrg { 593 1.1 mrg /* The variable is a GIMPLE register. Add it to real operands. */ 594 1.1 mrg if (flags & opf_def) 595 1.1 mrg ; 596 1.1 mrg else 597 1.1 mrg append_use (var_p); 598 1.1 mrg if (DECL_P (*var_p)) 599 1.1 mrg fn->gimple_df->ssa_renaming_needed = 1; 600 1.1 mrg } 601 1.1 mrg else 602 1.1 mrg { 603 1.1 mrg /* Mark statements with volatile operands. */ 604 1.1 mrg if (!(flags & opf_no_vops) 605 1.1 mrg && TREE_THIS_VOLATILE (var)) 606 1.1 mrg gimple_set_has_volatile_ops (stmt, true); 607 1.1 mrg 608 1.1 mrg /* The variable is a memory access. Add virtual operands. */ 609 1.1 mrg add_virtual_operand (flags); 610 1.1 mrg } 611 1.1 mrg } 612 1.1 mrg 613 1.1 mrg /* Mark the base address of REF as having its address taken. 614 1.1 mrg REF may be a single variable whose address has been taken or any 615 1.1 mrg other valid GIMPLE memory reference (structure reference, array, 616 1.1 mrg etc). 
*/ 617 1.1 mrg 618 1.1 mrg static void 619 1.1 mrg mark_address_taken (tree ref) 620 1.1 mrg { 621 1.1 mrg tree var; 622 1.1 mrg 623 1.1 mrg /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF 624 1.1 mrg as the only thing we take the address of. If VAR is a structure, 625 1.1 mrg taking the address of a field means that the whole structure may 626 1.1 mrg be referenced using pointer arithmetic. See PR 21407 and the 627 1.1 mrg ensuing mailing list discussion. */ 628 1.1 mrg var = get_base_address (ref); 629 1.1 mrg if (VAR_P (var) 630 1.1 mrg || TREE_CODE (var) == RESULT_DECL 631 1.1 mrg || TREE_CODE (var) == PARM_DECL) 632 1.1 mrg TREE_ADDRESSABLE (var) = 1; 633 1.1 mrg } 634 1.1 mrg 635 1.1 mrg 636 1.1 mrg /* A subroutine of get_expr_operands to handle MEM_REF. 637 1.1 mrg 638 1.1 mrg STMT is the statement being processed, EXPR is the MEM_REF 639 1.1 mrg that got us here. 640 1.1 mrg 641 1.1 mrg FLAGS is as in get_expr_operands. */ 642 1.1 mrg 643 1.1 mrg void 644 1.1 mrg operands_scanner::get_mem_ref_operands (tree expr, int flags) 645 1.1 mrg { 646 1.1 mrg tree *pptr = &TREE_OPERAND (expr, 0); 647 1.1 mrg 648 1.1 mrg if (!(flags & opf_no_vops) 649 1.1 mrg && TREE_THIS_VOLATILE (expr)) 650 1.1 mrg gimple_set_has_volatile_ops (stmt, true); 651 1.1 mrg 652 1.1 mrg /* Add the VOP. */ 653 1.1 mrg add_virtual_operand (flags); 654 1.1 mrg 655 1.1 mrg /* If requested, add a USE operand for the base pointer. */ 656 1.1 mrg get_expr_operands (pptr, 657 1.1 mrg opf_non_addressable | opf_use 658 1.1 mrg | (flags & (opf_no_vops|opf_not_non_addressable))); 659 1.1 mrg } 660 1.1 mrg 661 1.1 mrg 662 1.1 mrg /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. 
*/

void
operands_scanner::get_tmr_operands(tree expr, int flags)
{
  /* Volatility of the reference transfers to the statement.  */
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (&TMR_BASE (expr),
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
  get_expr_operands (&TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (&TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

void
operands_scanner::maybe_add_call_vops (gcall *stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST)))
	add_virtual_operand (opf_def);
      /* A 'pure' function may still read memory, so it gets a VUSE;
	 a 'const' function gets no virtual operand at all.  */
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (opf_use);
    }
}


/* Scan operands in the ASM_EXPR stmt referred to in INFO.
*/

void
operands_scanner::get_asm_stmt_operands (gasm *stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  /* Output constraints are kept so input-constraint parsing can refer
     back to them (matching constraints).  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (&TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (&TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (opf_def);
}


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

void
operands_scanner::get_expr_operands (tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  /* For debug stmts, propagate only the no-vops restriction into the
     flags used for nested rvalue operands.  */
  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.
The only operands that we should find 795 1.1 mrg here are ARRAY_REF indices which will always be real operands 796 1.1 mrg (GIMPLE does not allow non-registers as array indices). */ 797 1.1 mrg flags |= opf_no_vops; 798 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 0), 799 1.1 mrg flags | opf_not_non_addressable | opf_address_taken); 800 1.1 mrg return; 801 1.1 mrg 802 1.1 mrg case SSA_NAME: 803 1.1 mrg case VAR_DECL: 804 1.1 mrg case PARM_DECL: 805 1.1 mrg case RESULT_DECL: 806 1.1 mrg case STRING_CST: 807 1.1 mrg case CONST_DECL: 808 1.1 mrg if (!(flags & opf_address_taken)) 809 1.1 mrg add_stmt_operand (expr_p, flags); 810 1.1 mrg return; 811 1.1 mrg 812 1.1 mrg case DEBUG_EXPR_DECL: 813 1.1 mrg gcc_assert (gimple_debug_bind_p (stmt)); 814 1.1 mrg return; 815 1.1 mrg 816 1.1 mrg case MEM_REF: 817 1.1 mrg get_mem_ref_operands (expr, flags); 818 1.1 mrg return; 819 1.1 mrg 820 1.1 mrg case TARGET_MEM_REF: 821 1.1 mrg get_tmr_operands (expr, flags); 822 1.1 mrg return; 823 1.1 mrg 824 1.1 mrg case ARRAY_REF: 825 1.1 mrg case ARRAY_RANGE_REF: 826 1.1 mrg case COMPONENT_REF: 827 1.1 mrg case REALPART_EXPR: 828 1.1 mrg case IMAGPART_EXPR: 829 1.1 mrg { 830 1.1 mrg if (!(flags & opf_no_vops) 831 1.1 mrg && TREE_THIS_VOLATILE (expr)) 832 1.1 mrg gimple_set_has_volatile_ops (stmt, true); 833 1.1 mrg 834 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 0), flags); 835 1.1 mrg 836 1.1 mrg if (code == COMPONENT_REF) 837 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 2), uflags); 838 1.1 mrg else if (code == ARRAY_REF || code == ARRAY_RANGE_REF) 839 1.1 mrg { 840 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 1), uflags); 841 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 2), uflags); 842 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 3), uflags); 843 1.1 mrg } 844 1.1 mrg 845 1.1 mrg return; 846 1.1 mrg } 847 1.1 mrg 848 1.1 mrg case WITH_SIZE_EXPR: 849 1.1 mrg /* WITH_SIZE_EXPR is a pass-through reference to its first argument, 850 1.1 mrg and an rvalue reference to 
its second argument. */ 851 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 1), uflags); 852 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 0), flags); 853 1.1 mrg return; 854 1.1 mrg 855 1.1 mrg case COND_EXPR: 856 1.1 mrg case VEC_COND_EXPR: 857 1.1 mrg case VEC_PERM_EXPR: 858 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 0), uflags); 859 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 1), uflags); 860 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 2), uflags); 861 1.1 mrg return; 862 1.1 mrg 863 1.1 mrg case CONSTRUCTOR: 864 1.1 mrg { 865 1.1 mrg /* General aggregate CONSTRUCTORs have been decomposed, but they 866 1.1 mrg are still in use as the COMPLEX_EXPR equivalent for vectors. */ 867 1.1 mrg constructor_elt *ce; 868 1.1 mrg unsigned HOST_WIDE_INT idx; 869 1.1 mrg 870 1.1 mrg /* A volatile constructor is actually TREE_CLOBBER_P, transfer 871 1.1 mrg the volatility to the statement, don't use TREE_CLOBBER_P for 872 1.1 mrg mirroring the other uses of THIS_VOLATILE in this file. 
*/ 873 1.1 mrg if (!(flags & opf_no_vops) 874 1.1 mrg && TREE_THIS_VOLATILE (expr)) 875 1.1 mrg gimple_set_has_volatile_ops (stmt, true); 876 1.1 mrg 877 1.1 mrg for (idx = 0; 878 1.1 mrg vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce); 879 1.1 mrg idx++) 880 1.1 mrg get_expr_operands (&ce->value, uflags); 881 1.1 mrg 882 1.1 mrg return; 883 1.1 mrg } 884 1.1 mrg 885 1.1 mrg case BIT_FIELD_REF: 886 1.1 mrg if (!(flags & opf_no_vops) 887 1.1 mrg && TREE_THIS_VOLATILE (expr)) 888 1.1 mrg gimple_set_has_volatile_ops (stmt, true); 889 1.1 mrg /* FALLTHRU */ 890 1.1 mrg 891 1.1 mrg case VIEW_CONVERT_EXPR: 892 1.1 mrg do_unary: 893 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 0), flags); 894 1.1 mrg return; 895 1.1 mrg 896 1.1 mrg case BIT_INSERT_EXPR: 897 1.1 mrg case COMPOUND_EXPR: 898 1.1 mrg case OBJ_TYPE_REF: 899 1.1 mrg case ASSERT_EXPR: 900 1.1 mrg do_binary: 901 1.1 mrg { 902 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 0), flags); 903 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 1), flags); 904 1.1 mrg return; 905 1.1 mrg } 906 1.1 mrg 907 1.1 mrg case DOT_PROD_EXPR: 908 1.1 mrg case SAD_EXPR: 909 1.1 mrg case REALIGN_LOAD_EXPR: 910 1.1 mrg case WIDEN_MULT_PLUS_EXPR: 911 1.1 mrg case WIDEN_MULT_MINUS_EXPR: 912 1.1 mrg { 913 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 0), flags); 914 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 1), flags); 915 1.1 mrg get_expr_operands (&TREE_OPERAND (expr, 2), flags); 916 1.1 mrg return; 917 1.1 mrg } 918 1.1 mrg 919 1.1 mrg case FUNCTION_DECL: 920 1.1 mrg case LABEL_DECL: 921 1.1 mrg case CASE_LABEL_EXPR: 922 1.1 mrg /* Expressions that make no memory references. 
*/ 923 1.1 mrg return; 924 1.1 mrg 925 1.1 mrg default: 926 1.1 mrg if (codeclass == tcc_unary) 927 1.1 mrg goto do_unary; 928 1.1 mrg if (codeclass == tcc_binary || codeclass == tcc_comparison) 929 1.1 mrg goto do_binary; 930 1.1 mrg if (codeclass == tcc_constant || codeclass == tcc_type) 931 1.1 mrg return; 932 1.1 mrg } 933 1.1 mrg 934 1.1 mrg /* If we get here, something has gone wrong. */ 935 1.1 mrg if (flag_checking) 936 1.1 mrg { 937 1.1 mrg fprintf (stderr, "unhandled expression in get_expr_operands():\n"); 938 1.1 mrg debug_tree (expr); 939 1.1 mrg fputs ("\n", stderr); 940 1.1 mrg gcc_unreachable (); 941 1.1 mrg } 942 1.1 mrg } 943 1.1 mrg 944 1.1 mrg 945 1.1 mrg /* Parse STMT looking for operands. When finished, the various 946 1.1 mrg build_* operand vectors will have potential operands in them. */ 947 1.1 mrg 948 1.1 mrg void 949 1.1 mrg operands_scanner::parse_ssa_operands () 950 1.1 mrg { 951 1.1 mrg enum gimple_code code = gimple_code (stmt); 952 1.1 mrg size_t i, n, start = 0; 953 1.1 mrg 954 1.1 mrg switch (code) 955 1.1 mrg { 956 1.1 mrg case GIMPLE_ASM: 957 1.1 mrg get_asm_stmt_operands (as_a <gasm *> (stmt)); 958 1.1 mrg break; 959 1.1 mrg 960 1.1 mrg case GIMPLE_TRANSACTION: 961 1.1 mrg /* The start of a transaction is a memory barrier. */ 962 1.1 mrg add_virtual_operand (opf_def | opf_use); 963 1.1 mrg break; 964 1.1 mrg 965 1.1 mrg case GIMPLE_DEBUG: 966 1.1 mrg if (gimple_debug_bind_p (stmt) 967 1.1 mrg && gimple_debug_bind_has_value_p (stmt)) 968 1.1 mrg get_expr_operands (gimple_debug_bind_get_value_ptr (stmt), 969 1.1 mrg opf_use | opf_no_vops); 970 1.1 mrg break; 971 1.1 mrg 972 1.1 mrg case GIMPLE_RETURN: 973 1.1 mrg append_vuse (gimple_vop (fn)); 974 1.1 mrg goto do_default; 975 1.1 mrg 976 1.1 mrg case GIMPLE_CALL: 977 1.1 mrg /* Add call-clobbered operands, if needed. 
*/ 978 1.1 mrg maybe_add_call_vops (as_a <gcall *> (stmt)); 979 1.1 mrg /* FALLTHRU */ 980 1.1 mrg 981 1.1 mrg case GIMPLE_ASSIGN: 982 1.1 mrg get_expr_operands (gimple_op_ptr (stmt, 0), opf_def); 983 1.1 mrg start = 1; 984 1.1 mrg /* FALLTHRU */ 985 1.1 mrg 986 1.1 mrg default: 987 1.1 mrg do_default: 988 1.1 mrg n = gimple_num_ops (stmt); 989 1.1 mrg for (i = start; i < n; i++) 990 1.1 mrg get_expr_operands (gimple_op_ptr (stmt, i), opf_use); 991 1.1 mrg break; 992 1.1 mrg } 993 1.1 mrg } 994 1.1 mrg 995 1.1 mrg 996 1.1 mrg /* Create an operands cache for STMT. */ 997 1.1 mrg 998 1.1 mrg void 999 1.1 mrg operands_scanner::build_ssa_operands () 1000 1.1 mrg { 1001 1.1 mrg /* Initially assume that the statement has no volatile operands. */ 1002 1.1 mrg gimple_set_has_volatile_ops (stmt, false); 1003 1.1 mrg 1004 1.1 mrg start_ssa_stmt_operands (); 1005 1.1 mrg parse_ssa_operands (); 1006 1.1 mrg finalize_ssa_stmt_operands (); 1007 1.1 mrg } 1008 1.1 mrg 1009 1.1 mrg /* Verifies SSA statement operands. */ 1010 1.1 mrg 1011 1.1 mrg DEBUG_FUNCTION bool 1012 1.1 mrg operands_scanner::verify_ssa_operands () 1013 1.1 mrg { 1014 1.1 mrg use_operand_p use_p; 1015 1.1 mrg def_operand_p def_p; 1016 1.1 mrg ssa_op_iter iter; 1017 1.1 mrg unsigned i; 1018 1.1 mrg tree def; 1019 1.1 mrg bool volatile_p = gimple_has_volatile_ops (stmt); 1020 1.1 mrg 1021 1.1 mrg /* build_ssa_operands w/o finalizing them. */ 1022 1.1 mrg gimple_set_has_volatile_ops (stmt, false); 1023 1.1 mrg start_ssa_stmt_operands (); 1024 1.1 mrg parse_ssa_operands (); 1025 1.1 mrg 1026 1.1 mrg /* Now verify the built operands are the same as present in STMT. 
*/ 1027 1.1 mrg def = gimple_vdef (stmt); 1028 1.1 mrg if (def 1029 1.1 mrg && TREE_CODE (def) == SSA_NAME) 1030 1.1 mrg def = SSA_NAME_VAR (def); 1031 1.1 mrg if (build_vdef != def) 1032 1.1 mrg { 1033 1.1 mrg error ("virtual definition of statement not up to date"); 1034 1.1 mrg return true; 1035 1.1 mrg } 1036 1.1 mrg if (gimple_vdef (stmt) 1037 1.1 mrg && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P 1038 1.1 mrg || DEF_FROM_PTR (def_p) != gimple_vdef (stmt))) 1039 1.1 mrg { 1040 1.1 mrg error ("virtual def operand missing for statement"); 1041 1.1 mrg return true; 1042 1.1 mrg } 1043 1.1 mrg 1044 1.1 mrg tree use = gimple_vuse (stmt); 1045 1.1 mrg if (use 1046 1.1 mrg && TREE_CODE (use) == SSA_NAME) 1047 1.1 mrg use = SSA_NAME_VAR (use); 1048 1.1 mrg if (build_vuse != use) 1049 1.1 mrg { 1050 1.1 mrg error ("virtual use of statement not up to date"); 1051 1.1 mrg return true; 1052 1.1 mrg } 1053 1.1 mrg if (gimple_vuse (stmt) 1054 1.1 mrg && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P 1055 1.1 mrg || USE_FROM_PTR (use_p) != gimple_vuse (stmt))) 1056 1.1 mrg { 1057 1.1 mrg error ("virtual use operand missing for statement"); 1058 1.1 mrg return true; 1059 1.1 mrg } 1060 1.1 mrg 1061 1.1 mrg FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE) 1062 1.1 mrg { 1063 1.1 mrg tree *op; 1064 1.1 mrg FOR_EACH_VEC_ELT (build_uses, i, op) 1065 1.1 mrg { 1066 1.1 mrg if (use_p->use == op) 1067 1.1 mrg { 1068 1.1 mrg build_uses[i] = NULL; 1069 1.1 mrg break; 1070 1.1 mrg } 1071 1.1 mrg } 1072 1.1 mrg if (i == build_uses.length ()) 1073 1.1 mrg { 1074 1.1 mrg error ("excess use operand for statement"); 1075 1.1 mrg debug_generic_expr (USE_FROM_PTR (use_p)); 1076 1.1 mrg return true; 1077 1.1 mrg } 1078 1.1 mrg } 1079 1.1 mrg 1080 1.1 mrg tree *op; 1081 1.1 mrg FOR_EACH_VEC_ELT (build_uses, i, op) 1082 1.1 mrg if (op != NULL) 1083 1.1 mrg { 1084 1.1 mrg error ("use operand missing for statement"); 1085 1.1 mrg debug_generic_expr (*op); 1086 1.1 mrg 
return true; 1087 1.1 mrg } 1088 1.1 mrg 1089 1.1 mrg if (gimple_has_volatile_ops (stmt) != volatile_p) 1090 1.1 mrg { 1091 1.1 mrg error ("statement volatile flag not up to date"); 1092 1.1 mrg return true; 1093 1.1 mrg } 1094 1.1 mrg 1095 1.1 mrg cleanup_build_arrays (); 1096 1.1 mrg return false; 1097 1.1 mrg } 1098 1.1 mrg 1099 1.1 mrg /* Interface for external use. */ 1100 1.1 mrg 1101 1.1 mrg DEBUG_FUNCTION bool 1102 1.1 mrg verify_ssa_operands (struct function *fn, gimple *stmt) 1103 1.1 mrg { 1104 1.1 mrg return operands_scanner (fn, stmt).verify_ssa_operands (); 1105 1.1 mrg } 1106 1.1 mrg 1107 1.1 mrg 1108 1.1 mrg /* Releases the operands of STMT back to their freelists, and clears 1109 1.1 mrg the stmt operand lists. */ 1110 1.1 mrg 1111 1.1 mrg void 1112 1.1 mrg free_stmt_operands (struct function *fn, gimple *stmt) 1113 1.1 mrg { 1114 1.1 mrg use_optype_p uses = gimple_use_ops (stmt), last_use; 1115 1.1 mrg 1116 1.1 mrg if (uses) 1117 1.1 mrg { 1118 1.1 mrg for (last_use = uses; last_use->next; last_use = last_use->next) 1119 1.1 mrg delink_imm_use (USE_OP_PTR (last_use)); 1120 1.1 mrg delink_imm_use (USE_OP_PTR (last_use)); 1121 1.1 mrg last_use->next = gimple_ssa_operands (fn)->free_uses; 1122 1.1 mrg gimple_ssa_operands (fn)->free_uses = uses; 1123 1.1 mrg gimple_set_use_ops (stmt, NULL); 1124 1.1 mrg } 1125 1.1 mrg 1126 1.1 mrg if (gimple_has_mem_ops (stmt)) 1127 1.1 mrg { 1128 1.1 mrg gimple_set_vuse (stmt, NULL_TREE); 1129 1.1 mrg gimple_set_vdef (stmt, NULL_TREE); 1130 1.1 mrg } 1131 1.1 mrg } 1132 1.1 mrg 1133 1.1 mrg 1134 1.1 mrg /* Get the operands of statement STMT. */ 1135 1.1 mrg 1136 1.1 mrg void 1137 1.1 mrg update_stmt_operands (struct function *fn, gimple *stmt) 1138 1.1 mrg { 1139 1.1 mrg /* If update_stmt_operands is called before SSA is initialized, do 1140 1.1 mrg nothing. 
*/ 1141 1.1 mrg if (!ssa_operands_active (fn)) 1142 1.1 mrg return; 1143 1.1 mrg 1144 1.1 mrg timevar_push (TV_TREE_OPS); 1145 1.1 mrg 1146 1.1 mrg gcc_assert (gimple_modified_p (stmt)); 1147 1.1 mrg operands_scanner (fn, stmt).build_ssa_operands (); 1148 1.1 mrg gimple_set_modified (stmt, false); 1149 1.1 mrg 1150 1.1 mrg timevar_pop (TV_TREE_OPS); 1151 1.1 mrg } 1152 1.1 mrg 1153 1.1 mrg 1154 1.1 mrg /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is done 1155 1.1 mrg to test the validity of the swap operation. */ 1156 1.1 mrg 1157 1.1 mrg void 1158 1.1 mrg swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1) 1159 1.1 mrg { 1160 1.1 mrg tree op0, op1; 1161 1.1 mrg op0 = *exp0; 1162 1.1 mrg op1 = *exp1; 1163 1.1 mrg 1164 1.1 mrg if (op0 != op1) 1165 1.1 mrg { 1166 1.1 mrg /* Attempt to preserve the relative positions of these two operands in 1167 1.1 mrg their * respective immediate use lists by adjusting their use pointer 1168 1.1 mrg to point to the new operand position. */ 1169 1.1 mrg use_optype_p use0, use1, ptr; 1170 1.1 mrg use0 = use1 = NULL; 1171 1.1 mrg 1172 1.1 mrg /* Find the 2 operands in the cache, if they are there. */ 1173 1.1 mrg for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) 1174 1.1 mrg if (USE_OP_PTR (ptr)->use == exp0) 1175 1.1 mrg { 1176 1.1 mrg use0 = ptr; 1177 1.1 mrg break; 1178 1.1 mrg } 1179 1.1 mrg 1180 1.1 mrg for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) 1181 1.1 mrg if (USE_OP_PTR (ptr)->use == exp1) 1182 1.1 mrg { 1183 1.1 mrg use1 = ptr; 1184 1.1 mrg break; 1185 1.1 mrg } 1186 1.1 mrg 1187 1.1 mrg /* And adjust their location to point to the new position of the 1188 1.1 mrg operand. */ 1189 1.1 mrg if (use0) 1190 1.1 mrg USE_OP_PTR (use0)->use = exp1; 1191 1.1 mrg if (use1) 1192 1.1 mrg USE_OP_PTR (use1)->use = exp0; 1193 1.1 mrg 1194 1.1 mrg /* Now swap the data. 
*/ 1195 1.1 mrg *exp0 = op1; 1196 1.1 mrg *exp1 = op0; 1197 1.1 mrg } 1198 1.1 mrg } 1199 1.1 mrg 1200 1.1 mrg 1201 1.1 mrg /* Scan the immediate_use list for VAR making sure its linked properly. 1202 1.1 mrg Return TRUE if there is a problem and emit an error message to F. */ 1203 1.1 mrg 1204 1.1 mrg DEBUG_FUNCTION bool 1205 1.1 mrg verify_imm_links (FILE *f, tree var) 1206 1.1 mrg { 1207 1.1 mrg use_operand_p ptr, prev, list; 1208 1.1 mrg unsigned int count; 1209 1.1 mrg 1210 1.1 mrg gcc_assert (TREE_CODE (var) == SSA_NAME); 1211 1.1 mrg 1212 1.1 mrg list = &(SSA_NAME_IMM_USE_NODE (var)); 1213 1.1 mrg gcc_assert (list->use == NULL); 1214 1.1 mrg 1215 1.1 mrg if (list->prev == NULL) 1216 1.1 mrg { 1217 1.1 mrg gcc_assert (list->next == NULL); 1218 1.1 mrg return false; 1219 1.1 mrg } 1220 1.1 mrg 1221 1.1 mrg prev = list; 1222 1.1 mrg count = 0; 1223 1.1 mrg for (ptr = list->next; ptr != list; ) 1224 1.1 mrg { 1225 1.1 mrg if (prev != ptr->prev) 1226 1.1 mrg { 1227 1.1 mrg fprintf (f, "prev != ptr->prev\n"); 1228 1.1 mrg goto error; 1229 1.1 mrg } 1230 1.1 mrg 1231 1.1 mrg if (ptr->use == NULL) 1232 1.1 mrg { 1233 1.1 mrg fprintf (f, "ptr->use == NULL\n"); 1234 1.1 mrg goto error; /* 2 roots, or SAFE guard node. */ 1235 1.1 mrg } 1236 1.1 mrg else if (*(ptr->use) != var) 1237 1.1 mrg { 1238 1.1 mrg fprintf (f, "*(ptr->use) != var\n"); 1239 1.1 mrg goto error; 1240 1.1 mrg } 1241 1.1 mrg 1242 1.1 mrg prev = ptr; 1243 1.1 mrg ptr = ptr->next; 1244 1.1 mrg 1245 1.1 mrg count++; 1246 1.1 mrg if (count == 0) 1247 1.1 mrg { 1248 1.1 mrg fprintf (f, "number of immediate uses doesn't fit unsigned int\n"); 1249 1.1 mrg goto error; 1250 1.1 mrg } 1251 1.1 mrg } 1252 1.1 mrg 1253 1.1 mrg /* Verify list in the other direction. 
*/ 1254 1.1 mrg prev = list; 1255 1.1 mrg for (ptr = list->prev; ptr != list; ) 1256 1.1 mrg { 1257 1.1 mrg if (prev != ptr->next) 1258 1.1 mrg { 1259 1.1 mrg fprintf (f, "prev != ptr->next\n"); 1260 1.1 mrg goto error; 1261 1.1 mrg } 1262 1.1 mrg prev = ptr; 1263 1.1 mrg ptr = ptr->prev; 1264 1.1 mrg if (count == 0) 1265 1.1 mrg { 1266 1.1 mrg fprintf (f, "count-- < 0\n"); 1267 1.1 mrg goto error; 1268 1.1 mrg } 1269 1.1 mrg count--; 1270 1.1 mrg } 1271 1.1 mrg 1272 1.1 mrg if (count != 0) 1273 1.1 mrg { 1274 1.1 mrg fprintf (f, "count != 0\n"); 1275 1.1 mrg goto error; 1276 1.1 mrg } 1277 1.1 mrg 1278 1.1 mrg return false; 1279 1.1 mrg 1280 1.1 mrg error: 1281 1.1 mrg if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt)) 1282 1.1 mrg { 1283 1.1 mrg fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt); 1284 1.1 mrg print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM); 1285 1.1 mrg } 1286 1.1 mrg fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr, 1287 1.1 mrg (void *)ptr->use); 1288 1.1 mrg print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM); 1289 1.1 mrg fprintf (f, "\n"); 1290 1.1 mrg return true; 1291 1.1 mrg } 1292 1.1 mrg 1293 1.1 mrg 1294 1.1 mrg /* Dump all the immediate uses to FILE. 
*/ 1295 1.1 mrg 1296 1.1 mrg void 1297 1.1 mrg dump_immediate_uses_for (FILE *file, tree var) 1298 1.1 mrg { 1299 1.1 mrg imm_use_iterator iter; 1300 1.1 mrg use_operand_p use_p; 1301 1.1 mrg 1302 1.1 mrg gcc_assert (var && TREE_CODE (var) == SSA_NAME); 1303 1.1 mrg 1304 1.1 mrg print_generic_expr (file, var, TDF_SLIM); 1305 1.1 mrg fprintf (file, " : -->"); 1306 1.1 mrg if (has_zero_uses (var)) 1307 1.1 mrg fprintf (file, " no uses.\n"); 1308 1.1 mrg else 1309 1.1 mrg if (has_single_use (var)) 1310 1.1 mrg fprintf (file, " single use.\n"); 1311 1.1 mrg else 1312 1.1 mrg fprintf (file, "%d uses.\n", num_imm_uses (var)); 1313 1.1 mrg 1314 1.1 mrg FOR_EACH_IMM_USE_FAST (use_p, iter, var) 1315 1.1 mrg { 1316 1.1 mrg if (use_p->loc.stmt == NULL && use_p->use == NULL) 1317 1.1 mrg fprintf (file, "***end of stmt iterator marker***\n"); 1318 1.1 mrg else 1319 1.1 mrg if (!is_gimple_reg (USE_FROM_PTR (use_p))) 1320 1.1 mrg print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); 1321 1.1 mrg else 1322 1.1 mrg print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); 1323 1.1 mrg } 1324 1.1 mrg fprintf (file, "\n"); 1325 1.1 mrg } 1326 1.1 mrg 1327 1.1 mrg 1328 1.1 mrg /* Dump all the immediate uses to FILE. */ 1329 1.1 mrg 1330 1.1 mrg void 1331 1.1 mrg dump_immediate_uses (FILE *file) 1332 1.1 mrg { 1333 1.1 mrg tree var; 1334 1.1 mrg unsigned int x; 1335 1.1 mrg 1336 1.1 mrg fprintf (file, "Immediate_uses: \n\n"); 1337 1.1 mrg FOR_EACH_SSA_NAME (x, var, cfun) 1338 1.1 mrg { 1339 1.1 mrg dump_immediate_uses_for (file, var); 1340 1.1 mrg } 1341 1.1 mrg } 1342 1.1 mrg 1343 1.1 mrg 1344 1.1 mrg /* Dump def-use edges on stderr. */ 1345 1.1 mrg 1346 1.1 mrg DEBUG_FUNCTION void 1347 1.1 mrg debug_immediate_uses (void) 1348 1.1 mrg { 1349 1.1 mrg dump_immediate_uses (stderr); 1350 1.1 mrg } 1351 1.1 mrg 1352 1.1 mrg 1353 1.1 mrg /* Dump def-use edges on stderr. 
*/ 1354 1.1 mrg 1355 1.1 mrg DEBUG_FUNCTION void 1356 1.1 mrg debug_immediate_uses_for (tree var) 1357 1.1 mrg { 1358 1.1 mrg dump_immediate_uses_for (stderr, var); 1359 1.1 mrg } 1360 1.1 mrg 1361 1.1 mrg 1362 1.1 mrg /* Unlink STMTs virtual definition from the IL by propagating its use. */ 1363 1.1 mrg 1364 1.1 mrg void 1365 1.1 mrg unlink_stmt_vdef (gimple *stmt) 1366 1.1 mrg { 1367 1.1 mrg use_operand_p use_p; 1368 1.1 mrg imm_use_iterator iter; 1369 1.1 mrg gimple *use_stmt; 1370 1.1 mrg tree vdef = gimple_vdef (stmt); 1371 1.1 mrg tree vuse = gimple_vuse (stmt); 1372 1.1 mrg 1373 1.1 mrg if (!vdef 1374 1.1 mrg || TREE_CODE (vdef) != SSA_NAME) 1375 1.1 mrg return; 1376 1.1 mrg 1377 1.1 mrg FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef) 1378 1.1 mrg { 1379 1.1 mrg FOR_EACH_IMM_USE_ON_STMT (use_p, iter) 1380 1.1 mrg SET_USE (use_p, vuse); 1381 1.1 mrg } 1382 1.1 mrg 1383 1.1 mrg if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)) 1384 1.1 mrg SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1; 1385 1.1 mrg } 1386 1.1 mrg 1387 1.1 mrg /* Return true if the var whose chain of uses starts at PTR has a 1388 1.1 mrg single nondebug use. Set USE_P and STMT to that single nondebug 1389 1.1 mrg use, if so, or to NULL otherwise. */ 1390 1.1 mrg bool 1391 1.1 mrg single_imm_use_1 (const ssa_use_operand_t *head, 1392 1.1 mrg use_operand_p *use_p, gimple **stmt) 1393 1.1 mrg { 1394 1.1 mrg ssa_use_operand_t *ptr, *single_use = 0; 1395 1.1 mrg 1396 1.1 mrg for (ptr = head->next; ptr != head; ptr = ptr->next) 1397 1.1 mrg if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr))) 1398 1.1 mrg { 1399 1.1 mrg if (single_use) 1400 1.1 mrg { 1401 1.1 mrg single_use = NULL; 1402 1.1 mrg break; 1403 1.1 mrg } 1404 1.1 mrg single_use = ptr; 1405 1.1 mrg } 1406 1.1 mrg 1407 1.1 mrg if (use_p) 1408 1.1 mrg *use_p = single_use; 1409 1.1 mrg 1410 1.1 mrg if (stmt) 1411 1.1 mrg *stmt = single_use ? single_use->loc.stmt : NULL; 1412 1.1 mrg 1413 1.1 mrg return single_use; 1414 1.1 mrg } 1415 1.1 mrg 1416