1 1.1 mrg /* Miscellaneous SSA utility functions. 2 1.1 mrg Copyright (C) 2001-2022 Free Software Foundation, Inc. 3 1.1 mrg 4 1.1 mrg This file is part of GCC. 5 1.1 mrg 6 1.1 mrg GCC is free software; you can redistribute it and/or modify 7 1.1 mrg it under the terms of the GNU General Public License as published by 8 1.1 mrg the Free Software Foundation; either version 3, or (at your option) 9 1.1 mrg any later version. 10 1.1 mrg 11 1.1 mrg GCC is distributed in the hope that it will be useful, 12 1.1 mrg but WITHOUT ANY WARRANTY; without even the implied warranty of 13 1.1 mrg MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 1.1 mrg GNU General Public License for more details. 15 1.1 mrg 16 1.1 mrg You should have received a copy of the GNU General Public License 17 1.1 mrg along with GCC; see the file COPYING3. If not see 18 1.1 mrg <http://www.gnu.org/licenses/>. */ 19 1.1 mrg 20 1.1 mrg #include "config.h" 21 1.1 mrg #include "system.h" 22 1.1 mrg #include "coretypes.h" 23 1.1 mrg #include "backend.h" 24 1.1 mrg #include "tree.h" 25 1.1 mrg #include "gimple.h" 26 1.1 mrg #include "cfghooks.h" 27 1.1 mrg #include "tree-pass.h" 28 1.1 mrg #include "ssa.h" 29 1.1 mrg #include "gimple-pretty-print.h" 30 1.1 mrg #include "diagnostic-core.h" 31 1.1 mrg #include "fold-const.h" 32 1.1 mrg #include "stor-layout.h" 33 1.1 mrg #include "gimple-fold.h" 34 1.1 mrg #include "gimplify.h" 35 1.1 mrg #include "gimple-iterator.h" 36 1.1 mrg #include "gimple-walk.h" 37 1.1 mrg #include "tree-ssa-loop-manip.h" 38 1.1 mrg #include "tree-into-ssa.h" 39 1.1 mrg #include "tree-ssa.h" 40 1.1 mrg #include "cfgloop.h" 41 1.1 mrg #include "cfgexpand.h" 42 1.1 mrg #include "tree-cfg.h" 43 1.1 mrg #include "tree-dfa.h" 44 1.1 mrg #include "stringpool.h" 45 1.1 mrg #include "attribs.h" 46 1.1 mrg #include "asan.h" 47 1.1 mrg 48 1.1 mrg /* Pointer map of variable mappings, keyed by edge. 
*/ 49 1.1 mrg static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps; 50 1.1 mrg 51 1.1 mrg 52 1.1 mrg /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */ 53 1.1 mrg 54 1.1 mrg void 55 1.1 mrg redirect_edge_var_map_add (edge e, tree result, tree def, location_t locus) 56 1.1 mrg { 57 1.1 mrg edge_var_map new_node; 58 1.1 mrg 59 1.1 mrg if (edge_var_maps == NULL) 60 1.1 mrg edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >; 61 1.1 mrg 62 1.1 mrg auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e); 63 1.1 mrg new_node.def = def; 64 1.1 mrg new_node.result = result; 65 1.1 mrg new_node.locus = locus; 66 1.1 mrg 67 1.1 mrg slot.safe_push (new_node); 68 1.1 mrg } 69 1.1 mrg 70 1.1 mrg 71 1.1 mrg /* Clear the var mappings in edge E. */ 72 1.1 mrg 73 1.1 mrg void 74 1.1 mrg redirect_edge_var_map_clear (edge e) 75 1.1 mrg { 76 1.1 mrg if (!edge_var_maps) 77 1.1 mrg return; 78 1.1 mrg 79 1.1 mrg auto_vec<edge_var_map> *head = edge_var_maps->get (e); 80 1.1 mrg 81 1.1 mrg if (head) 82 1.1 mrg head->release (); 83 1.1 mrg } 84 1.1 mrg 85 1.1 mrg 86 1.1 mrg /* Duplicate the redirected var mappings in OLDE in NEWE. 87 1.1 mrg 88 1.1 mrg This assumes a hash_map can have multiple edges mapping to the same 89 1.1 mrg var_map (many to one mapping), since we don't remove the previous mappings. 90 1.1 mrg */ 91 1.1 mrg 92 1.1 mrg void 93 1.1 mrg redirect_edge_var_map_dup (edge newe, edge olde) 94 1.1 mrg { 95 1.1 mrg if (!edge_var_maps) 96 1.1 mrg return; 97 1.1 mrg 98 1.1 mrg auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe); 99 1.1 mrg auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde); 100 1.1 mrg if (!old_head) 101 1.1 mrg return; 102 1.1 mrg 103 1.1 mrg new_head->safe_splice (*old_head); 104 1.1 mrg } 105 1.1 mrg 106 1.1 mrg 107 1.1 mrg /* Return the variable mappings for a given edge. If there is none, return 108 1.1 mrg NULL. 
*/ 109 1.1 mrg 110 1.1 mrg vec<edge_var_map> * 111 1.1 mrg redirect_edge_var_map_vector (edge e) 112 1.1 mrg { 113 1.1 mrg /* Hey, what kind of idiot would... you'd be surprised. */ 114 1.1 mrg if (!edge_var_maps) 115 1.1 mrg return NULL; 116 1.1 mrg 117 1.1 mrg auto_vec<edge_var_map> *slot = edge_var_maps->get (e); 118 1.1 mrg if (!slot) 119 1.1 mrg return NULL; 120 1.1 mrg 121 1.1 mrg return slot; 122 1.1 mrg } 123 1.1 mrg 124 1.1 mrg /* Clear the edge variable mappings. */ 125 1.1 mrg 126 1.1 mrg void 127 1.1 mrg redirect_edge_var_map_empty (void) 128 1.1 mrg { 129 1.1 mrg if (edge_var_maps) 130 1.1 mrg edge_var_maps->empty (); 131 1.1 mrg } 132 1.1 mrg 133 1.1 mrg 134 1.1 mrg /* Remove the corresponding arguments from the PHI nodes in E's 135 1.1 mrg destination block and redirect it to DEST. Return redirected edge. 136 1.1 mrg The list of removed arguments is stored in a vector accessed 137 1.1 mrg through edge_var_maps. */ 138 1.1 mrg 139 1.1 mrg edge 140 1.1 mrg ssa_redirect_edge (edge e, basic_block dest) 141 1.1 mrg { 142 1.1 mrg gphi_iterator gsi; 143 1.1 mrg gphi *phi; 144 1.1 mrg 145 1.1 mrg redirect_edge_var_map_clear (e); 146 1.1 mrg 147 1.1 mrg /* Remove the appropriate PHI arguments in E's destination block. 148 1.1 mrg If we are redirecting a copied edge the destination has not 149 1.1 mrg got PHI argument space reserved nor an interesting argument. */ 150 1.1 mrg if (! 
(e->dest->flags & BB_DUPLICATED)) 151 1.1 mrg for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi)) 152 1.1 mrg { 153 1.1 mrg tree def; 154 1.1 mrg location_t locus; 155 1.1 mrg 156 1.1 mrg phi = gsi.phi (); 157 1.1 mrg def = gimple_phi_arg_def (phi, e->dest_idx); 158 1.1 mrg locus = gimple_phi_arg_location (phi, e->dest_idx); 159 1.1 mrg 160 1.1 mrg if (def == NULL_TREE) 161 1.1 mrg continue; 162 1.1 mrg 163 1.1 mrg redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus); 164 1.1 mrg } 165 1.1 mrg 166 1.1 mrg e = redirect_edge_succ_nodup (e, dest); 167 1.1 mrg 168 1.1 mrg return e; 169 1.1 mrg } 170 1.1 mrg 171 1.1 mrg 172 1.1 mrg /* Add PHI arguments queued in PENDING_STMT list on edge E to edge 173 1.1 mrg E->dest. */ 174 1.1 mrg 175 1.1 mrg void 176 1.1 mrg flush_pending_stmts (edge e) 177 1.1 mrg { 178 1.1 mrg gphi *phi; 179 1.1 mrg edge_var_map *vm; 180 1.1 mrg int i; 181 1.1 mrg gphi_iterator gsi; 182 1.1 mrg 183 1.1 mrg vec<edge_var_map> *v = redirect_edge_var_map_vector (e); 184 1.1 mrg if (!v) 185 1.1 mrg return; 186 1.1 mrg 187 1.1 mrg for (gsi = gsi_start_phis (e->dest), i = 0; 188 1.1 mrg !gsi_end_p (gsi) && v->iterate (i, &vm); 189 1.1 mrg gsi_next (&gsi), i++) 190 1.1 mrg { 191 1.1 mrg tree def; 192 1.1 mrg 193 1.1 mrg phi = gsi.phi (); 194 1.1 mrg def = redirect_edge_var_map_def (vm); 195 1.1 mrg add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm)); 196 1.1 mrg } 197 1.1 mrg 198 1.1 mrg redirect_edge_var_map_clear (e); 199 1.1 mrg } 200 1.1 mrg 201 1.1 mrg /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a 202 1.1 mrg GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an 203 1.1 mrg expression with a different value. 204 1.1 mrg 205 1.1 mrg This will update any annotations (say debug bind stmts) referring 206 1.1 mrg to the original LHS, so that they use the RHS instead. 
This is 207 1.1 mrg done even if NLHS and LHS are the same, for it is understood that 208 1.1 mrg the RHS will be modified afterwards, and NLHS will not be assigned 209 1.1 mrg an equivalent value. 210 1.1 mrg 211 1.1 mrg Adjusting any non-annotation uses of the LHS, if needed, is a 212 1.1 mrg responsibility of the caller. 213 1.1 mrg 214 1.1 mrg The effect of this call should be pretty much the same as that of 215 1.1 mrg inserting a copy of STMT before STMT, and then removing the 216 1.1 mrg original stmt, at which time gsi_remove() would have update 217 1.1 mrg annotations, but using this function saves all the inserting, 218 1.1 mrg copying and removing. */ 219 1.1 mrg 220 1.1 mrg void 221 1.1 mrg gimple_replace_ssa_lhs (gimple *stmt, tree nlhs) 222 1.1 mrg { 223 1.1 mrg if (MAY_HAVE_DEBUG_BIND_STMTS) 224 1.1 mrg { 225 1.1 mrg tree lhs = gimple_get_lhs (stmt); 226 1.1 mrg 227 1.1 mrg gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt); 228 1.1 mrg 229 1.1 mrg insert_debug_temp_for_var_def (NULL, lhs); 230 1.1 mrg } 231 1.1 mrg 232 1.1 mrg gimple_set_lhs (stmt, nlhs); 233 1.1 mrg } 234 1.1 mrg 235 1.1 mrg 236 1.1 mrg /* Given a tree for an expression for which we might want to emit 237 1.1 mrg locations or values in debug information (generally a variable, but 238 1.1 mrg we might deal with other kinds of trees in the future), return the 239 1.1 mrg tree that should be used as the variable of a DEBUG_BIND STMT or 240 1.1 mrg VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. 
*/ 241 1.1 mrg 242 1.1 mrg tree 243 1.1 mrg target_for_debug_bind (tree var) 244 1.1 mrg { 245 1.1 mrg if (!MAY_HAVE_DEBUG_BIND_STMTS) 246 1.1 mrg return NULL_TREE; 247 1.1 mrg 248 1.1 mrg if (TREE_CODE (var) == SSA_NAME) 249 1.1 mrg { 250 1.1 mrg var = SSA_NAME_VAR (var); 251 1.1 mrg if (var == NULL_TREE) 252 1.1 mrg return NULL_TREE; 253 1.1 mrg } 254 1.1 mrg 255 1.1 mrg if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var)) 256 1.1 mrg && TREE_CODE (var) != PARM_DECL) 257 1.1 mrg return NULL_TREE; 258 1.1 mrg 259 1.1 mrg if (DECL_HAS_VALUE_EXPR_P (var)) 260 1.1 mrg return target_for_debug_bind (DECL_VALUE_EXPR (var)); 261 1.1 mrg 262 1.1 mrg if (DECL_IGNORED_P (var)) 263 1.1 mrg return NULL_TREE; 264 1.1 mrg 265 1.1 mrg /* var-tracking only tracks registers. */ 266 1.1 mrg if (!is_gimple_reg_type (TREE_TYPE (var))) 267 1.1 mrg return NULL_TREE; 268 1.1 mrg 269 1.1 mrg return var; 270 1.1 mrg } 271 1.1 mrg 272 1.1 mrg /* Called via walk_tree, look for SSA_NAMEs that have already been 273 1.1 mrg released. */ 274 1.1 mrg 275 1.1 mrg tree 276 1.1 mrg find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_) 277 1.1 mrg { 278 1.1 mrg struct walk_stmt_info *wi = (struct walk_stmt_info *) data_; 279 1.1 mrg 280 1.1 mrg if (wi && wi->is_lhs) 281 1.1 mrg return NULL_TREE; 282 1.1 mrg 283 1.1 mrg if (TREE_CODE (*tp) == SSA_NAME) 284 1.1 mrg { 285 1.1 mrg if (SSA_NAME_IN_FREE_LIST (*tp)) 286 1.1 mrg return *tp; 287 1.1 mrg 288 1.1 mrg *walk_subtrees = 0; 289 1.1 mrg } 290 1.1 mrg else if (IS_TYPE_OR_DECL_P (*tp)) 291 1.1 mrg *walk_subtrees = 0; 292 1.1 mrg 293 1.1 mrg return NULL_TREE; 294 1.1 mrg } 295 1.1 mrg 296 1.1 mrg /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced 297 1.1 mrg by other DEBUG stmts, and replace uses of the DEF with the 298 1.1 mrg newly-created debug temp. 
*/

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
	continue;

      if (usecount++)
	break;

      if (gimple_debug_bind_get_value (stmt) != var)
	{
	  /* Count this as an additional use, so as to make sure we
	     use a temp unless VAR's definition has a SINGLE_RHS that
	     can be shared.  */
	  usecount++;
	  break;
	}
    }

  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
	value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
	 to.  */
      else if (value == error_mark_node)
	value = NULL;
    }
  else if (gimple_clobber_p (def_stmt))
    /* We can end up here when rewriting a decl into SSA and coming
       along a clobber for the original decl.  Turn that into
       # DEBUG decl => NULL  */
    value = NULL;
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
	{
	  struct walk_stmt_info wi;

	  memset (&wi, 0, sizeof (wi));

	  /* When removing blocks without following reverse dominance
	     order, we may sometimes encounter SSA_NAMEs that have
	     already been released, referenced in other SSA_DEFs that
	     we're about to release.  Consider:

	     <bb X>:
	     v_1 = foo;

	     <bb Y>:
	     w_2 = v_1 + bar;
	     # DEBUG w => w_2

	     If we deleted BB X first, propagating the value of w_2
	     won't do us any good.  It's too late to recover their
	     original definition of v_1: when it was deleted, it was
	     only referenced in other DEFs, it couldn't possibly know
	     it should have been retained, and propagating every
	     single DEF just in case it might have to be propagated
	     into a DEBUG STMT would probably be too wasteful.

	     When dominator information is not readily available, we
	     check for and accept some loss of debug information.  But
	     if it is available, there's no excuse for us to remove
	     blocks in the wrong order, so we don't even check for
	     dead SSA NAMEs.  SSA verification shall catch any
	     errors.  */
	  if ((!gsi && !gimple_bb (def_stmt))
	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
	    no_value = true;
	}

      if (!no_value)
	value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
	 expression (usecount would have been incremented again
	 otherwise), and the definition involves only constants and
	 SSA names, then we can propagate VALUE into this single use,
	 avoiding the temp.

	 We can also avoid using a temp if VALUE can be shared and
	 propagated into all uses, without generating expressions that
	 wouldn't be valid gimple RHSs.

	 Other cases that would require unsharing or non-gimple RHSs
	 are deferred to a debug temp, although we could avoid temps
	 at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
	  || gimple_code (def_stmt) == GIMPLE_PHI
	  || (usecount == 1
	      && (!gimple_assign_single_p (def_stmt)
		  || is_gimple_min_invariant (value)))
	  || is_gimple_reg (value))
	;
      else
	{
	  gdebug *def_temp;
	  tree vexpr = build_debug_expr_decl (TREE_TYPE (value));

	  def_temp = gimple_build_debug_bind (vexpr,
					      unshare_expr (value),
					      def_stmt);

	  /* FIXME: Is setting the mode really necessary?  */
	  if (DECL_P (value))
	    SET_DECL_MODE (vexpr, DECL_MODE (value));
	  else
	    SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));

	  if (gsi)
	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
	  else
	    {
	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
	    }

	  value = vexpr;
	}
    }

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
	continue;

      if (value)
	{
	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
	    /* unshare_expr is not needed here.  vexpr is either a
	       SINGLE_RHS, that can be safely shared, some other RHS
	       that was unshared when we found it had a single debug
	       use, or a DEBUG_EXPR_DECL, that can be safely
	       shared.  */
	    SET_USE (use_p, unshare_expr (value));
	  /* If we didn't replace uses with a debug decl fold the
	     resulting expression.  Otherwise we end up with invalid IL.  */
	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	      fold_stmt_inplace (&gsi);
	    }
	}
      else
	gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}


/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
   other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.
*/ 494 1.1 mrg 495 1.1 mrg void 496 1.1 mrg insert_debug_temps_for_defs (gimple_stmt_iterator *gsi) 497 1.1 mrg { 498 1.1 mrg gimple *stmt; 499 1.1 mrg ssa_op_iter op_iter; 500 1.1 mrg def_operand_p def_p; 501 1.1 mrg 502 1.1 mrg if (!MAY_HAVE_DEBUG_BIND_STMTS) 503 1.1 mrg return; 504 1.1 mrg 505 1.1 mrg stmt = gsi_stmt (*gsi); 506 1.1 mrg 507 1.1 mrg FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF) 508 1.1 mrg { 509 1.1 mrg tree var = DEF_FROM_PTR (def_p); 510 1.1 mrg 511 1.1 mrg if (TREE_CODE (var) != SSA_NAME) 512 1.1 mrg continue; 513 1.1 mrg 514 1.1 mrg insert_debug_temp_for_var_def (gsi, var); 515 1.1 mrg } 516 1.1 mrg } 517 1.1 mrg 518 1.1 mrg /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */ 519 1.1 mrg 520 1.1 mrg void 521 1.1 mrg reset_debug_uses (gimple *stmt) 522 1.1 mrg { 523 1.1 mrg ssa_op_iter op_iter; 524 1.1 mrg def_operand_p def_p; 525 1.1 mrg imm_use_iterator imm_iter; 526 1.1 mrg gimple *use_stmt; 527 1.1 mrg 528 1.1 mrg if (!MAY_HAVE_DEBUG_BIND_STMTS) 529 1.1 mrg return; 530 1.1 mrg 531 1.1 mrg FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF) 532 1.1 mrg { 533 1.1 mrg tree var = DEF_FROM_PTR (def_p); 534 1.1 mrg 535 1.1 mrg if (TREE_CODE (var) != SSA_NAME) 536 1.1 mrg continue; 537 1.1 mrg 538 1.1 mrg FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var) 539 1.1 mrg { 540 1.1 mrg if (!gimple_debug_bind_p (use_stmt)) 541 1.1 mrg continue; 542 1.1 mrg 543 1.1 mrg gimple_debug_bind_reset_value (use_stmt); 544 1.1 mrg update_stmt (use_stmt); 545 1.1 mrg } 546 1.1 mrg } 547 1.1 mrg } 548 1.1 mrg 549 1.1 mrg /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing 550 1.1 mrg dominated stmts before their dominators, so that release_ssa_defs 551 1.1 mrg stands a chance of propagating DEFs into debug bind stmts. 
*/ 552 1.1 mrg 553 1.1 mrg void 554 1.1 mrg release_defs_bitset (bitmap toremove) 555 1.1 mrg { 556 1.1 mrg unsigned j; 557 1.1 mrg bitmap_iterator bi; 558 1.1 mrg 559 1.1 mrg /* Performing a topological sort is probably overkill, this will 560 1.1 mrg most likely run in slightly superlinear time, rather than the 561 1.1 mrg pathological quadratic worst case. 562 1.1 mrg But iterate from max SSA name version to min one because 563 1.1 mrg that mimics allocation order during code generation behavior best. 564 1.1 mrg Use an array for this which we compact on-the-fly with a NULL 565 1.1 mrg marker moving towards the end of the vector. */ 566 1.1 mrg auto_vec<tree, 16> names; 567 1.1 mrg names.reserve (bitmap_count_bits (toremove) + 1); 568 1.1 mrg names.quick_push (NULL_TREE); 569 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi) 570 1.1 mrg names.quick_push (ssa_name (j)); 571 1.1 mrg 572 1.1 mrg bitmap_tree_view (toremove); 573 1.1 mrg while (!bitmap_empty_p (toremove)) 574 1.1 mrg { 575 1.1 mrg j = names.length () - 1; 576 1.1 mrg for (unsigned i = names.length () - 1; names[i];) 577 1.1 mrg { 578 1.1 mrg bool remove_now = true; 579 1.1 mrg tree var = names[i]; 580 1.1 mrg gimple *stmt; 581 1.1 mrg imm_use_iterator uit; 582 1.1 mrg 583 1.1 mrg FOR_EACH_IMM_USE_STMT (stmt, uit, var) 584 1.1 mrg { 585 1.1 mrg ssa_op_iter dit; 586 1.1 mrg def_operand_p def_p; 587 1.1 mrg 588 1.1 mrg /* We can't propagate PHI nodes into debug stmts. */ 589 1.1 mrg if (gimple_code (stmt) == GIMPLE_PHI 590 1.1 mrg || is_gimple_debug (stmt)) 591 1.1 mrg continue; 592 1.1 mrg 593 1.1 mrg /* If we find another definition to remove that uses 594 1.1 mrg the one we're looking at, defer the removal of this 595 1.1 mrg one, so that it can be propagated into debug stmts 596 1.1 mrg after the other is. 
*/ 597 1.1 mrg FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF) 598 1.1 mrg { 599 1.1 mrg tree odef = DEF_FROM_PTR (def_p); 600 1.1 mrg 601 1.1 mrg if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef))) 602 1.1 mrg { 603 1.1 mrg remove_now = false; 604 1.1 mrg break; 605 1.1 mrg } 606 1.1 mrg } 607 1.1 mrg 608 1.1 mrg if (!remove_now) 609 1.1 mrg break; 610 1.1 mrg } 611 1.1 mrg 612 1.1 mrg if (remove_now) 613 1.1 mrg { 614 1.1 mrg gimple *def = SSA_NAME_DEF_STMT (var); 615 1.1 mrg gimple_stmt_iterator gsi = gsi_for_stmt (def); 616 1.1 mrg 617 1.1 mrg if (gimple_code (def) == GIMPLE_PHI) 618 1.1 mrg remove_phi_node (&gsi, true); 619 1.1 mrg else 620 1.1 mrg { 621 1.1 mrg gsi_remove (&gsi, true); 622 1.1 mrg release_defs (def); 623 1.1 mrg } 624 1.1 mrg bitmap_clear_bit (toremove, SSA_NAME_VERSION (var)); 625 1.1 mrg } 626 1.1 mrg else 627 1.1 mrg --i; 628 1.1 mrg if (--j != i) 629 1.1 mrg names[i] = names[j]; 630 1.1 mrg } 631 1.1 mrg } 632 1.1 mrg bitmap_list_view (toremove); 633 1.1 mrg } 634 1.1 mrg 635 1.1 mrg /* Disable warnings about missing quoting in GCC diagnostics for 636 1.1 mrg the verification errors. Their format strings don't follow GCC 637 1.1 mrg diagnostic conventions and the calls are ultimately followed by 638 1.1 mrg one to internal_error. */ 639 1.1 mrg #if __GNUC__ >= 10 640 1.1 mrg # pragma GCC diagnostic push 641 1.1 mrg # pragma GCC diagnostic ignored "-Wformat-diag" 642 1.1 mrg #endif 643 1.1 mrg 644 1.1 mrg /* Verify virtual SSA form. */ 645 1.1 mrg 646 1.1 mrg bool 647 1.1 mrg verify_vssa (basic_block bb, tree current_vdef, sbitmap visited) 648 1.1 mrg { 649 1.1 mrg bool err = false; 650 1.1 mrg 651 1.1 mrg if (!bitmap_set_bit (visited, bb->index)) 652 1.1 mrg return false; 653 1.1 mrg 654 1.1 mrg /* Pick up the single virtual PHI def. 
*/ 655 1.1 mrg gphi *phi = NULL; 656 1.1 mrg for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si); 657 1.1 mrg gsi_next (&si)) 658 1.1 mrg { 659 1.1 mrg tree res = gimple_phi_result (si.phi ()); 660 1.1 mrg if (virtual_operand_p (res)) 661 1.1 mrg { 662 1.1 mrg if (phi) 663 1.1 mrg { 664 1.1 mrg error ("multiple virtual PHI nodes in BB %d", bb->index); 665 1.1 mrg print_gimple_stmt (stderr, phi, 0); 666 1.1 mrg print_gimple_stmt (stderr, si.phi (), 0); 667 1.1 mrg err = true; 668 1.1 mrg } 669 1.1 mrg else 670 1.1 mrg phi = si.phi (); 671 1.1 mrg } 672 1.1 mrg } 673 1.1 mrg if (phi) 674 1.1 mrg { 675 1.1 mrg current_vdef = gimple_phi_result (phi); 676 1.1 mrg if (TREE_CODE (current_vdef) != SSA_NAME) 677 1.1 mrg { 678 1.1 mrg error ("virtual definition is not an SSA name"); 679 1.1 mrg print_gimple_stmt (stderr, phi, 0); 680 1.1 mrg err = true; 681 1.1 mrg } 682 1.1 mrg } 683 1.1 mrg 684 1.1 mrg /* Verify stmts. */ 685 1.1 mrg for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); 686 1.1 mrg gsi_next (&gsi)) 687 1.1 mrg { 688 1.1 mrg gimple *stmt = gsi_stmt (gsi); 689 1.1 mrg tree vuse = gimple_vuse (stmt); 690 1.1 mrg if (vuse) 691 1.1 mrg { 692 1.1 mrg if (vuse != current_vdef) 693 1.1 mrg { 694 1.1 mrg error ("stmt with wrong VUSE"); 695 1.1 mrg print_gimple_stmt (stderr, stmt, 0, TDF_VOPS); 696 1.1 mrg fprintf (stderr, "expected "); 697 1.1 mrg print_generic_expr (stderr, current_vdef); 698 1.1 mrg fprintf (stderr, "\n"); 699 1.1 mrg err = true; 700 1.1 mrg } 701 1.1 mrg tree vdef = gimple_vdef (stmt); 702 1.1 mrg if (vdef) 703 1.1 mrg { 704 1.1 mrg current_vdef = vdef; 705 1.1 mrg if (TREE_CODE (current_vdef) != SSA_NAME) 706 1.1 mrg { 707 1.1 mrg error ("virtual definition is not an SSA name"); 708 1.1 mrg print_gimple_stmt (stderr, phi, 0); 709 1.1 mrg err = true; 710 1.1 mrg } 711 1.1 mrg } 712 1.1 mrg } 713 1.1 mrg } 714 1.1 mrg 715 1.1 mrg /* Verify destination PHI uses and recurse. 
*/ 716 1.1 mrg edge_iterator ei; 717 1.1 mrg edge e; 718 1.1 mrg FOR_EACH_EDGE (e, ei, bb->succs) 719 1.1 mrg { 720 1.1 mrg gphi *phi = get_virtual_phi (e->dest); 721 1.1 mrg if (phi 722 1.1 mrg && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef) 723 1.1 mrg { 724 1.1 mrg error ("PHI node with wrong VUSE on edge from BB %d", 725 1.1 mrg e->src->index); 726 1.1 mrg print_gimple_stmt (stderr, phi, 0, TDF_VOPS); 727 1.1 mrg fprintf (stderr, "expected "); 728 1.1 mrg print_generic_expr (stderr, current_vdef); 729 1.1 mrg fprintf (stderr, "\n"); 730 1.1 mrg err = true; 731 1.1 mrg } 732 1.1 mrg 733 1.1 mrg /* Recurse. */ 734 1.1 mrg err |= verify_vssa (e->dest, current_vdef, visited); 735 1.1 mrg } 736 1.1 mrg 737 1.1 mrg return err; 738 1.1 mrg } 739 1.1 mrg 740 1.1 mrg /* Return true if SSA_NAME is malformed and mark it visited. 741 1.1 mrg 742 1.1 mrg IS_VIRTUAL is true if this SSA_NAME was found inside a virtual 743 1.1 mrg operand. */ 744 1.1 mrg 745 1.1 mrg static bool 746 1.1 mrg verify_ssa_name (tree ssa_name, bool is_virtual) 747 1.1 mrg { 748 1.1 mrg if (TREE_CODE (ssa_name) != SSA_NAME) 749 1.1 mrg { 750 1.1 mrg error ("expected an SSA_NAME object"); 751 1.1 mrg return true; 752 1.1 mrg } 753 1.1 mrg 754 1.1 mrg if (SSA_NAME_IN_FREE_LIST (ssa_name)) 755 1.1 mrg { 756 1.1 mrg error ("found an SSA_NAME that had been released into the free pool"); 757 1.1 mrg return true; 758 1.1 mrg } 759 1.1 mrg 760 1.1 mrg if (SSA_NAME_VAR (ssa_name) != NULL_TREE 761 1.1 mrg && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name))) 762 1.1 mrg { 763 1.1 mrg error ("type mismatch between an SSA_NAME and its symbol"); 764 1.1 mrg return true; 765 1.1 mrg } 766 1.1 mrg 767 1.1 mrg if (is_virtual && !virtual_operand_p (ssa_name)) 768 1.1 mrg { 769 1.1 mrg error ("found a virtual definition for a GIMPLE register"); 770 1.1 mrg return true; 771 1.1 mrg } 772 1.1 mrg 773 1.1 mrg if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun)) 774 1.1 mrg { 775 1.1 mrg error 
("virtual SSA name for non-VOP decl"); 776 1.1 mrg return true; 777 1.1 mrg } 778 1.1 mrg 779 1.1 mrg if (!is_virtual && virtual_operand_p (ssa_name)) 780 1.1 mrg { 781 1.1 mrg error ("found a real definition for a non-register"); 782 1.1 mrg return true; 783 1.1 mrg } 784 1.1 mrg 785 1.1 mrg if (SSA_NAME_IS_DEFAULT_DEF (ssa_name) 786 1.1 mrg && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))) 787 1.1 mrg { 788 1.1 mrg error ("found a default name with a non-empty defining statement"); 789 1.1 mrg return true; 790 1.1 mrg } 791 1.1 mrg 792 1.1 mrg return false; 793 1.1 mrg } 794 1.1 mrg 795 1.1 mrg 796 1.1 mrg /* Return true if the definition of SSA_NAME at block BB is malformed. 797 1.1 mrg 798 1.1 mrg STMT is the statement where SSA_NAME is created. 799 1.1 mrg 800 1.1 mrg DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME 801 1.1 mrg version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set, 802 1.1 mrg it means that the block in that array slot contains the 803 1.1 mrg definition of SSA_NAME. 804 1.1 mrg 805 1.1 mrg IS_VIRTUAL is true if SSA_NAME is created by a VDEF. 
*/

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
	    gimple *stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}


/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.
   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
	    gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  /* Check the immediate-use list of each name only once per verification
     run; TREE_VISITED doubles as the "imm links already checked" marker
     (it is cleared for every name at the start of verify_ssa).  */
  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      /* DEF_BB is NULL when verify_def never recorded a defining block
	 for this name.  */
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      /* A use in another block must be dominated by its definition.  */
      error ("definition in block %i does not dominate use in block %i",
	     def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
	   && names_defined_in_bb != NULL
	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      /* Within a single block the caller tracks which names have been
	 defined so far; a use before that point is malformed.  */
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      /* A NULL use field marks the list head, which stores the SSA name
	 the whole ring belongs to; otherwise read the name through the
	 previous use operand.  */
      if (use_p->prev->use == NULL)
	listvar = use_p->prev->loc.ssa_name;
      else
	listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
	{
	  error ("wrong immediate use list");
	  err = true;
	}
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}


/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.
 */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      /* PHI argument I corresponds to incoming edge I.  */
      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
	{
	  error ("PHI argument is missing for edge %d->%d",
	         e->src->index,
	         e->dest->index);
	  err = true;
	  goto error;
	}

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
	{
	  error ("PHI argument is not SSA_NAME, or invariant");
	  err = true;
	}

      /* Arguments flowing in over abnormal edges cannot be constants;
	 they must be SSA names with the abnormal flag set.  */
      if ((e->flags & EDGE_ABNORMAL) && TREE_CODE (op) != SSA_NAME)
	{
	  error ("PHI argument on abnormal edge is not SSA_NAME");
	  err = true;
	}

      if (TREE_CODE (op) == SSA_NAME)
	{
	  /* The use of the argument conceptually happens at the end of
	     the edge's source block, hence e->src below.  */
	  err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
	}

      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  /* Taking the address of a decl requires its TREE_ADDRESSABLE
	     bit; strip component refs down to the base decl to check.  */
	  tree base = TREE_OPERAND (op, 0);
	  while (handled_component_p (base))
	    base = TREE_OPERAND (base, 0);
	  if ((VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	      && !TREE_ADDRESSABLE (base))
	    {
	      error ("address taken, but ADDRESSABLE bit not set");
	      err = true;
	    }
	}

      if (e->dest != bb)
	{
	  error ("wrong edge %d->%d for PHI argument",
	         e->src->index, e->dest->index);
	  err = true;
	}

      if (err)
	{
	  fprintf (stderr, "PHI argument\n");
	  print_generic_stmt (stderr, op, TDF_VOPS);
	  goto error;
	}
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }


  return err;
}


/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  auto_bitmap names_defined_in_bb;

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

  {
    /* Keep track of SSA names present in the IL.  */
    size_t i;
    tree name;
    hash_map <void *, tree> ssa_info;

    FOR_EACH_SSA_NAME (i, name, cfun)
      {
	gimple *stmt;
	/* Reset the marker used by verify_use to avoid re-checking
	   immediate-use links for a name more than once.  */
	TREE_VISITED (name) = 0;

	verify_ssa_name (name, virtual_operand_p (name));

	stmt = SSA_NAME_DEF_STMT (name);
	if (!gimple_nop_p (stmt))
	  {
	    basic_block bb = gimple_bb (stmt);
	    if (verify_def (bb, definition_block,
			    name, stmt, virtual_operand_p (name)))
	      goto err;
	  }

	/* Points-to and range info must not be shared between distinct
	   SSA names; detect aliased info blobs via their address.  */
	void *info = NULL;
	if (POINTER_TYPE_P (TREE_TYPE (name)))
	  info = SSA_NAME_PTR_INFO (name);
	else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
	  info = SSA_NAME_RANGE_INFO (name);
	if (info)
	  {
	    bool existed;
	    tree &val = ssa_info.get_or_insert (info, &existed);
	    if (existed)
	      {
		error ("shared SSA name info");
		print_generic_expr (stderr, val);
		fprintf (stderr, " and ");
		print_generic_expr (stderr, name);
		fprintf (stderr, "\n");
		goto err;
	      }
	    else
	      val = name;
	  }
      }
  }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->aux)
	    {
	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
		     e->dest->index);
	      goto err;
	    }
	}

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  if (verify_phi_args (phi, bb, definition_block))
	    goto err;

	  /* PHI results are defined at the top of the block, before any
	     ordinary statement.  */
	  bitmap_set_bit (names_defined_in_bb,
			  SSA_NAME_VERSION (gimple_phi_result (phi)));
	}

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  use_operand_p use_p;

	  if (check_modified_stmt && gimple_modified_p (stmt))
	    {
	      error ("stmt (%p) marked modified after optimization pass: ",
		     (void *)stmt);
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
	    {
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  /* Debug binds without a value carry no real uses to check.  */
	  if (gimple_debug_bind_p (stmt)
	      && !gimple_debug_bind_has_value_p (stmt))
	    continue;

	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
	    {
	      op = USE_FROM_PTR (use_p);
	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
			      use_p, stmt, false, names_defined_in_bb))
		goto err;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
	    {
	      if (SSA_NAME_DEF_STMT (op) != stmt)
		{
		  error ("SSA_NAME_DEF_STMT is wrong");
		  fprintf (stderr, "Expected definition statement:\n");
		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
		  fprintf (stderr, "\nActual definition statement:\n");
		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
				     4, TDF_VOPS);
		  goto err;
		}
	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
	    }
	}

      /* The defined-before-use tracking is per-block.  */
      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* Walk the virtual-operand SSA web separately, starting from the
     default definition of the virtual operand, if there is one.  */
  if (gimple_vop (cfun)
      && ssa_default_def (cfun, gimple_vop (cfun)))
    {
      auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
      bitmap_clear (visited);
      if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
		       ssa_default_def (cfun, gimple_vop (cfun)), visited))
	goto err;
    }

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}

#if __GNUC__ >= 10
#  pragma GCC diagnostic pop
#endif

/* Initialize global DFA and SSA structures.
   If SIZE is non-zero allocated ssa names array of a given size.
*/ 1220 1.1 mrg 1221 1.1 mrg void 1222 1.1 mrg init_tree_ssa (struct function *fn, int size) 1223 1.1 mrg { 1224 1.1 mrg fn->gimple_df = ggc_cleared_alloc<gimple_df> (); 1225 1.1 mrg fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20); 1226 1.1 mrg pt_solution_reset (&fn->gimple_df->escaped); 1227 1.1 mrg init_ssanames (fn, size); 1228 1.1 mrg } 1229 1.1 mrg 1230 1.1 mrg /* Deallocate memory associated with SSA data structures for FNDECL. */ 1231 1.1 mrg 1232 1.1 mrg void 1233 1.1 mrg delete_tree_ssa (struct function *fn) 1234 1.1 mrg { 1235 1.1 mrg fini_ssanames (fn); 1236 1.1 mrg 1237 1.1 mrg /* We no longer maintain the SSA operand cache at this point. */ 1238 1.1 mrg if (ssa_operands_active (fn)) 1239 1.1 mrg fini_ssa_operands (fn); 1240 1.1 mrg 1241 1.1 mrg fn->gimple_df->default_defs->empty (); 1242 1.1 mrg fn->gimple_df->default_defs = NULL; 1243 1.1 mrg pt_solution_reset (&fn->gimple_df->escaped); 1244 1.1 mrg if (fn->gimple_df->decls_to_pointers != NULL) 1245 1.1 mrg delete fn->gimple_df->decls_to_pointers; 1246 1.1 mrg fn->gimple_df->decls_to_pointers = NULL; 1247 1.1 mrg fn->gimple_df = NULL; 1248 1.1 mrg 1249 1.1 mrg /* We no longer need the edge variable maps. */ 1250 1.1 mrg redirect_edge_var_map_empty (); 1251 1.1 mrg } 1252 1.1 mrg 1253 1.1 mrg /* Return true if EXPR is a useless type conversion, otherwise return 1254 1.1 mrg false. */ 1255 1.1 mrg 1256 1.1 mrg bool 1257 1.1 mrg tree_ssa_useless_type_conversion (tree expr) 1258 1.1 mrg { 1259 1.1 mrg tree outer_type, inner_type; 1260 1.1 mrg 1261 1.1 mrg /* If we have an assignment that merely uses a NOP_EXPR to change 1262 1.1 mrg the top of the RHS to the type of the LHS and the type conversion 1263 1.1 mrg is "safe", then strip away the type conversion so that we can 1264 1.1 mrg enter LHS = RHS into the const_and_copies table. 
*/ 1265 1.1 mrg if (!CONVERT_EXPR_P (expr) 1266 1.1 mrg && TREE_CODE (expr) != VIEW_CONVERT_EXPR 1267 1.1 mrg && TREE_CODE (expr) != NON_LVALUE_EXPR) 1268 1.1 mrg return false; 1269 1.1 mrg 1270 1.1 mrg outer_type = TREE_TYPE (expr); 1271 1.1 mrg inner_type = TREE_TYPE (TREE_OPERAND (expr, 0)); 1272 1.1 mrg 1273 1.1 mrg if (inner_type == error_mark_node) 1274 1.1 mrg return false; 1275 1.1 mrg 1276 1.1 mrg return useless_type_conversion_p (outer_type, inner_type); 1277 1.1 mrg } 1278 1.1 mrg 1279 1.1 mrg /* Strip conversions from EXP according to 1280 1.1 mrg tree_ssa_useless_type_conversion and return the resulting 1281 1.1 mrg expression. */ 1282 1.1 mrg 1283 1.1 mrg tree 1284 1.1 mrg tree_ssa_strip_useless_type_conversions (tree exp) 1285 1.1 mrg { 1286 1.1 mrg while (tree_ssa_useless_type_conversion (exp)) 1287 1.1 mrg exp = TREE_OPERAND (exp, 0); 1288 1.1 mrg return exp; 1289 1.1 mrg } 1290 1.1 mrg 1291 1.1 mrg /* Return true if T, as SSA_NAME, has an implicit default defined value. */ 1292 1.1 mrg 1293 1.1 mrg bool 1294 1.1 mrg ssa_defined_default_def_p (tree t) 1295 1.1 mrg { 1296 1.1 mrg tree var = SSA_NAME_VAR (t); 1297 1.1 mrg 1298 1.1 mrg if (!var) 1299 1.1 mrg ; 1300 1.1 mrg /* Parameters get their initial value from the function entry. */ 1301 1.1 mrg else if (TREE_CODE (var) == PARM_DECL) 1302 1.1 mrg return true; 1303 1.1 mrg /* When returning by reference the return address is actually a hidden 1304 1.1 mrg parameter. */ 1305 1.1 mrg else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var)) 1306 1.1 mrg return true; 1307 1.1 mrg /* Hard register variables get their initial value from the ether. */ 1308 1.1 mrg else if (VAR_P (var) && DECL_HARD_REGISTER (var)) 1309 1.1 mrg return true; 1310 1.1 mrg 1311 1.1 mrg return false; 1312 1.1 mrg } 1313 1.1 mrg 1314 1.1 mrg 1315 1.1 mrg /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what 1316 1.1 mrg should be returned if the value is only partially undefined. 
 */

bool
ssa_undefined_value_p (tree t, bool partial)
{
  gimple *def_stmt;

  /* Names with an implicit default definition (parameters, by-reference
     results, hard registers) are considered defined.  */
  if (ssa_defined_default_def_p (t))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  def_stmt = SSA_NAME_DEF_STMT (t);
  if (gimple_nop_p (def_stmt))
    return true;

  /* The value is undefined if the definition statement is a call
     to .DEFERRED_INIT function.  */
  if (gimple_call_internal_p (def_stmt, IFN_DEFERRED_INIT))
    return true;

  /* The value is partially undefined if the definition statement is
     a REALPART_EXPR or IMAGPART_EXPR and its operand is defined by
     the call to .DEFERRED_INIT function.  This is for handling the
     following case:

  1 typedef _Complex float C;
  2 C foo (int cond)
  3 {
  4   C f;
  5   __imag__ f = 0;
  6   if (cond)
  7     {
  8       __real__ f = 1;
  9       return f;
 10     }
 11   return f;
 12 }

    with -ftrivial-auto-var-init, compiler will insert the following
    artificial initialization:
  f = .DEFERRED_INIT (f, 2);
  _1 = REALPART_EXPR <f>;

    we should treat the definition _1 = REALPART_EXPR <f> as undefined.  */
  if (partial && is_gimple_assign (def_stmt)
      && (gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
	  || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR))
    {
      tree real_imag_part = TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0);
      if (TREE_CODE (real_imag_part) == SSA_NAME
	  && gimple_call_internal_p (SSA_NAME_DEF_STMT (real_imag_part),
				     IFN_DEFERRED_INIT))
	return true;
    }

  /* Check if the complex was not only partially defined.  Note the
     recursive calls use the default PARTIAL (false), so only fully
     undefined components make the COMPLEX_EXPR undefined here.  */
  if (partial && is_gimple_assign (def_stmt)
      && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
    {
      tree rhs1, rhs2;

      rhs1 = gimple_assign_rhs1 (def_stmt);
      rhs2 = gimple_assign_rhs2 (def_stmt);
      return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
	     || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
    }
  return false;
}


/* Return TRUE iff STMT, a gimple statement, references an undefined
   SSA name.  */

bool
gimple_uses_undefined_value_p (gimple *stmt)
{
  ssa_op_iter iter;
  tree op;

  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    if (ssa_undefined_value_p (op))
      return true;

  return false;
}


/* Return TRUE iff there are any non-PHI uses of VAR that dominate the
   end of BB.  If we return TRUE and BB is a loop header, then VAR will
   be assumed to be defined within the loop, even if it is marked as
   maybe-undefined.
 */

bool
ssa_name_any_use_dominates_bb_p (tree var, basic_block bb)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      /* PHI uses happen on incoming edges, and debug statements must
	 not influence the result, so skip both.  */
      if (is_a <gphi *> (USE_STMT (use_p))
	  || is_gimple_debug (USE_STMT (use_p)))
	continue;
      basic_block dombb = gimple_bb (USE_STMT (use_p));
      if (dominated_by_p (CDI_DOMINATORS, bb, dombb))
	return true;
    }

  return false;
}

/* Mark as maybe_undef any SSA_NAMEs that are unsuitable as ivopts
   candidates for potentially involving undefined behavior.  */

void
mark_ssa_maybe_undefs (void)
{
  auto_vec<tree> queue;

  /* Scan all SSA_NAMEs, marking the definitely-undefined ones as
     maybe-undefined and queuing them for propagation, while clearing
     the mark on others.  */
  unsigned int i;
  tree var;
  FOR_EACH_SSA_NAME (i, var, cfun)
    {
      if (SSA_NAME_IS_VIRTUAL_OPERAND (var)
	  || !ssa_undefined_value_p (var, false))
	ssa_name_set_maybe_undef (var, false);
      else
	{
	  ssa_name_set_maybe_undef (var);
	  queue.safe_push (var);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "marking _%i as maybe-undef\n",
		     SSA_NAME_VERSION (var));
	}
    }

  /* Now propagate maybe-undefined from a DEF to any other PHI that
     uses it, as long as there isn't any intervening use of DEF.  */
  while (!queue.is_empty ())
    {
      var = queue.pop ();
      imm_use_iterator iter;
      use_operand_p use_p;
      FOR_EACH_IMM_USE_FAST (use_p, iter, var)
	{
	  /* Any uses of VAR that aren't PHI args imply VAR must be
	     defined, otherwise undefined behavior would have been
	     definitely invoked.  Only PHI args may hold
	     maybe-undefined values without invoking undefined
	     behavior for that reason alone.  */
	  if (!is_a <gphi *> (USE_STMT (use_p)))
	    continue;
	  gphi *phi = as_a <gphi *> (USE_STMT (use_p));

	  tree def = gimple_phi_result (phi);
	  /* Already marked — no need to queue it again.  */
	  if (ssa_name_maybe_undef_p (def))
	    continue;

	  /* Look for any uses of the maybe-unused SSA_NAME that
	     dominates the block that reaches the incoming block
	     corresponding to the PHI arg in which it is mentioned.
	     That means we can assume the SSA_NAME is defined in that
	     path, so we only mark a PHI result as maybe-undef if we
	     find an unused reaching SSA_NAME.  */
	  int idx = phi_arg_index_from_use (use_p);
	  basic_block bb = gimple_phi_arg_edge (phi, idx)->src;
	  if (ssa_name_any_use_dominates_bb_p (var, bb))
	    continue;

	  /* Propagate: the PHI result inherits maybe-undef and itself
	     becomes a propagation source.  */
	  ssa_name_set_maybe_undef (def);
	  queue.safe_push (def);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "marking _%i as maybe-undef because of _%i\n",
		     SSA_NAME_VERSION (def), SSA_NAME_VERSION (var));
	}
    }
}


/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.
 */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  /* Walk down to the innermost reference, which is where a MEM_REF
     base, if any, sits.  */
  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
      && is_gimple_reg_type (TREE_TYPE (*tp))
      && ! VOID_TYPE_P (TREE_TYPE (*tp)))
    {
      /* Element extract from a vector decl at an element-aligned
	 offset becomes a BIT_FIELD_REF (offset converted from bytes
	 to bits).  */
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
	  && useless_type_conversion_p (TREE_TYPE (*tp),
					TREE_TYPE (TREE_TYPE (sym)))
	  && multiple_p (mem_ref_offset (*tp),
			 wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp)))))
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			int_const_binop (MULT_EXPR,
					 bitsize_int (BITS_PER_UNIT),
					 TREE_OPERAND (*tp, 1)));
	}
      /* Component extract from a complex decl: offset zero selects the
	 real part, offset of one component size the imaginary part.  */
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
	       && useless_type_conversion_p (TREE_TYPE (*tp),
					     TREE_TYPE (TREE_TYPE (sym)))
	       && (integer_zerop (TREE_OPERAND (*tp, 1))
		   || tree_int_cst_equal (TREE_OPERAND (*tp, 1),
					  TYPE_SIZE_UNIT (TREE_TYPE (*tp)))))
	{
	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
			? REALPART_EXPR : IMAGPART_EXPR,
			TREE_TYPE (*tp), sym);
	}
      /* Same-size access at offset zero: either the plain decl or a
	 VIEW_CONVERT_EXPR when the types differ.  */
      else if (integer_zerop (TREE_OPERAND (*tp, 1))
	       && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
					  TREE_TYPE (sym)))
	    *tp = build1 (VIEW_CONVERT_EXPR,
			  TREE_TYPE (*tp), sym);
	  else
	    *tp = sym;
	}
      /* A byte-aligned, in-bounds sub-object extract with mode-precision
	 integral types on both sides becomes a BIT_FIELD_REF.  */
      else if (DECL_SIZE (sym)
	       && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
	       && (known_subrange_p
		   (mem_ref_offset (*tp),
		    wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
		    0, wi::to_offset (DECL_SIZE_UNIT (sym))))
	       && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
		   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
		       == TYPE_PRECISION (TREE_TYPE (*tp))))
	       && (! INTEGRAL_TYPE_P (TREE_TYPE (sym))
		   || type_has_mode_precision_p (TREE_TYPE (sym)))
	       && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
				  BITS_PER_UNIT) == 0)
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			wide_int_to_tree (bitsizetype,
					  mem_ref_offset (*tp)
					  << LOG2_BITS_PER_UNIT));
	}
    }
}

/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
    {
      base = get_base_address (ref);
      if (DECL_P (base))
	return base;
      return NULL_TREE;
    }

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
	return NULL_TREE;
      if (! is_gimple_reg_type (TREE_TYPE (base))
	  || VOID_TYPE_P (TREE_TYPE (base))
	  || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base))
	return decl;
      /* An in-bounds, element-aligned extract from a vector or complex
	 decl is rewritable (see maybe_rewrite_mem_ref_base).  */
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
	  && useless_type_conversion_p (TREE_TYPE (base),
					TREE_TYPE (TREE_TYPE (decl)))
	  && known_ge (mem_ref_offset (base), 0)
	  && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
		       mem_ref_offset (base))
	  && multiple_p (mem_ref_offset (base),
			 wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base)))))
	return NULL_TREE;
      /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
      if (integer_zerop (TREE_OPERAND (base, 1))
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
	return NULL_TREE;
      /* For integral typed extracts we can use a BIT_FIELD_REF.  */
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	  && (known_subrange_p
	      (mem_ref_offset (base),
	       wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
	       0, wi::to_poly_offset (DECL_SIZE_UNIT (decl))))
	  /* ??? We can't handle bitfield precision extracts without
	     either using an alternate type for the BIT_FIELD_REF and
	     then doing a conversion or possibly adjusting the offset
	     according to endianness.  */
	  && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
	      || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
		  == TYPE_PRECISION (TREE_TYPE (base))))
	  /* ??? Likewise for extracts from bitfields, we'd have
	     to pun the base object to a size precision mode first.  */
	  && (! INTEGRAL_TYPE_P (TREE_TYPE (decl))
	      || type_has_mode_precision_p (TREE_TYPE (decl)))
	  && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
			     BITS_PER_UNIT) == 0)
	return NULL_TREE;
      return decl;
    }

  /* We cannot rewrite TARGET_MEM_REFs.  */
  if (TREE_CODE (base) == TARGET_MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
	return NULL_TREE;
      return decl;
    }

  return NULL_TREE;
}

/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner... */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ??? The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM-REF that covers
	 it in full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
	  && DECL_P (decl)
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
	  /* If the dynamic type of the decl has larger precision than
	     the decl itself we can't use the decls type for SSA rewriting.  */
	  && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
	       || compare_tree_int (DECL_SIZE (decl),
				    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
	      || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		  && (TYPE_PRECISION (TREE_TYPE (decl))
		      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
	  /* Make sure we are not re-writing non-float copying into float
	     copying as that can incur normalization.  */
	  && (! FLOAT_TYPE_P (TREE_TYPE (decl))
	      || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
	return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
	 using a BIT_INSERT_EXPR.  */
      if (DECL_P (decl)
	  && VECTOR_TYPE_P (TREE_TYPE (decl))
	  && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && known_ge (mem_ref_offset (lhs), 0)
	  && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
		       mem_ref_offset (lhs))
	  && multiple_p (mem_ref_offset (lhs),
			 wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (lhs))))
	  && known_ge (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (decl))),
		       wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (lhs)))))
	{
	  poly_uint64 lhs_bits, nelts;
	  if (poly_int_tree_p (TYPE_SIZE (TREE_TYPE (lhs)), &lhs_bits)
	      && multiple_p (lhs_bits,
			     tree_to_uhwi
			       (TYPE_SIZE (TREE_TYPE (TREE_TYPE (decl)))),
			     &nelts)
	      && valid_vector_subparts_p (nelts))
	    {
	      if (known_eq (nelts, 1u))
		return false;
	      /* For sub-vector inserts the insert vector mode has to be
		 supported.  */
	      tree vtype = build_vector_type (TREE_TYPE (TREE_TYPE (decl)),
					      nelts);
	      if (TYPE_MODE (vtype) != BLKmode)
		return false;
	    }
	}
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
			  TYPE_SIZE_UNIT
			    (TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0)))), 0)
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
	  % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  return true;
}

/* When possible, clear TREE_ADDRESSABLE bit, set or clear DECL_NOT_GIMPLE_REG_P
   and mark the variable VAR for conversion into SSA by recording it in
   SUITABLE_FOR_RENAMING.  NOT_REG_NEEDS contains decls with partial defs
   that cannot be expressed in SSA form; ADDRESSES_TAKEN contains decls
   whose address is still taken.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
		    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  bool maybe_reg = false;
  if (TREE_ADDRESSABLE (var))
    {
      TREE_ADDRESSABLE (var) = 0;
      maybe_reg = true;
      if (dump_file)
	{
	  fprintf (dump_file, "No longer having address taken: ");
	  print_generic_expr (dump_file, var);
	  fprintf (dump_file, "\n");
	}
    }

  /* For register type decls if we do not have any partial defs
     we cannot express in SSA form mark them as DECL_NOT_GIMPLE_REG_P
     as to avoid SSA rewrite.  For the others go ahead and mark
     them for renaming.  */
  if (is_gimple_reg_type (TREE_TYPE (var)))
    {
      if (bitmap_bit_p (not_reg_needs, DECL_UID (var)))
	{
	  DECL_NOT_GIMPLE_REG_P (var) = 1;
	  if (dump_file)
	    {
	      fprintf (dump_file, "Has partial defs: ");
	      print_generic_expr (dump_file, var);
	      fprintf (dump_file, "\n");
	    }
	}
      else if (DECL_NOT_GIMPLE_REG_P (var))
	{
	  maybe_reg = true;
	  DECL_NOT_GIMPLE_REG_P (var) = 0;
	}
      if (maybe_reg)
	{
	  if (is_gimple_reg (var))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Now a gimple register: ");
		  print_generic_expr (dump_file, var);
		  fprintf (dump_file, "\n");
		}
	      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
	    }
	  else
	    /* is_gimple_reg rejected it for another reason; restore the
	       flag so the decl is consistently treated as non-register.  */
	    DECL_NOT_GIMPLE_REG_P (var) = 1;
	}
    }
}

/* Return true when STMT is ASAN mark where second argument is an address
   of a local variable.
*/
  if (is_gimple_reg_type (TREE_TYPE (var)))
    {
      if (bitmap_bit_p (not_reg_needs, DECL_UID (var)))
	{
	  /* Partial defs exist that SSA form cannot express.  */
	  DECL_NOT_GIMPLE_REG_P (var) = 1;
	  if (dump_file)
	    {
	      fprintf (dump_file, "Has partial defs: ");
	      print_generic_expr (dump_file, var);
	      fprintf (dump_file, "\n");
	    }
	}
      else if (DECL_NOT_GIMPLE_REG_P (var))
	{
	  /* Previously blocked, now fine: clear the flag and re-check.  */
	  maybe_reg = true;
	  DECL_NOT_GIMPLE_REG_P (var) = 0;
	}
      if (maybe_reg)
	{
	  if (is_gimple_reg (var))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Now a gimple register: ");
		  print_generic_expr (dump_file, var);
		  fprintf (dump_file, "\n");
		}
	      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
	    }
	  else
	    /* Still not a register despite cleared flags; restore the
	       not-gimple-reg marking.  */
	    DECL_NOT_GIMPLE_REG_P (var) = 1;
	}
    }
}

/* Return true when STMT is ASAN mark where second argument is an address
   of a local variable that would qualify as a gimple register were its
   address not taken (use-after-scope-attributed vars excluded).  */

static bool
is_asan_mark_p (gimple *stmt)
{
  if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
    return false;

  tree addr = get_base_address (gimple_call_arg (stmt, 1));
  if (TREE_CODE (addr) == ADDR_EXPR
      && VAR_P (TREE_OPERAND (addr, 0)))
    {
      tree var = TREE_OPERAND (addr, 0);
      if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			    DECL_ATTRIBUTES (var)))
	return false;

      /* Temporarily clear TREE_ADDRESSABLE so is_gimple_reg probes
	 whether VAR would be a register absent the ASAN_MARK's address
	 taking; restore the bit before returning.  */
      unsigned addressable = TREE_ADDRESSABLE (var);
      TREE_ADDRESSABLE (var) = 0;
      bool r = is_gimple_reg (var);
      TREE_ADDRESSABLE (var) = addressable;
      return r;
    }

  return false;
}

/* Compute TREE_ADDRESSABLE and whether we have unhandled partial defs
   for local variables, then rewrite eligible variables into SSA form
   and update the SSA web.  Callable both as a pass and directly.  */

void
execute_update_addresses_taken (void)
{
  basic_block bb;
  auto_bitmap addresses_taken;
  auto_bitmap not_reg_needs;
  auto_bitmap suitable_for_renaming;
  bool optimistic_not_addressable = false;
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  enum gimple_code code = gimple_code (stmt);
	  tree decl;

	  if (code == GIMPLE_CALL)
	    {
	      if (optimize_atomic_compare_exchange_p (stmt))
		{
		  /* For __atomic_compare_exchange_N if the second argument
		     is &var, don't mark var addressable;
		     if it becomes non-addressable, we'll rewrite it into
		     ATOMIC_COMPARE_EXCHANGE call.  Temporarily null the
		     argument so gimple_ior_addresses_taken skips it.  */
		  tree arg = gimple_call_arg (stmt, 1);
		  gimple_call_set_arg (stmt, 1, null_pointer_node);
		  gimple_ior_addresses_taken (addresses_taken, stmt);
		  gimple_call_set_arg (stmt, 1, arg);
		  /* Remember we have to check again below.  */
		  optimistic_not_addressable = true;
		}
	      else if (is_asan_mark_p (stmt)
		       || gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
		/* These calls take addresses we deliberately ignore so the
		   vars can still be rewritten; handled in the rewrite loop
		   below.  */
		;
	      else
		gimple_ior_addresses_taken (addresses_taken, stmt);
	    }
	  else
	    /* Note all addresses taken by the stmt.  */
	    gimple_ior_addresses_taken (addresses_taken, stmt);

	  /* If we have a call or an assignment, see if the lhs contains
	     a local decl that requires not to be a gimple register.  */
	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      if (lhs
		  && TREE_CODE (lhs) != SSA_NAME
		  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
		      || non_rewritable_lvalue_p (lhs)))
		{
		  decl = get_base_address (lhs);
		  if (DECL_P (decl))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      if ((decl = non_rewritable_mem_ref_base (rhs)))
		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
	    }

	  else if (code == GIMPLE_CALL)
	    {
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if ((decl = non_rewritable_mem_ref_base (arg)))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }

	  else if (code == GIMPLE_ASM)
	    {
	      gasm *asm_stmt = as_a <gasm *> (stmt);
	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		{
		  tree link = gimple_asm_output_op (asm_stmt, i);
		  tree lhs = TREE_VALUE (link);
		  if (TREE_CODE (lhs) != SSA_NAME)
		    {
		      decl = get_base_address (lhs);
		      if (DECL_P (decl)
			  && (non_rewritable_lvalue_p (lhs)
			      /* We cannot move required conversions from
				 the lhs to the rhs in asm statements, so
				 require we do not need any.  */
			      || !useless_type_conversion_p
				    (TREE_TYPE (lhs), TREE_TYPE (decl))))
			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		    }
		}
	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		{
		  tree link = gimple_asm_input_op (asm_stmt, i);
		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }
	}

      /* PHI arguments can also take addresses (via ADDR_EXPR operands).  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  size_t i;
	  gphi *phi = gsi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree op = PHI_ARG_DEF (phi, i), var;
	      if (TREE_CODE (op) == ADDR_EXPR
		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
		  && DECL_P (var))
		bitmap_set_bit (addresses_taken, DECL_UID (var));
	    }
	}
    }

  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
			suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
			suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming)
      || optimistic_not_addressable)
    {
      /* NOTE: gsi_next is done at the loop bottom because several cases
	 below replace or extend the statement and 'continue'.  */
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple *stmt = gsi_stmt (gsi);

	    /* Re-write TARGET_MEM_REFs of symbols we want to
	       rewrite into SSA form.  */
	    if (gimple_assign_single_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
		tree sym;

		/* Rewrite LHS IMAG/REALPART_EXPR similar to
		   gimplify_modify_expr_complex_part: load the other half,
		   then build a COMPLEX_EXPR assigning the whole variable.  */
		if ((TREE_CODE (lhs) == IMAGPART_EXPR
		     || TREE_CODE (lhs) == REALPART_EXPR)
		    && DECL_P (TREE_OPERAND (lhs, 0))
		    && bitmap_bit_p (suitable_for_renaming,
				     DECL_UID (TREE_OPERAND (lhs, 0))))
		  {
		    tree other = make_ssa_name (TREE_TYPE (lhs));
		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
					? REALPART_EXPR : IMAGPART_EXPR,
					TREE_TYPE (other),
					TREE_OPERAND (lhs, 0));
		    suppress_warning (lrhs);
		    gimple *load = gimple_build_assign (other, lrhs);
		    location_t loc = gimple_location (stmt);
		    gimple_set_location (load, loc);
		    gimple_set_vuse (load, gimple_vuse (stmt));
		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
		    gimple_assign_set_rhs_with_ops
		      (&gsi, COMPLEX_EXPR,
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? other : gimple_assign_rhs1 (stmt),
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
		   into a BIT_INSERT_EXPR.  */
		if (TREE_CODE (lhs) == BIT_FIELD_REF
		    && DECL_P (TREE_OPERAND (lhs, 0))
		    && bitmap_bit_p (suitable_for_renaming,
				     DECL_UID (TREE_OPERAND (lhs, 0)))
		    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
		    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
		    && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
					TYPE_SIZE_UNIT (TREE_TYPE
					  (TREE_TYPE (TREE_OPERAND (lhs, 0)))),
					0)
		    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
			% tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
		  {
		    tree var = TREE_OPERAND (lhs, 0);
		    tree val = gimple_assign_rhs1 (stmt);
		    /* Punning may be needed to match the element type.  */
		    if (! types_compatible_p (TREE_TYPE (TREE_TYPE (var)),
					      TREE_TYPE (val)))
		      {
			tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (var)));
			gimple *pun
			  = gimple_build_assign (tem,
						 build1 (VIEW_CONVERT_EXPR,
							 TREE_TYPE (tem), val));
			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
			val = tem;
		      }
		    tree bitpos = TREE_OPERAND (lhs, 2);
		    gimple_assign_set_lhs (stmt, var);
		    gimple_assign_set_rhs_with_ops
		      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* Rewrite a vector insert using a MEM_REF on the LHS
		   into a BIT_INSERT_EXPR.  */
		if (TREE_CODE (lhs) == MEM_REF
		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
		    && DECL_P (sym)
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
		    && VECTOR_TYPE_P (TREE_TYPE (sym))
		    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
		    /* If it is a full replacement we can do better below.  */
		    && maybe_ne (wi::to_poly_offset
				   (TYPE_SIZE_UNIT (TREE_TYPE (lhs))),
				 wi::to_poly_offset
				   (TYPE_SIZE_UNIT (TREE_TYPE (sym))))
		    && known_ge (mem_ref_offset (lhs), 0)
		    && known_gt (wi::to_poly_offset
				   (TYPE_SIZE_UNIT (TREE_TYPE (sym))),
				 mem_ref_offset (lhs))
		    && multiple_p (mem_ref_offset (lhs),
				   wi::to_poly_offset
				     (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))))
		  {
		    tree val = gimple_assign_rhs1 (stmt);
		    if (! types_compatible_p (TREE_TYPE (val),
					      TREE_TYPE (TREE_TYPE (sym))))
		      {
			/* Pun VAL to the element type, or to a sub-vector
			   type when the store covers several elements.  */
			poly_uint64 lhs_bits, nelts;
			tree temtype = TREE_TYPE (TREE_TYPE (sym));
			if (poly_int_tree_p (TYPE_SIZE (TREE_TYPE (lhs)),
					     &lhs_bits)
			    && multiple_p (lhs_bits,
					   tree_to_uhwi
					     (TYPE_SIZE (TREE_TYPE
							   (TREE_TYPE (sym)))),
					   &nelts)
			    && maybe_ne (nelts, 1u)
			    && valid_vector_subparts_p (nelts))
			  temtype = build_vector_type (temtype, nelts);
			tree tem = make_ssa_name (temtype);
			gimple *pun
			  = gimple_build_assign (tem,
						 build1 (VIEW_CONVERT_EXPR,
							 TREE_TYPE (tem), val));
			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
			val = tem;
		      }
		    tree bitpos
		      = wide_int_to_tree (bitsizetype,
					  mem_ref_offset (lhs) * BITS_PER_UNIT);
		    gimple_assign_set_lhs (stmt, sym);
		    gimple_assign_set_rhs_with_ops
		      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* We shouldn't have any fancy wrapping of
		   component-refs on the LHS, but look through
		   VIEW_CONVERT_EXPRs as that is easy.  */
		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
		  lhs = TREE_OPERAND (lhs, 0);
		if (TREE_CODE (lhs) == MEM_REF
		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
		    && integer_zerop (TREE_OPERAND (lhs, 1))
		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
		    && DECL_P (sym)
		    && !TREE_ADDRESSABLE (sym)
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
		  lhs = sym;
		else
		  lhs = gimple_assign_lhs (stmt);

		/* Rewrite the RHS and make sure the resulting assignment
		   is validly typed.  */
		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
		rhs = gimple_assign_rhs1 (stmt);
		if (gimple_assign_lhs (stmt) != lhs
		    && !useless_type_conversion_p (TREE_TYPE (lhs),
						   TREE_TYPE (rhs)))
		  {
		    if (gimple_clobber_p (stmt))
		      {
			rhs = build_constructor (TREE_TYPE (lhs), NULL);
			TREE_THIS_VOLATILE (rhs) = 1;
		      }
		    else
		      rhs = fold_build1 (VIEW_CONVERT_EXPR,
					 TREE_TYPE (lhs), rhs);
		  }
		if (gimple_assign_lhs (stmt) != lhs)
		  gimple_assign_set_lhs (stmt, lhs);

		if (gimple_assign_rhs1 (stmt) != rhs)
		  {
		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_CALL)
	      {
		unsigned i;
		if (optimize_atomic_compare_exchange_p (stmt))
		  {
		    tree expected = gimple_call_arg (stmt, 1);
		    tree decl = TREE_OPERAND (expected, 0);
		    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
		      {
			fold_builtin_atomic_compare_exchange (&gsi);
			continue;
		      }
		    else if (!TREE_ADDRESSABLE (decl))
		      /* If there are partial defs of the decl we may
			 have cleared the addressable bit but set
			 DECL_NOT_GIMPLE_REG_P.  We have to restore
			 TREE_ADDRESSABLE here.  */
		      TREE_ADDRESSABLE (decl) = 1;
		  }
		else if (is_asan_mark_p (stmt))
		  {
		    tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
		    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
		      {
			unlink_stmt_vdef (stmt);
			if (asan_mark_p (stmt, ASAN_MARK_POISON))
			  {
			    gcall *call
			      = gimple_build_call_internal (IFN_ASAN_POISON, 0);
			    gimple_call_set_lhs (call, var);
			    gsi_replace (&gsi, call, true);
			  }
			else
			  {
			    /* In ASAN_MARK (UNPOISON, &b, ...) the variable
			       is uninitialized.  Avoid dependencies on
			       previous out of scope value.  */
			    tree clobber = build_clobber (TREE_TYPE (var));
			    gimple *g = gimple_build_assign (var, clobber);
			    gsi_replace (&gsi, g, true);
			  }
			continue;
		      }
		  }
		else if (gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
		  /* Null out the address arguments of vars now rewritten
		     into SSA form (argument 0 is not an address).  */
		  for (i = 1; i < gimple_call_num_args (stmt); i++)
		    {
		      tree *argp = gimple_call_arg_ptr (stmt, i);
		      if (*argp == null_pointer_node)
			continue;
		      gcc_assert (TREE_CODE (*argp) == ADDR_EXPR
				  && VAR_P (TREE_OPERAND (*argp, 0)));
		      tree var = TREE_OPERAND (*argp, 0);
		      if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
			*argp = null_pointer_node;
		    }
		for (i = 0; i < gimple_call_num_args (stmt); ++i)
		  {
		    tree *argp = gimple_call_arg_ptr (stmt, i);
		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_ASM)
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		unsigned i;
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_output_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_input_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
	      }

	    else if (gimple_debug_bind_p (stmt)
		     && gimple_debug_bind_has_value_p (stmt))
	      {
		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
		tree decl;
		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
		decl = non_rewritable_mem_ref_base (*valuep);
		if (decl
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
		  gimple_debug_bind_reset_value (stmt);
	      }

	    if (gimple_references_memory_p (stmt)
		|| is_gimple_debug (stmt))
	      update_stmt (stmt);

	    gsi_next (&gsi);
	  }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
	update_ssa (TODO_update_ssa);
    }

  timevar_pop (TV_ADDRESS_TAKEN);
}

namespace {

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

/* Pass wrapper: all the work happens via TODO_update_address_taken,
   which invokes execute_update_addresses_taken; no execute method.  */

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace

gimple_opt_pass *
make_pass_update_address_taken (gcc::context *ctxt)
{
  return new pass_update_address_taken (ctxt);
}