1 1.1 mrg /* Variable tracking routines for the GNU compiler. 2 1.1 mrg Copyright (C) 2002-2022 Free Software Foundation, Inc. 3 1.1 mrg 4 1.1 mrg This file is part of GCC. 5 1.1 mrg 6 1.1 mrg GCC is free software; you can redistribute it and/or modify it 7 1.1 mrg under the terms of the GNU General Public License as published by 8 1.1 mrg the Free Software Foundation; either version 3, or (at your option) 9 1.1 mrg any later version. 10 1.1 mrg 11 1.1 mrg GCC is distributed in the hope that it will be useful, but WITHOUT 12 1.1 mrg ANY WARRANTY; without even the implied warranty of MERCHANTABILITY 13 1.1 mrg or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public 14 1.1 mrg License for more details. 15 1.1 mrg 16 1.1 mrg You should have received a copy of the GNU General Public License 17 1.1 mrg along with GCC; see the file COPYING3. If not see 18 1.1 mrg <http://www.gnu.org/licenses/>. */ 19 1.1 mrg 20 1.1 mrg /* This file contains the variable tracking pass. It computes where 21 1.1 mrg variables are located (which registers or where in memory) at each position 22 1.1 mrg in instruction stream and emits notes describing the locations. 23 1.1 mrg Debug information (DWARF2 location lists) is finally generated from 24 1.1 mrg these notes. 25 1.1 mrg With this debug information, it is possible to show variables 26 1.1 mrg even when debugging optimized code. 27 1.1 mrg 28 1.1 mrg How does the variable tracking pass work? 29 1.1 mrg 30 1.1 mrg First, it scans RTL code for uses, stores and clobbers (register/memory 31 1.1 mrg references in instructions), for call insns and for stack adjustments 32 1.1 mrg separately for each basic block and saves them to an array of micro 33 1.1 mrg operations. 
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn <
     < clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting offset of variables addressed using stack pointer),
   the table of structures describing the locations of parts of a variable
   and, for each physical register, a linked list of the variable parts
   stored in that register.
   The linked list is a list of variable parts stored in the register,
   i.e. it is a list of triplets (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used
   for efficiently deleting the appropriate variable parts when we set or
   clobber the register.

   There may be more than one variable part in a register.  The linked lists
   should be pretty short so it is a good data structure here.
57 1.1 mrg For example in the following code, register allocator may assign same 58 1.1 mrg register to variables A and B, and both of them are stored in the same 59 1.1 mrg register in CODE: 60 1.1 mrg 61 1.1 mrg if (cond) 62 1.1 mrg set A; 63 1.1 mrg else 64 1.1 mrg set B; 65 1.1 mrg CODE; 66 1.1 mrg if (cond) 67 1.1 mrg use A; 68 1.1 mrg else 69 1.1 mrg use B; 70 1.1 mrg 71 1.1 mrg Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations 72 1.1 mrg are emitted to appropriate positions in RTL code. Each such a note describes 73 1.1 mrg the location of one variable at the point in instruction stream where the 74 1.1 mrg note is. There is no need to emit a note for each variable before each 75 1.1 mrg instruction, we only emit these notes where the location of variable changes 76 1.1 mrg (this means that we also emit notes for changes between the OUT set of the 77 1.1 mrg previous block and the IN set of the current block). 78 1.1 mrg 79 1.1 mrg The notes consist of two parts: 80 1.1 mrg 1. the declaration (from REG_EXPR or MEM_EXPR) 81 1.1 mrg 2. the location of a variable - it is either a simple register/memory 82 1.1 mrg reference (for simple variables, for example int), 83 1.1 mrg or a parallel of register/memory references (for a large variables 84 1.1 mrg which consist of several parts, for example long long). 
85 1.1 mrg 86 1.1 mrg */ 87 1.1 mrg 88 1.1 mrg #include "config.h" 89 1.1 mrg #include "system.h" 90 1.1 mrg #include "coretypes.h" 91 1.1 mrg #include "backend.h" 92 1.1 mrg #include "target.h" 93 1.1 mrg #include "rtl.h" 94 1.1 mrg #include "tree.h" 95 1.1 mrg #include "cfghooks.h" 96 1.1 mrg #include "alloc-pool.h" 97 1.1 mrg #include "tree-pass.h" 98 1.1 mrg #include "memmodel.h" 99 1.1 mrg #include "tm_p.h" 100 1.1 mrg #include "insn-config.h" 101 1.1 mrg #include "regs.h" 102 1.1 mrg #include "emit-rtl.h" 103 1.1 mrg #include "recog.h" 104 1.1 mrg #include "diagnostic.h" 105 1.1 mrg #include "varasm.h" 106 1.1 mrg #include "stor-layout.h" 107 1.1 mrg #include "cfgrtl.h" 108 1.1 mrg #include "cfganal.h" 109 1.1 mrg #include "reload.h" 110 1.1 mrg #include "calls.h" 111 1.1 mrg #include "tree-dfa.h" 112 1.1 mrg #include "tree-ssa.h" 113 1.1 mrg #include "cselib.h" 114 1.1 mrg #include "tree-pretty-print.h" 115 1.1 mrg #include "rtl-iter.h" 116 1.1 mrg #include "fibonacci_heap.h" 117 1.1 mrg #include "print-rtl.h" 118 1.1 mrg #include "function-abi.h" 119 1.1 mrg 120 1.1 mrg typedef fibonacci_heap <long, basic_block_def> bb_heap_t; 121 1.1 mrg 122 1.1 mrg /* var-tracking.cc assumes that tree code with the same value as VALUE rtx code 123 1.1 mrg has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl. 124 1.1 mrg Currently the value is the same as IDENTIFIER_NODE, which has such 125 1.1 mrg a property. If this compile time assertion ever fails, make sure that 126 1.1 mrg the new tree code that equals (int) VALUE has the same property. */ 127 1.1 mrg extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1]; 128 1.1 mrg 129 1.1 mrg /* Type of micro operation. */ 130 1.1 mrg enum micro_operation_type 131 1.1 mrg { 132 1.1 mrg MO_USE, /* Use location (REG or MEM). */ 133 1.1 mrg MO_USE_NO_VAR,/* Use location which is not associated with a variable 134 1.1 mrg or the variable is not trackable. 
*/ 135 1.1 mrg MO_VAL_USE, /* Use location which is associated with a value. */ 136 1.1 mrg MO_VAL_LOC, /* Use location which appears in a debug insn. */ 137 1.1 mrg MO_VAL_SET, /* Set location associated with a value. */ 138 1.1 mrg MO_SET, /* Set location. */ 139 1.1 mrg MO_COPY, /* Copy the same portion of a variable from one 140 1.1 mrg location to another. */ 141 1.1 mrg MO_CLOBBER, /* Clobber location. */ 142 1.1 mrg MO_CALL, /* Call insn. */ 143 1.1 mrg MO_ADJUST /* Adjust stack pointer. */ 144 1.1 mrg 145 1.1 mrg }; 146 1.1 mrg 147 1.1 mrg static const char * const ATTRIBUTE_UNUSED 148 1.1 mrg micro_operation_type_name[] = { 149 1.1 mrg "MO_USE", 150 1.1 mrg "MO_USE_NO_VAR", 151 1.1 mrg "MO_VAL_USE", 152 1.1 mrg "MO_VAL_LOC", 153 1.1 mrg "MO_VAL_SET", 154 1.1 mrg "MO_SET", 155 1.1 mrg "MO_COPY", 156 1.1 mrg "MO_CLOBBER", 157 1.1 mrg "MO_CALL", 158 1.1 mrg "MO_ADJUST" 159 1.1 mrg }; 160 1.1 mrg 161 1.1 mrg /* Where shall the note be emitted? BEFORE or AFTER the instruction. 162 1.1 mrg Notes emitted as AFTER_CALL are to take effect during the call, 163 1.1 mrg rather than after the call. */ 164 1.1 mrg enum emit_note_where 165 1.1 mrg { 166 1.1 mrg EMIT_NOTE_BEFORE_INSN, 167 1.1 mrg EMIT_NOTE_AFTER_INSN, 168 1.1 mrg EMIT_NOTE_AFTER_CALL_INSN 169 1.1 mrg }; 170 1.1 mrg 171 1.1 mrg /* Structure holding information about micro operation. */ 172 1.1 mrg struct micro_operation 173 1.1 mrg { 174 1.1 mrg /* Type of micro operation. */ 175 1.1 mrg enum micro_operation_type type; 176 1.1 mrg 177 1.1 mrg /* The instruction which the micro operation is in, for MO_USE, 178 1.1 mrg MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent 179 1.1 mrg instruction or note in the original flow (before any var-tracking 180 1.1 mrg notes are inserted, to simplify emission of notes), for MO_SET 181 1.1 mrg and MO_CLOBBER. */ 182 1.1 mrg rtx_insn *insn; 183 1.1 mrg 184 1.1 mrg union { 185 1.1 mrg /* Location. 
For MO_SET and MO_COPY, this is the SET that 186 1.1 mrg performs the assignment, if known, otherwise it is the target 187 1.1 mrg of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a 188 1.1 mrg CONCAT of the VALUE and the LOC associated with it. For 189 1.1 mrg MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION 190 1.1 mrg associated with it. */ 191 1.1 mrg rtx loc; 192 1.1 mrg 193 1.1 mrg /* Stack adjustment. */ 194 1.1 mrg HOST_WIDE_INT adjust; 195 1.1 mrg } u; 196 1.1 mrg }; 197 1.1 mrg 198 1.1 mrg 199 1.1 mrg /* A declaration of a variable, or an RTL value being handled like a 200 1.1 mrg declaration. */ 201 1.1 mrg typedef void *decl_or_value; 202 1.1 mrg 203 1.1 mrg /* Return true if a decl_or_value DV is a DECL or NULL. */ 204 1.1 mrg static inline bool 205 1.1 mrg dv_is_decl_p (decl_or_value dv) 206 1.1 mrg { 207 1.1 mrg return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE; 208 1.1 mrg } 209 1.1 mrg 210 1.1 mrg /* Return true if a decl_or_value is a VALUE rtl. */ 211 1.1 mrg static inline bool 212 1.1 mrg dv_is_value_p (decl_or_value dv) 213 1.1 mrg { 214 1.1 mrg return dv && !dv_is_decl_p (dv); 215 1.1 mrg } 216 1.1 mrg 217 1.1 mrg /* Return the decl in the decl_or_value. */ 218 1.1 mrg static inline tree 219 1.1 mrg dv_as_decl (decl_or_value dv) 220 1.1 mrg { 221 1.1 mrg gcc_checking_assert (dv_is_decl_p (dv)); 222 1.1 mrg return (tree) dv; 223 1.1 mrg } 224 1.1 mrg 225 1.1 mrg /* Return the value in the decl_or_value. */ 226 1.1 mrg static inline rtx 227 1.1 mrg dv_as_value (decl_or_value dv) 228 1.1 mrg { 229 1.1 mrg gcc_checking_assert (dv_is_value_p (dv)); 230 1.1 mrg return (rtx)dv; 231 1.1 mrg } 232 1.1 mrg 233 1.1 mrg /* Return the opaque pointer in the decl_or_value. */ 234 1.1 mrg static inline void * 235 1.1 mrg dv_as_opaque (decl_or_value dv) 236 1.1 mrg { 237 1.1 mrg return dv; 238 1.1 mrg } 239 1.1 mrg 240 1.1 mrg 241 1.1 mrg /* Description of location of a part of a variable. 
The content of a physical 242 1.1 mrg register is described by a chain of these structures. 243 1.1 mrg The chains are pretty short (usually 1 or 2 elements) and thus 244 1.1 mrg chain is the best data structure. */ 245 1.1 mrg struct attrs 246 1.1 mrg { 247 1.1 mrg /* Pointer to next member of the list. */ 248 1.1 mrg attrs *next; 249 1.1 mrg 250 1.1 mrg /* The rtx of register. */ 251 1.1 mrg rtx loc; 252 1.1 mrg 253 1.1 mrg /* The declaration corresponding to LOC. */ 254 1.1 mrg decl_or_value dv; 255 1.1 mrg 256 1.1 mrg /* Offset from start of DECL. */ 257 1.1 mrg HOST_WIDE_INT offset; 258 1.1 mrg }; 259 1.1 mrg 260 1.1 mrg /* Structure for chaining the locations. */ 261 1.1 mrg struct location_chain 262 1.1 mrg { 263 1.1 mrg /* Next element in the chain. */ 264 1.1 mrg location_chain *next; 265 1.1 mrg 266 1.1 mrg /* The location (REG, MEM or VALUE). */ 267 1.1 mrg rtx loc; 268 1.1 mrg 269 1.1 mrg /* The "value" stored in this location. */ 270 1.1 mrg rtx set_src; 271 1.1 mrg 272 1.1 mrg /* Initialized? */ 273 1.1 mrg enum var_init_status init; 274 1.1 mrg }; 275 1.1 mrg 276 1.1 mrg /* A vector of loc_exp_dep holds the active dependencies of a one-part 277 1.1 mrg DV on VALUEs, i.e., the VALUEs expanded so as to form the current 278 1.1 mrg location of DV. Each entry is also part of VALUE' s linked-list of 279 1.1 mrg backlinks back to DV. */ 280 1.1 mrg struct loc_exp_dep 281 1.1 mrg { 282 1.1 mrg /* The dependent DV. */ 283 1.1 mrg decl_or_value dv; 284 1.1 mrg /* The dependency VALUE or DECL_DEBUG. */ 285 1.1 mrg rtx value; 286 1.1 mrg /* The next entry in VALUE's backlinks list. */ 287 1.1 mrg struct loc_exp_dep *next; 288 1.1 mrg /* A pointer to the pointer to this entry (head or prev's next) in 289 1.1 mrg the doubly-linked list. */ 290 1.1 mrg struct loc_exp_dep **pprev; 291 1.1 mrg }; 292 1.1 mrg 293 1.1 mrg 294 1.1 mrg /* This data structure holds information about the depth of a variable 295 1.1 mrg expansion. 
*/ 296 1.1 mrg struct expand_depth 297 1.1 mrg { 298 1.1 mrg /* This measures the complexity of the expanded expression. It 299 1.1 mrg grows by one for each level of expansion that adds more than one 300 1.1 mrg operand. */ 301 1.1 mrg int complexity; 302 1.1 mrg /* This counts the number of ENTRY_VALUE expressions in an 303 1.1 mrg expansion. We want to minimize their use. */ 304 1.1 mrg int entryvals; 305 1.1 mrg }; 306 1.1 mrg 307 1.1 mrg /* Type for dependencies actively used when expand FROM into cur_loc. */ 308 1.1 mrg typedef vec<loc_exp_dep, va_heap, vl_embed> deps_vec; 309 1.1 mrg 310 1.1 mrg /* This data structure is allocated for one-part variables at the time 311 1.1 mrg of emitting notes. */ 312 1.1 mrg struct onepart_aux 313 1.1 mrg { 314 1.1 mrg /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc 315 1.1 mrg computation used the expansion of this variable, and that ought 316 1.1 mrg to be notified should this variable change. If the DV's cur_loc 317 1.1 mrg expanded to NULL, all components of the loc list are regarded as 318 1.1 mrg active, so that any changes in them give us a chance to get a 319 1.1 mrg location. Otherwise, only components of the loc that expanded to 320 1.1 mrg non-NULL are regarded as active dependencies. */ 321 1.1 mrg loc_exp_dep *backlinks; 322 1.1 mrg /* This holds the LOC that was expanded into cur_loc. We need only 323 1.1 mrg mark a one-part variable as changed if the FROM loc is removed, 324 1.1 mrg or if it has no known location and a loc is added, or if it gets 325 1.1 mrg a change notification from any of its active dependencies. */ 326 1.1 mrg rtx from; 327 1.1 mrg /* The depth of the cur_loc expression. */ 328 1.1 mrg expand_depth depth; 329 1.1 mrg /* Dependencies actively used when expand FROM into cur_loc. */ 330 1.1 mrg deps_vec deps; 331 1.1 mrg }; 332 1.1 mrg 333 1.1 mrg /* Structure describing one part of variable. 
*/ 334 1.1 mrg struct variable_part 335 1.1 mrg { 336 1.1 mrg /* Chain of locations of the part. */ 337 1.1 mrg location_chain *loc_chain; 338 1.1 mrg 339 1.1 mrg /* Location which was last emitted to location list. */ 340 1.1 mrg rtx cur_loc; 341 1.1 mrg 342 1.1 mrg union variable_aux 343 1.1 mrg { 344 1.1 mrg /* The offset in the variable, if !var->onepart. */ 345 1.1 mrg HOST_WIDE_INT offset; 346 1.1 mrg 347 1.1 mrg /* Pointer to auxiliary data, if var->onepart and emit_notes. */ 348 1.1 mrg struct onepart_aux *onepaux; 349 1.1 mrg } aux; 350 1.1 mrg }; 351 1.1 mrg 352 1.1 mrg /* Maximum number of location parts. */ 353 1.1 mrg #define MAX_VAR_PARTS 16 354 1.1 mrg 355 1.1 mrg /* Enumeration type used to discriminate various types of one-part 356 1.1 mrg variables. */ 357 1.1 mrg enum onepart_enum 358 1.1 mrg { 359 1.1 mrg /* Not a one-part variable. */ 360 1.1 mrg NOT_ONEPART = 0, 361 1.1 mrg /* A one-part DECL that is not a DEBUG_EXPR_DECL. */ 362 1.1 mrg ONEPART_VDECL = 1, 363 1.1 mrg /* A DEBUG_EXPR_DECL. */ 364 1.1 mrg ONEPART_DEXPR = 2, 365 1.1 mrg /* A VALUE. */ 366 1.1 mrg ONEPART_VALUE = 3 367 1.1 mrg }; 368 1.1 mrg 369 1.1 mrg /* Structure describing where the variable is located. */ 370 1.1 mrg struct variable 371 1.1 mrg { 372 1.1 mrg /* The declaration of the variable, or an RTL value being handled 373 1.1 mrg like a declaration. */ 374 1.1 mrg decl_or_value dv; 375 1.1 mrg 376 1.1 mrg /* Reference count. */ 377 1.1 mrg int refcount; 378 1.1 mrg 379 1.1 mrg /* Number of variable parts. */ 380 1.1 mrg char n_var_parts; 381 1.1 mrg 382 1.1 mrg /* What type of DV this is, according to enum onepart_enum. */ 383 1.1 mrg ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT; 384 1.1 mrg 385 1.1 mrg /* True if this variable_def struct is currently in the 386 1.1 mrg changed_variables hash table. */ 387 1.1 mrg bool in_changed_variables; 388 1.1 mrg 389 1.1 mrg /* The variable parts. 
*/ 390 1.1 mrg variable_part var_part[1]; 391 1.1 mrg }; 392 1.1 mrg 393 1.1 mrg /* Pointer to the BB's information specific to variable tracking pass. */ 394 1.1 mrg #define VTI(BB) ((variable_tracking_info *) (BB)->aux) 395 1.1 mrg 396 1.1 mrg /* Return MEM_OFFSET (MEM) as a HOST_WIDE_INT, or 0 if we can't. */ 397 1.1 mrg 398 1.1 mrg static inline HOST_WIDE_INT 399 1.1 mrg int_mem_offset (const_rtx mem) 400 1.1 mrg { 401 1.1 mrg HOST_WIDE_INT offset; 402 1.1 mrg if (MEM_OFFSET_KNOWN_P (mem) && MEM_OFFSET (mem).is_constant (&offset)) 403 1.1 mrg return offset; 404 1.1 mrg return 0; 405 1.1 mrg } 406 1.1 mrg 407 1.1 mrg #if CHECKING_P && (GCC_VERSION >= 2007) 408 1.1 mrg 409 1.1 mrg /* Access VAR's Ith part's offset, checking that it's not a one-part 410 1.1 mrg variable. */ 411 1.1 mrg #define VAR_PART_OFFSET(var, i) __extension__ \ 412 1.1 mrg (*({ variable *const __v = (var); \ 413 1.1 mrg gcc_checking_assert (!__v->onepart); \ 414 1.1 mrg &__v->var_part[(i)].aux.offset; })) 415 1.1 mrg 416 1.1 mrg /* Access VAR's one-part auxiliary data, checking that it is a 417 1.1 mrg one-part variable. */ 418 1.1 mrg #define VAR_LOC_1PAUX(var) __extension__ \ 419 1.1 mrg (*({ variable *const __v = (var); \ 420 1.1 mrg gcc_checking_assert (__v->onepart); \ 421 1.1 mrg &__v->var_part[0].aux.onepaux; })) 422 1.1 mrg 423 1.1 mrg #else 424 1.1 mrg #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset) 425 1.1 mrg #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux) 426 1.1 mrg #endif 427 1.1 mrg 428 1.1 mrg /* These are accessor macros for the one-part auxiliary data. When 429 1.1 mrg convenient for users, they're guarded by tests that the data was 430 1.1 mrg allocated. */ 431 1.1 mrg #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \ 432 1.1 mrg ? VAR_LOC_1PAUX (var)->backlinks \ 433 1.1 mrg : NULL) 434 1.1 mrg #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \ 435 1.1 mrg ? 
&VAR_LOC_1PAUX (var)->backlinks \ 436 1.1 mrg : NULL) 437 1.1 mrg #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from) 438 1.1 mrg #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth) 439 1.1 mrg #define VAR_LOC_DEP_VEC(var) var_loc_dep_vec (var) 440 1.1 mrg 441 1.1 mrg /* Implements the VAR_LOC_DEP_VEC above as a function to work around 442 1.1 mrg a bogus -Wnonnull (PR c/95554). */ 443 1.1 mrg 444 1.1 mrg static inline deps_vec* 445 1.1 mrg var_loc_dep_vec (variable *var) 446 1.1 mrg { 447 1.1 mrg return VAR_LOC_1PAUX (var) ? &VAR_LOC_1PAUX (var)->deps : NULL; 448 1.1 mrg } 449 1.1 mrg 450 1.1 mrg 451 1.1 mrg typedef unsigned int dvuid; 452 1.1 mrg 453 1.1 mrg /* Return the uid of DV. */ 454 1.1 mrg 455 1.1 mrg static inline dvuid 456 1.1 mrg dv_uid (decl_or_value dv) 457 1.1 mrg { 458 1.1 mrg if (dv_is_value_p (dv)) 459 1.1 mrg return CSELIB_VAL_PTR (dv_as_value (dv))->uid; 460 1.1 mrg else 461 1.1 mrg return DECL_UID (dv_as_decl (dv)); 462 1.1 mrg } 463 1.1 mrg 464 1.1 mrg /* Compute the hash from the uid. */ 465 1.1 mrg 466 1.1 mrg static inline hashval_t 467 1.1 mrg dv_uid2hash (dvuid uid) 468 1.1 mrg { 469 1.1 mrg return uid; 470 1.1 mrg } 471 1.1 mrg 472 1.1 mrg /* The hash function for a mask table in a shared_htab chain. */ 473 1.1 mrg 474 1.1 mrg static inline hashval_t 475 1.1 mrg dv_htab_hash (decl_or_value dv) 476 1.1 mrg { 477 1.1 mrg return dv_uid2hash (dv_uid (dv)); 478 1.1 mrg } 479 1.1 mrg 480 1.1 mrg static void variable_htab_free (void *); 481 1.1 mrg 482 1.1 mrg /* Variable hashtable helpers. 
*/ 483 1.1 mrg 484 1.1 mrg struct variable_hasher : pointer_hash <variable> 485 1.1 mrg { 486 1.1 mrg typedef void *compare_type; 487 1.1 mrg static inline hashval_t hash (const variable *); 488 1.1 mrg static inline bool equal (const variable *, const void *); 489 1.1 mrg static inline void remove (variable *); 490 1.1 mrg }; 491 1.1 mrg 492 1.1 mrg /* The hash function for variable_htab, computes the hash value 493 1.1 mrg from the declaration of variable X. */ 494 1.1 mrg 495 1.1 mrg inline hashval_t 496 1.1 mrg variable_hasher::hash (const variable *v) 497 1.1 mrg { 498 1.1 mrg return dv_htab_hash (v->dv); 499 1.1 mrg } 500 1.1 mrg 501 1.1 mrg /* Compare the declaration of variable X with declaration Y. */ 502 1.1 mrg 503 1.1 mrg inline bool 504 1.1 mrg variable_hasher::equal (const variable *v, const void *y) 505 1.1 mrg { 506 1.1 mrg decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y); 507 1.1 mrg 508 1.1 mrg return (dv_as_opaque (v->dv) == dv_as_opaque (dv)); 509 1.1 mrg } 510 1.1 mrg 511 1.1 mrg /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */ 512 1.1 mrg 513 1.1 mrg inline void 514 1.1 mrg variable_hasher::remove (variable *var) 515 1.1 mrg { 516 1.1 mrg variable_htab_free (var); 517 1.1 mrg } 518 1.1 mrg 519 1.1 mrg typedef hash_table<variable_hasher> variable_table_type; 520 1.1 mrg typedef variable_table_type::iterator variable_iterator_type; 521 1.1 mrg 522 1.1 mrg /* Structure for passing some other parameters to function 523 1.1 mrg emit_note_insn_var_location. */ 524 1.1 mrg struct emit_note_data 525 1.1 mrg { 526 1.1 mrg /* The instruction which the note will be emitted before/after. */ 527 1.1 mrg rtx_insn *insn; 528 1.1 mrg 529 1.1 mrg /* Where the note will be emitted (before/after insn)? */ 530 1.1 mrg enum emit_note_where where; 531 1.1 mrg 532 1.1 mrg /* The variables and values active at this point. 
*/ 533 1.1 mrg variable_table_type *vars; 534 1.1 mrg }; 535 1.1 mrg 536 1.1 mrg /* Structure holding a refcounted hash table. If refcount > 1, 537 1.1 mrg it must be first unshared before modified. */ 538 1.1 mrg struct shared_hash 539 1.1 mrg { 540 1.1 mrg /* Reference count. */ 541 1.1 mrg int refcount; 542 1.1 mrg 543 1.1 mrg /* Actual hash table. */ 544 1.1 mrg variable_table_type *htab; 545 1.1 mrg }; 546 1.1 mrg 547 1.1 mrg /* Structure holding the IN or OUT set for a basic block. */ 548 1.1 mrg struct dataflow_set 549 1.1 mrg { 550 1.1 mrg /* Adjustment of stack offset. */ 551 1.1 mrg HOST_WIDE_INT stack_adjust; 552 1.1 mrg 553 1.1 mrg /* Attributes for registers (lists of attrs). */ 554 1.1 mrg attrs *regs[FIRST_PSEUDO_REGISTER]; 555 1.1 mrg 556 1.1 mrg /* Variable locations. */ 557 1.1 mrg shared_hash *vars; 558 1.1 mrg 559 1.1 mrg /* Vars that is being traversed. */ 560 1.1 mrg shared_hash *traversed_vars; 561 1.1 mrg }; 562 1.1 mrg 563 1.1 mrg /* The structure (one for each basic block) containing the information 564 1.1 mrg needed for variable tracking. */ 565 1.1 mrg struct variable_tracking_info 566 1.1 mrg { 567 1.1 mrg /* The vector of micro operations. */ 568 1.1 mrg vec<micro_operation> mos; 569 1.1 mrg 570 1.1 mrg /* The IN and OUT set for dataflow analysis. */ 571 1.1 mrg dataflow_set in; 572 1.1 mrg dataflow_set out; 573 1.1 mrg 574 1.1 mrg /* The permanent-in dataflow set for this block. This is used to 575 1.1 mrg hold values for which we had to compute entry values. ??? This 576 1.1 mrg should probably be dynamically allocated, to avoid using more 577 1.1 mrg memory in non-debug builds. */ 578 1.1 mrg dataflow_set *permp; 579 1.1 mrg 580 1.1 mrg /* Has the block been visited in DFS? */ 581 1.1 mrg bool visited; 582 1.1 mrg 583 1.1 mrg /* Has the block been flooded in VTA? */ 584 1.1 mrg bool flooded; 585 1.1 mrg 586 1.1 mrg }; 587 1.1 mrg 588 1.1 mrg /* Alloc pool for struct attrs_def. 
*/ 589 1.1 mrg object_allocator<attrs> attrs_pool ("attrs pool"); 590 1.1 mrg 591 1.1 mrg /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */ 592 1.1 mrg 593 1.1 mrg static pool_allocator var_pool 594 1.1 mrg ("variable_def pool", sizeof (variable) + 595 1.1 mrg (MAX_VAR_PARTS - 1) * sizeof (((variable *)NULL)->var_part[0])); 596 1.1 mrg 597 1.1 mrg /* Alloc pool for struct variable_def with a single var_part entry. */ 598 1.1 mrg static pool_allocator valvar_pool 599 1.1 mrg ("small variable_def pool", sizeof (variable)); 600 1.1 mrg 601 1.1 mrg /* Alloc pool for struct location_chain. */ 602 1.1 mrg static object_allocator<location_chain> location_chain_pool 603 1.1 mrg ("location_chain pool"); 604 1.1 mrg 605 1.1 mrg /* Alloc pool for struct shared_hash. */ 606 1.1 mrg static object_allocator<shared_hash> shared_hash_pool ("shared_hash pool"); 607 1.1 mrg 608 1.1 mrg /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */ 609 1.1 mrg object_allocator<loc_exp_dep> loc_exp_dep_pool ("loc_exp_dep pool"); 610 1.1 mrg 611 1.1 mrg /* Changed variables, notes will be emitted for them. */ 612 1.1 mrg static variable_table_type *changed_variables; 613 1.1 mrg 614 1.1 mrg /* Shall notes be emitted? */ 615 1.1 mrg static bool emit_notes; 616 1.1 mrg 617 1.1 mrg /* Values whose dynamic location lists have gone empty, but whose 618 1.1 mrg cselib location lists are still usable. Use this to hold the 619 1.1 mrg current location, the backlinks, etc, during emit_notes. */ 620 1.1 mrg static variable_table_type *dropped_values; 621 1.1 mrg 622 1.1 mrg /* Empty shared hashtable. */ 623 1.1 mrg static shared_hash *empty_shared_hash; 624 1.1 mrg 625 1.1 mrg /* Scratch register bitmap used by cselib_expand_value_rtx. 
 */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
/* Pairing of an outgoing argument register with the incoming register
   it is mapped to by a register-window save (presumably SPARC-style
   windowed registers -- confirm against the target).  */
struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
};


/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;

/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);

static void init_attrs_list_set (attrs **);
static void attrs_list_clear (attrs **);
static attrs *attrs_list_member (attrs *, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs **, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs **, attrs *);
static void attrs_list_union (attrs **, attrs *);

static variable **unshare_variable (dataflow_set *set, variable **slot,
				    variable *var, enum var_init_status);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain *find_loc_in_1pdv (rtx, variable *,
					 variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable *, variable *);
static bool variable_different_p (variable *, variable *);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool track_expr_p (tree, bool);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs *);
static void dump_var (variable *);
static void dump_vars (variable_table_type *);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable *, dataflow_set *);
static variable **set_slot_part (dataflow_set *, rtx, variable **,
				decl_or_value, HOST_WIDE_INT,
				enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static variable **clobber_slot_part (dataflow_set *, rtx,
				     variable **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static variable **delete_slot_part (dataflow_set *, rtx, variable **,
				    HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);

/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.
   ARG points to a two-element HOST_WIDE_INT array: element [0] accumulates
   the pre-modification adjustment, element [1] the post-modification one.
   Only auto-modifications of the stack pointer are recorded.  */

static int
stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
				 void *arg)
{
  if (dest != stack_pointer_rtx)
    return 0;

  switch (GET_CODE (op))
    {
    case PRE_INC:
    case PRE_DEC:
      /* SRCOFF is the signed constant amount; subtracting it turns a
	 stack-growing decrement into a positive adjustment.  */
      ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
      return 0;
    case POST_INC:
    case POST_DEC:
      ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
      return 0;
    case PRE_MODIFY:
    case POST_MODIFY:
      /* We handle only adjustments by constant amount.  */
      gcc_assert (GET_CODE (src) == PLUS
		  && CONST_INT_P (XEXP (src, 1))
		  && XEXP (src, 0) == stack_pointer_rtx);
      /* Index 0 for PRE_MODIFY, 1 for POST_MODIFY.  */
      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
	-= INTVAL (XEXP (src, 1));
      return 0;
    default:
      gcc_unreachable ();
    }
}

/* Given a SET, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int)))  */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      /* An explicit sp = sp +/- const is a post-modification; the sign
	 is flipped so that stack growth yields a positive adjustment.  */
      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
      return;
    }
  /* Otherwise look for auto-inc/dec/modify addresses inside PATTERN.  */
  HOST_WIDE_INT res[2] = { 0, 0 };
  for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
  *pre += res[0];
  *post += res[1];
}

/* Given an INSN, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.
 */

static void
insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      /* A REG_FRAME_RELATED_EXPR note, when present, describes the
	 frame-related effect more precisely than the pattern itself.  */
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}

/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  /* Explicit DFS stack of edge iterators, one per CFG level.  */
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  /* Accumulate the net adjustment of every insn in DEST.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}

/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
   hard_frame_pointer_rtx is being mapped to it and offset for it.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (poly_int64 adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.
 */
static poly_int64 hard_frame_pointer_adjustment = -1;

/* Data for adjust_mems callback.  */

class adjust_mem_data
{
public:
  bool store;			/* True while processing the LHS of a store.  */
  machine_mode mem_mode;	/* Mode of the enclosing MEM, or VOIDmode.  */
  HOST_WIDE_INT stack_adjust;	/* Stack adjustment at the current insn.  */
  auto_vec<rtx> side_effects;	/* SETs for stripped auto-inc/dec/modify.  */
};

/* Helper for adjust_mems.  Return true if X is suitable for
   transformation of wider mode arithmetics to narrower mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    /* A register known to cselib in the wide mode must keep that
	       mode; and the lowpart subreg must be representable.  */
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    break;
	  case ASHIFT:
	    if (GET_MODE (XEXP (x, 1)) != VOIDmode)
	      {
		enum machine_mode mode = GET_MODE (subreg);
		rtx op1 = XEXP (x, 1);
		enum machine_mode op1_mode = GET_MODE (op1);
		if (GET_MODE_PRECISION (as_a <scalar_int_mode> (mode))
		    < GET_MODE_PRECISION (as_a <scalar_int_mode> (op1_mode)))
		  {
		    poly_uint64 byte = subreg_lowpart_offset (mode, op1_mode);
		    if (GET_CODE (op1) == SUBREG || GET_CODE (op1) == CONCAT)
		      {
			if (!simplify_subreg (mode, op1, op1_mode, byte))
			  return false;
		      }
		    else if (!validate_subreg (mode, op1_mode, op1, byte))
		      return false;
		  }
	      }
	    /* Only the shifted operand is narrowed; the count keeps its
	       mode, so don't descend into it.  */
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}

/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, scalar_int_mode mode, scalar_int_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = XEXP (x, 1);
      /* Ensure shift amount is not wider than mode.  */
      if (GET_MODE (op1) == VOIDmode)
	op1 = lowpart_subreg (mode, op1, wmode);
      else if (GET_MODE_PRECISION (mode)
	       < GET_MODE_PRECISION (as_a <scalar_int_mode> (GET_MODE (op1))))
	op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
      return simplify_gen_binary (ASHIFT, mode, op0, op1);
    default:
      /* use_narrower_mode_test only admits the codes above.  */
      gcc_unreachable ();
    }
}

/* Helper function for adjusting used MEMs.
 */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  class adjust_mem_data *amd = (class adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  scalar_int_mode tem_mode, tem_subreg_mode;
  poly_int64 size;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && maybe_ne (hard_frame_pointer_adjustment, -1)
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      /* Recurse into the address with mem_mode/store temporarily set:
	 inside an address we are never on the LHS of a store.  */
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      size = GET_MODE_SIZE (amd->mem_mode);
      addr = plus_constant (GET_MODE (loc), XEXP (loc, 0),
			    GET_CODE (loc) == PRE_INC ? size : -size);
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      size = GET_MODE_SIZE (amd->mem_mode);
      /* Queue the register update as an extra SET side-effect so the
	 auto-modification can be stripped from the address.  */
      tem = plus_constant (GET_MODE (loc), XEXP (loc, 0),
			   (GET_CODE (loc) == PRE_INC
			    || GET_CODE (loc) == POST_INC) ? size : -size);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      /* If the subreg is a lowpart of wider-mode arithmetic, try to
	 rewrite the arithmetic directly in the narrower mode.  */
      if (MAY_HAVE_DEBUG_BIND_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && is_a <scalar_int_mode> (GET_MODE (tem), &tem_mode)
	  && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (tem)),
				     &tem_subreg_mode)
	  && (GET_MODE_PRECISION (tem_mode)
	      < GET_MODE_PRECISION (tem_subreg_mode))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), tem_mode, tem_subreg_mode);
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  /* NULL means "no replacement, keep recursing into operands".  */
  return NULL_RTX;
}

/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    /* Queue the change; it is applied later as a group.  */
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}

/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  adjust_mem_data amd;
  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;

  amd.store = true;
  note_stores (insn, adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, asm_noperands
	 test above needs to be called before that (otherwise it would fail)
	 and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  /* Wrap the pattern in a PARALLEL that appends the SETs collected
     while stripping auto-modifications above.  */
  if (!amd.side_effects.is_empty ())
    {
      rtx *pat, new_pat;
      int i, oldn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      unsigned int newn = amd.side_effects.length ();
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;

      rtx effect;
      unsigned int j;
      FOR_EACH_VEC_ELT_REVERSE (amd.side_effects, j, effect)
	XVECEXP (new_pat, 0, j + oldn) = effect;
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}

/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}

/* Return nonzero if a decl_or_value must not have more than one
   variable part.
The returned value discriminates among various 1358 1.1 mrg kinds of one-part DVs ccording to enum onepart_enum. */ 1359 1.1 mrg static inline onepart_enum 1360 1.1 mrg dv_onepart_p (decl_or_value dv) 1361 1.1 mrg { 1362 1.1 mrg tree decl; 1363 1.1 mrg 1364 1.1 mrg if (!MAY_HAVE_DEBUG_BIND_INSNS) 1365 1.1 mrg return NOT_ONEPART; 1366 1.1 mrg 1367 1.1 mrg if (dv_is_value_p (dv)) 1368 1.1 mrg return ONEPART_VALUE; 1369 1.1 mrg 1370 1.1 mrg decl = dv_as_decl (dv); 1371 1.1 mrg 1372 1.1 mrg if (TREE_CODE (decl) == DEBUG_EXPR_DECL) 1373 1.1 mrg return ONEPART_DEXPR; 1374 1.1 mrg 1375 1.1 mrg if (target_for_debug_bind (decl) != NULL_TREE) 1376 1.1 mrg return ONEPART_VDECL; 1377 1.1 mrg 1378 1.1 mrg return NOT_ONEPART; 1379 1.1 mrg } 1380 1.1 mrg 1381 1.1 mrg /* Return the variable pool to be used for a dv of type ONEPART. */ 1382 1.1 mrg static inline pool_allocator & 1383 1.1 mrg onepart_pool (onepart_enum onepart) 1384 1.1 mrg { 1385 1.1 mrg return onepart ? valvar_pool : var_pool; 1386 1.1 mrg } 1387 1.1 mrg 1388 1.1 mrg /* Allocate a variable_def from the corresponding variable pool. */ 1389 1.1 mrg static inline variable * 1390 1.1 mrg onepart_pool_allocate (onepart_enum onepart) 1391 1.1 mrg { 1392 1.1 mrg return (variable*) onepart_pool (onepart).allocate (); 1393 1.1 mrg } 1394 1.1 mrg 1395 1.1 mrg /* Build a decl_or_value out of a decl. */ 1396 1.1 mrg static inline decl_or_value 1397 1.1 mrg dv_from_decl (tree decl) 1398 1.1 mrg { 1399 1.1 mrg decl_or_value dv; 1400 1.1 mrg dv = decl; 1401 1.1 mrg gcc_checking_assert (dv_is_decl_p (dv)); 1402 1.1 mrg return dv; 1403 1.1 mrg } 1404 1.1 mrg 1405 1.1 mrg /* Build a decl_or_value out of a value. 
*/ 1406 1.1 mrg static inline decl_or_value 1407 1.1 mrg dv_from_value (rtx value) 1408 1.1 mrg { 1409 1.1 mrg decl_or_value dv; 1410 1.1 mrg dv = value; 1411 1.1 mrg gcc_checking_assert (dv_is_value_p (dv)); 1412 1.1 mrg return dv; 1413 1.1 mrg } 1414 1.1 mrg 1415 1.1 mrg /* Return a value or the decl of a debug_expr as a decl_or_value. */ 1416 1.1 mrg static inline decl_or_value 1417 1.1 mrg dv_from_rtx (rtx x) 1418 1.1 mrg { 1419 1.1 mrg decl_or_value dv; 1420 1.1 mrg 1421 1.1 mrg switch (GET_CODE (x)) 1422 1.1 mrg { 1423 1.1 mrg case DEBUG_EXPR: 1424 1.1 mrg dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x)); 1425 1.1 mrg gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x); 1426 1.1 mrg break; 1427 1.1 mrg 1428 1.1 mrg case VALUE: 1429 1.1 mrg dv = dv_from_value (x); 1430 1.1 mrg break; 1431 1.1 mrg 1432 1.1 mrg default: 1433 1.1 mrg gcc_unreachable (); 1434 1.1 mrg } 1435 1.1 mrg 1436 1.1 mrg return dv; 1437 1.1 mrg } 1438 1.1 mrg 1439 1.1 mrg extern void debug_dv (decl_or_value dv); 1440 1.1 mrg 1441 1.1 mrg DEBUG_FUNCTION void 1442 1.1 mrg debug_dv (decl_or_value dv) 1443 1.1 mrg { 1444 1.1 mrg if (dv_is_value_p (dv)) 1445 1.1 mrg debug_rtx (dv_as_value (dv)); 1446 1.1 mrg else 1447 1.1 mrg debug_generic_stmt (dv_as_decl (dv)); 1448 1.1 mrg } 1449 1.1 mrg 1450 1.1 mrg static void loc_exp_dep_clear (variable *var); 1451 1.1 mrg 1452 1.1 mrg /* Free the element of VARIABLE_HTAB (its type is struct variable_def). 
 */

static void
variable_htab_free (void *elem)
{
  int i;
  variable *var = (variable *) elem;
  location_chain *node, *next;

  gcc_checking_assert (var->refcount > 0);

  /* VAR is reference-counted; only the last release frees it.  */
  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  delete node;
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  /* Return VAR to the pool it was allocated from.  */
  onepart_pool (var->onepart).remove (var);
}

/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs **set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs **listp)
{
  attrs *list, *next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      delete list;
    }
  *listp = NULL;
}

/* Return true if the pair of DECL and OFFSET is the member of the LIST.  */

static attrs *
attrs_list_member (attrs *list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}

/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */

static void
attrs_list_insert (attrs **listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  /* Prepend; order of the list is not significant to lookups.  */
  attrs *list = new attrs;
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs **dstp, attrs *src)
{
  attrs_list_clear (dstp);
  /* Note: prepending reverses the order relative to SRC.  */
  for (; src; src = src->next)
    {
      attrs *n = new attrs;
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.
*/ 1559 1.1 mrg 1560 1.1 mrg static void 1561 1.1 mrg attrs_list_union (attrs **dstp, attrs *src) 1562 1.1 mrg { 1563 1.1 mrg for (; src; src = src->next) 1564 1.1 mrg { 1565 1.1 mrg if (!attrs_list_member (*dstp, src->dv, src->offset)) 1566 1.1 mrg attrs_list_insert (dstp, src->dv, src->offset, src->loc); 1567 1.1 mrg } 1568 1.1 mrg } 1569 1.1 mrg 1570 1.1 mrg /* Combine nodes that are not onepart nodes from SRC and SRC2 into 1571 1.1 mrg *DSTP. */ 1572 1.1 mrg 1573 1.1 mrg static void 1574 1.1 mrg attrs_list_mpdv_union (attrs **dstp, attrs *src, attrs *src2) 1575 1.1 mrg { 1576 1.1 mrg gcc_assert (!*dstp); 1577 1.1 mrg for (; src; src = src->next) 1578 1.1 mrg { 1579 1.1 mrg if (!dv_onepart_p (src->dv)) 1580 1.1 mrg attrs_list_insert (dstp, src->dv, src->offset, src->loc); 1581 1.1 mrg } 1582 1.1 mrg for (src = src2; src; src = src->next) 1583 1.1 mrg { 1584 1.1 mrg if (!dv_onepart_p (src->dv) 1585 1.1 mrg && !attrs_list_member (*dstp, src->dv, src->offset)) 1586 1.1 mrg attrs_list_insert (dstp, src->dv, src->offset, src->loc); 1587 1.1 mrg } 1588 1.1 mrg } 1589 1.1 mrg 1590 1.1 mrg /* Shared hashtable support. */ 1591 1.1 mrg 1592 1.1 mrg /* Return true if VARS is shared. */ 1593 1.1 mrg 1594 1.1 mrg static inline bool 1595 1.1 mrg shared_hash_shared (shared_hash *vars) 1596 1.1 mrg { 1597 1.1 mrg return vars->refcount > 1; 1598 1.1 mrg } 1599 1.1 mrg 1600 1.1 mrg /* Return the hash table for VARS. */ 1601 1.1 mrg 1602 1.1 mrg static inline variable_table_type * 1603 1.1 mrg shared_hash_htab (shared_hash *vars) 1604 1.1 mrg { 1605 1.1 mrg return vars->htab; 1606 1.1 mrg } 1607 1.1 mrg 1608 1.1 mrg /* Return true if VAR is shared, or maybe because VARS is shared. */ 1609 1.1 mrg 1610 1.1 mrg static inline bool 1611 1.1 mrg shared_var_p (variable *var, shared_hash *vars) 1612 1.1 mrg { 1613 1.1 mrg /* Don't count an entry in the changed_variables table as a duplicate. 
*/ 1614 1.1 mrg return ((var->refcount > 1 + (int) var->in_changed_variables) 1615 1.1 mrg || shared_hash_shared (vars)); 1616 1.1 mrg } 1617 1.1 mrg 1618 1.1 mrg /* Copy variables into a new hash table. */ 1619 1.1 mrg 1620 1.1 mrg static shared_hash * 1621 1.1 mrg shared_hash_unshare (shared_hash *vars) 1622 1.1 mrg { 1623 1.1 mrg shared_hash *new_vars = new shared_hash; 1624 1.1 mrg gcc_assert (vars->refcount > 1); 1625 1.1 mrg new_vars->refcount = 1; 1626 1.1 mrg new_vars->htab = new variable_table_type (vars->htab->elements () + 3); 1627 1.1 mrg vars_copy (new_vars->htab, vars->htab); 1628 1.1 mrg vars->refcount--; 1629 1.1 mrg return new_vars; 1630 1.1 mrg } 1631 1.1 mrg 1632 1.1 mrg /* Increment reference counter on VARS and return it. */ 1633 1.1 mrg 1634 1.1 mrg static inline shared_hash * 1635 1.1 mrg shared_hash_copy (shared_hash *vars) 1636 1.1 mrg { 1637 1.1 mrg vars->refcount++; 1638 1.1 mrg return vars; 1639 1.1 mrg } 1640 1.1 mrg 1641 1.1 mrg /* Decrement reference counter and destroy hash table if not shared 1642 1.1 mrg anymore. */ 1643 1.1 mrg 1644 1.1 mrg static void 1645 1.1 mrg shared_hash_destroy (shared_hash *vars) 1646 1.1 mrg { 1647 1.1 mrg gcc_checking_assert (vars->refcount > 0); 1648 1.1 mrg if (--vars->refcount == 0) 1649 1.1 mrg { 1650 1.1 mrg delete vars->htab; 1651 1.1 mrg delete vars; 1652 1.1 mrg } 1653 1.1 mrg } 1654 1.1 mrg 1655 1.1 mrg /* Unshare *PVARS if shared and return slot for DV. If INS is 1656 1.1 mrg INSERT, insert it if not already present. 
*/ 1657 1.1 mrg 1658 1.1 mrg static inline variable ** 1659 1.1 mrg shared_hash_find_slot_unshare_1 (shared_hash **pvars, decl_or_value dv, 1660 1.1 mrg hashval_t dvhash, enum insert_option ins) 1661 1.1 mrg { 1662 1.1 mrg if (shared_hash_shared (*pvars)) 1663 1.1 mrg *pvars = shared_hash_unshare (*pvars); 1664 1.1 mrg return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins); 1665 1.1 mrg } 1666 1.1 mrg 1667 1.1 mrg static inline variable ** 1668 1.1 mrg shared_hash_find_slot_unshare (shared_hash **pvars, decl_or_value dv, 1669 1.1 mrg enum insert_option ins) 1670 1.1 mrg { 1671 1.1 mrg return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins); 1672 1.1 mrg } 1673 1.1 mrg 1674 1.1 mrg /* Return slot for DV, if it is already present in the hash table. 1675 1.1 mrg If it is not present, insert it only VARS is not shared, otherwise 1676 1.1 mrg return NULL. */ 1677 1.1 mrg 1678 1.1 mrg static inline variable ** 1679 1.1 mrg shared_hash_find_slot_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash) 1680 1.1 mrg { 1681 1.1 mrg return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, 1682 1.1 mrg shared_hash_shared (vars) 1683 1.1 mrg ? NO_INSERT : INSERT); 1684 1.1 mrg } 1685 1.1 mrg 1686 1.1 mrg static inline variable ** 1687 1.1 mrg shared_hash_find_slot (shared_hash *vars, decl_or_value dv) 1688 1.1 mrg { 1689 1.1 mrg return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv)); 1690 1.1 mrg } 1691 1.1 mrg 1692 1.1 mrg /* Return slot for DV only if it is already present in the hash table. 
*/ 1693 1.1 mrg 1694 1.1 mrg static inline variable ** 1695 1.1 mrg shared_hash_find_slot_noinsert_1 (shared_hash *vars, decl_or_value dv, 1696 1.1 mrg hashval_t dvhash) 1697 1.1 mrg { 1698 1.1 mrg return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT); 1699 1.1 mrg } 1700 1.1 mrg 1701 1.1 mrg static inline variable ** 1702 1.1 mrg shared_hash_find_slot_noinsert (shared_hash *vars, decl_or_value dv) 1703 1.1 mrg { 1704 1.1 mrg return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv)); 1705 1.1 mrg } 1706 1.1 mrg 1707 1.1 mrg /* Return variable for DV or NULL if not already present in the hash 1708 1.1 mrg table. */ 1709 1.1 mrg 1710 1.1 mrg static inline variable * 1711 1.1 mrg shared_hash_find_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash) 1712 1.1 mrg { 1713 1.1 mrg return shared_hash_htab (vars)->find_with_hash (dv, dvhash); 1714 1.1 mrg } 1715 1.1 mrg 1716 1.1 mrg static inline variable * 1717 1.1 mrg shared_hash_find (shared_hash *vars, decl_or_value dv) 1718 1.1 mrg { 1719 1.1 mrg return shared_hash_find_1 (vars, dv, dv_htab_hash (dv)); 1720 1.1 mrg } 1721 1.1 mrg 1722 1.1 mrg /* Return true if TVAL is better than CVAL as a canonival value. We 1723 1.1 mrg choose lowest-numbered VALUEs, using the RTX address as a 1724 1.1 mrg tie-breaker. The idea is to arrange them into a star topology, 1725 1.1 mrg such that all of them are at most one step away from the canonical 1726 1.1 mrg value, and the canonical value has backlinks to all of them, in 1727 1.1 mrg addition to all the actual locations. We don't enforce this 1728 1.1 mrg topology throughout the entire dataflow analysis, though. 
*/

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}

/* Cleared by unshare_variable whenever a variable had to be unshared.
   NOTE(review): presumably consulted by the dataflow merge code to
   decide whether a destination set may keep sharing -- confirm against
   the callers elsewhere in this file.  */
static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it to dataflow set SET,
   replacing VAR in SLOT.  Location chains are deep-copied; each copied
   location's init status is raised at least to INITIALIZED.  */

static variable **
unshare_variable (dataflow_set *set, variable **slot, variable *var,
		  enum var_init_status initialized)
{
  variable *new_var;
  int i;

  /* The copy starts with a single reference; the original loses the
     reference this dataflow set held.  */
  new_var = onepart_pool_allocate (var->onepart);
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain *node;
      location_chain **nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);

      /* Deep-copy the location chain, preserving its order.  */
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain *new_lc;

	  new_lc = new location_chain;
	  new_lc->next = NULL;
	  /* Keep the stronger (larger) of the node's and the caller's
	     initialization status.  */
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  /* MEM set sources are dropped in the copy -- NOTE(review):
	     presumably because a MEM may be invalidated by later
	     stores; confirm.  */
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  /* SLOT may be stale if the set itself had to be unshared, or if a
     different table is being traversed; recompute it.  */
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;

  /* If the old variable is pending in changed_variables, replace it
     there with the copy, transferring that table's reference.  */
  if (var->in_changed_variables)
    {
      variable **cslot
	= changed_variables->find_slot_with_hash (var->dv,
						  dv_htab_hash (var->dv),
						  NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}

/* Copy all variables from hash table SRC to hash table DST, sharing
   the variable objects (each gains one reference).  */

static void
vars_copy (variable_table_type *dst, variable_table_type *src)
{
  variable_iterator_type hi;
  variable *var;

  FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
    {
      variable **dstp;
      var->refcount++;
      dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
				       INSERT);
      *dstp = var;
    }
}

/* Map a decl to its main debug decl, if it has one.  */

static inline tree
var_debug_decl (tree decl)
{
  if (decl && VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
    {
      tree debugdecl = DECL_DEBUG_EXPR (decl);
      if (DECL_P (debugdecl))
	decl = debugdecl;
    }

  return decl;
}

/* Set the register LOC to contain DV, OFFSET.
*/ 1856 1.1 mrg 1857 1.1 mrg static void 1858 1.1 mrg var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized, 1859 1.1 mrg decl_or_value dv, HOST_WIDE_INT offset, rtx set_src, 1860 1.1 mrg enum insert_option iopt) 1861 1.1 mrg { 1862 1.1 mrg attrs *node; 1863 1.1 mrg bool decl_p = dv_is_decl_p (dv); 1864 1.1 mrg 1865 1.1 mrg if (decl_p) 1866 1.1 mrg dv = dv_from_decl (var_debug_decl (dv_as_decl (dv))); 1867 1.1 mrg 1868 1.1 mrg for (node = set->regs[REGNO (loc)]; node; node = node->next) 1869 1.1 mrg if (dv_as_opaque (node->dv) == dv_as_opaque (dv) 1870 1.1 mrg && node->offset == offset) 1871 1.1 mrg break; 1872 1.1 mrg if (!node) 1873 1.1 mrg attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc); 1874 1.1 mrg set_variable_part (set, loc, dv, offset, initialized, set_src, iopt); 1875 1.1 mrg } 1876 1.1 mrg 1877 1.1 mrg /* Return true if we should track a location that is OFFSET bytes from 1878 1.1 mrg a variable. Store the constant offset in *OFFSET_OUT if so. */ 1879 1.1 mrg 1880 1.1 mrg static bool 1881 1.1 mrg track_offset_p (poly_int64 offset, HOST_WIDE_INT *offset_out) 1882 1.1 mrg { 1883 1.1 mrg HOST_WIDE_INT const_offset; 1884 1.1 mrg if (!offset.is_constant (&const_offset) 1885 1.1 mrg || !IN_RANGE (const_offset, 0, MAX_VAR_PARTS - 1)) 1886 1.1 mrg return false; 1887 1.1 mrg *offset_out = const_offset; 1888 1.1 mrg return true; 1889 1.1 mrg } 1890 1.1 mrg 1891 1.1 mrg /* Return the offset of a register that track_offset_p says we 1892 1.1 mrg should track. */ 1893 1.1 mrg 1894 1.1 mrg static HOST_WIDE_INT 1895 1.1 mrg get_tracked_reg_offset (rtx loc) 1896 1.1 mrg { 1897 1.1 mrg HOST_WIDE_INT offset; 1898 1.1 mrg if (!track_offset_p (REG_OFFSET (loc), &offset)) 1899 1.1 mrg gcc_unreachable (); 1900 1.1 mrg return offset; 1901 1.1 mrg } 1902 1.1 mrg 1903 1.1 mrg /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). 
*/ 1904 1.1 mrg 1905 1.1 mrg static void 1906 1.1 mrg var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized, 1907 1.1 mrg rtx set_src) 1908 1.1 mrg { 1909 1.1 mrg tree decl = REG_EXPR (loc); 1910 1.1 mrg HOST_WIDE_INT offset = get_tracked_reg_offset (loc); 1911 1.1 mrg 1912 1.1 mrg var_reg_decl_set (set, loc, initialized, 1913 1.1 mrg dv_from_decl (decl), offset, set_src, INSERT); 1914 1.1 mrg } 1915 1.1 mrg 1916 1.1 mrg static enum var_init_status 1917 1.1 mrg get_init_value (dataflow_set *set, rtx loc, decl_or_value dv) 1918 1.1 mrg { 1919 1.1 mrg variable *var; 1920 1.1 mrg int i; 1921 1.1 mrg enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN; 1922 1.1 mrg 1923 1.1 mrg if (! flag_var_tracking_uninit) 1924 1.1 mrg return VAR_INIT_STATUS_INITIALIZED; 1925 1.1 mrg 1926 1.1 mrg var = shared_hash_find (set->vars, dv); 1927 1.1 mrg if (var) 1928 1.1 mrg { 1929 1.1 mrg for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++) 1930 1.1 mrg { 1931 1.1 mrg location_chain *nextp; 1932 1.1 mrg for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next) 1933 1.1 mrg if (rtx_equal_p (nextp->loc, loc)) 1934 1.1 mrg { 1935 1.1 mrg ret_val = nextp->init; 1936 1.1 mrg break; 1937 1.1 mrg } 1938 1.1 mrg } 1939 1.1 mrg } 1940 1.1 mrg 1941 1.1 mrg return ret_val; 1942 1.1 mrg } 1943 1.1 mrg 1944 1.1 mrg /* Delete current content of register LOC in dataflow set SET and set 1945 1.1 mrg the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If 1946 1.1 mrg MODIFY is true, any other live copies of the same variable part are 1947 1.1 mrg also deleted from the dataflow set, otherwise the variable part is 1948 1.1 mrg assumed to be copied from another location holding the same 1949 1.1 mrg part. 
*/

static void
var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
			enum var_init_status initialized, rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
  attrs *node, *next;
  attrs **nextp;

  decl = var_debug_decl (decl);

  if (initialized == VAR_INIT_STATUS_UNKNOWN)
    initialized = get_init_value (set, loc, dv_from_decl (decl));

  /* Walk the register's attribute list, deleting every association
     except the one for (DECL, OFFSET), which is retargeted to LOC.
     NOTE(review): comparing the opaque dv against the DECL pointer
     relies on a decl-based dv being the decl itself -- confirm
     against dv_from_decl/dv_as_opaque.  */
  nextp = &set->regs[REGNO (loc)];
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (dv_as_opaque (node->dv) != decl || node->offset != offset)
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  delete node;
	  /* Unlink NODE; NEXTP keeps pointing at the link to patch.  */
	  *nextp = next;
	}
      else
	{
	  node->loc = loc;
	  nextp = &node->next;
	}
    }
  if (modify)
    clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
  var_reg_set (set, loc, initialized, set_src);
}

/* Delete the association of register LOC in dataflow set SET with any
   variables that aren't onepart.  If CLOBBER is true, also delete any
   other live copies of the same variable part, and delete the
   association with onepart dvs too.  */

static void
var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
{
  attrs **nextp = &set->regs[REGNO (loc)];
  attrs *node, *next;

  /* When clobbering, first drop the variable part of the register's
     own decl, provided its offset is trackable.  */
  HOST_WIDE_INT offset;
  if (clobber && track_offset_p (REG_OFFSET (loc), &offset))
    {
      tree decl = REG_EXPR (loc);

      decl = var_debug_decl (decl);

      clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
    }

  /* Then delete the register's associations: all of them when
     clobbering, otherwise only the non-onepart ones.  */
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (clobber || !dv_onepart_p (node->dv))
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  delete node;
	  *nextp = next;
	}
      else
	nextp = &node->next;
    }
}

/* Delete content of register with number REGNO in dataflow set SET.  */

static void
var_regno_delete (dataflow_set *set, int regno)
{
  attrs **reg = &set->regs[regno];
  attrs *node, *next;

  for (node = *reg; node; node = next)
    {
      next = node->next;
      delete_variable_part (set, node->loc, node->dv, node->offset);
      delete node;
    }
  *reg = NULL;
}

/* Return true if I is the negated value of a power of two.  */
static bool
negative_power_of_two_p (HOST_WIDE_INT i)
{
  /* Negate in the unsigned type to avoid signed-overflow UB when I is
     the most negative value.  */
  unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
  return pow2_or_zerop (x);
}

/* Strip constant offsets and alignments off of LOC.  Return the base
   expression.
*/

static rtx
vt_get_canonicalize_base (rtx loc)
{
  /* Peel PLUS of a constant, and AND with a constant that is the
     negation of a power of two (an alignment mask), keeping only the
     base expression.  */
  while ((GET_CODE (loc) == PLUS
	  || GET_CODE (loc) == AND)
	 && GET_CODE (XEXP (loc, 1)) == CONST_INT
	 && (GET_CODE (loc) != AND
	     || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
    loc = XEXP (loc, 0);

  return loc;
}

/* This caches canonicalized addresses for VALUEs, computed using
   information in the global cselib table.  */
static hash_map<rtx, rtx> *global_get_addr_cache;

/* This caches canonicalized addresses for VALUEs, computed using
   information from the global cache and information pertaining to a
   basic block being analyzed.  */
static hash_map<rtx, rtx> *local_get_addr_cache;

static rtx vt_canonicalize_addr (dataflow_set *, rtx);

/* Return the canonical address for LOC, that must be a VALUE, using a
   cached global equivalence or computing it and storing it in the
   global cache.  */

static rtx
get_addr_from_global_cache (rtx const loc)
{
  rtx x;

  gcc_checking_assert (GET_CODE (loc) == VALUE);

  bool existed;
  rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
  if (existed)
    return *slot;

  x = canon_rtx (get_addr (loc));

  /* Tentative, avoiding infinite recursion.  */
  *slot = x;

  if (x != loc)
    {
      rtx nx = vt_canonicalize_addr (NULL, x);
      if (nx != x)
	{
	  /* The table may have moved during recursion, recompute
	     SLOT.  */
	  *global_get_addr_cache->get (loc) = x = nx;
	}
    }

  return x;
}

/* Return the canonical address for LOC, that must be a VALUE, using a
   cached local equivalence or computing it and storing it in the
   local cache.  */

static rtx
get_addr_from_local_cache (dataflow_set *set, rtx const loc)
{
  rtx x;
  decl_or_value dv;
  variable *var;
  location_chain *l;

  gcc_checking_assert (GET_CODE (loc) == VALUE);

  bool existed;
  rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
  if (existed)
    return *slot;

  /* Seed with the function-wide equivalence first.  */
  x = get_addr_from_global_cache (loc);

  /* Tentative, avoiding infinite recursion.  */
  *slot = x;

  /* Recurse to cache local expansion of X, or if we need to search
     for a VALUE in the expansion.  */
  if (x != loc)
    {
      rtx nx = vt_canonicalize_addr (set, x);
      if (nx != x)
	{
	  /* The table may have moved during recursion; re-fetch the
	     slot before storing.  */
	  slot = local_get_addr_cache->get (loc);
	  *slot = x = nx;
	}
      return x;
    }

  dv = dv_from_rtx (x);
  var = shared_hash_find (set->vars, dv);
  if (!var)
    return x;

  /* Look for an improved equivalent expression: a location whose base
     is a VALUE preferable (per canon_value_cmp) to LOC.  */
  for (l = var->var_part[0].loc_chain; l; l = l->next)
    {
      rtx base = vt_get_canonicalize_base (l->loc);
      if (GET_CODE (base) == VALUE
	  && canon_value_cmp (base, loc))
	{
	  rtx nx = vt_canonicalize_addr (set, l->loc);
	  if (x != nx)
	    {
	      slot = local_get_addr_cache->get (loc);
	      *slot = x = nx;
	    }
	  break;
	}
    }

  return x;
}

/* Canonicalize LOC using equivalences from SET in addition to those
   in the cselib static table.  It expects a VALUE-based expression,
   and it will only substitute VALUEs with other VALUEs or
   function-global equivalences, so that, if two addresses have base
   VALUEs that are locally or globally related in ways that
   memrefs_conflict_p cares about, they will both canonicalize to
   expressions that have the same base VALUE.

   The use of VALUEs as canonical base addresses enables the canonical
   RTXs to remain unchanged globally, if they resolve to a constant,
   or throughout a basic block otherwise, so that they can be cached
   and the cache needs not be invalidated when REGs, MEMs or such
   change.
*/

static rtx
vt_canonicalize_addr (dataflow_set *set, rtx oloc)
{
  poly_int64 ofst = 0, term;
  machine_mode mode = GET_MODE (oloc);
  rtx loc = oloc;
  rtx x;
  bool retry = true;

  while (retry)
    {
      /* Strip constant offsets off the base, accumulating them in
	 OFST so they can be re-added at the end.  */
      while (GET_CODE (loc) == PLUS
	     && poly_int_rtx_p (XEXP (loc, 1), &term))
	{
	  ofst += term;
	  loc = XEXP (loc, 0);
	}

      /* Alignment operations can't normally be combined, so just
	 canonicalize the base and we're done.  We'll normally have
	 only one stack alignment anyway.  */
      if (GET_CODE (loc) == AND
	  && GET_CODE (XEXP (loc, 1)) == CONST_INT
	  && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
	{
	  x = vt_canonicalize_addr (set, XEXP (loc, 0));
	  if (x != XEXP (loc, 0))
	    loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
	  retry = false;
	}

      if (GET_CODE (loc) == VALUE)
	{
	  /* Expand the VALUE through the local cache when a dataflow
	     set is available, else through the global one.  */
	  if (set)
	    loc = get_addr_from_local_cache (set, loc);
	  else
	    loc = get_addr_from_global_cache (loc);

	  /* Consolidate plus_constants.  */
	  while (maybe_ne (ofst, 0)
		 && GET_CODE (loc) == PLUS
		 && poly_int_rtx_p (XEXP (loc, 1), &term))
	    {
	      ofst += term;
	      loc = XEXP (loc, 0);
	    }

	  retry = false;
	}
      else
	{
	  /* Otherwise canonicalize via alias analysis and loop again
	     only if that changed anything.  */
	  x = canon_rtx (loc);
	  if (retry)
	    retry = (x != loc);
	  loc = x;
	}
    }

  /* Add OFST back in.  */
  if (maybe_ne (ofst, 0))
    {
      /* Don't build new RTL if we can help it.  */
      if (strip_offset (oloc, &term) == loc && known_eq (term, ofst))
	return oloc;

      loc = plus_constant (mode, loc, ofst);
    }

  return loc;
}

/* Return true iff there's a true dependence between MLOC and LOC.
   MADDR must be a canonicalized version of MLOC's address.  */

static inline bool
vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
{
  if (GET_CODE (loc) != MEM)
    return false;

  rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
  if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
    return false;

  return true;
}

/* Hold parameters for the hashtab traversal function
   drop_overlapping_mem_locs, see below.  */

struct overlapping_mems
{
  dataflow_set *set;
  rtx loc, addr;
};

/* Remove all MEMs that overlap with COMS->LOC from the location list
   of a hash table entry for a onepart variable.  COMS->ADDR must be a
   canonicalized form of COMS->LOC's address, and COMS->LOC must be
   canonicalized itself.
*/ 2284 1.1 mrg 2285 1.1 mrg int 2286 1.1 mrg drop_overlapping_mem_locs (variable **slot, overlapping_mems *coms) 2287 1.1 mrg { 2288 1.1 mrg dataflow_set *set = coms->set; 2289 1.1 mrg rtx mloc = coms->loc, addr = coms->addr; 2290 1.1 mrg variable *var = *slot; 2291 1.1 mrg 2292 1.1 mrg if (var->onepart != NOT_ONEPART) 2293 1.1 mrg { 2294 1.1 mrg location_chain *loc, **locp; 2295 1.1 mrg bool changed = false; 2296 1.1 mrg rtx cur_loc; 2297 1.1 mrg 2298 1.1 mrg gcc_assert (var->n_var_parts == 1); 2299 1.1 mrg 2300 1.1 mrg if (shared_var_p (var, set->vars)) 2301 1.1 mrg { 2302 1.1 mrg for (loc = var->var_part[0].loc_chain; loc; loc = loc->next) 2303 1.1 mrg if (vt_canon_true_dep (set, mloc, addr, loc->loc)) 2304 1.1 mrg break; 2305 1.1 mrg 2306 1.1 mrg if (!loc) 2307 1.1 mrg return 1; 2308 1.1 mrg 2309 1.1 mrg slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN); 2310 1.1 mrg var = *slot; 2311 1.1 mrg gcc_assert (var->n_var_parts == 1); 2312 1.1 mrg } 2313 1.1 mrg 2314 1.1 mrg if (VAR_LOC_1PAUX (var)) 2315 1.1 mrg cur_loc = VAR_LOC_FROM (var); 2316 1.1 mrg else 2317 1.1 mrg cur_loc = var->var_part[0].cur_loc; 2318 1.1 mrg 2319 1.1 mrg for (locp = &var->var_part[0].loc_chain, loc = *locp; 2320 1.1 mrg loc; loc = *locp) 2321 1.1 mrg { 2322 1.1 mrg if (!vt_canon_true_dep (set, mloc, addr, loc->loc)) 2323 1.1 mrg { 2324 1.1 mrg locp = &loc->next; 2325 1.1 mrg continue; 2326 1.1 mrg } 2327 1.1 mrg 2328 1.1 mrg *locp = loc->next; 2329 1.1 mrg /* If we have deleted the location which was last emitted 2330 1.1 mrg we have to emit new location so add the variable to set 2331 1.1 mrg of changed variables. 
*/ 2332 1.1 mrg if (cur_loc == loc->loc) 2333 1.1 mrg { 2334 1.1 mrg changed = true; 2335 1.1 mrg var->var_part[0].cur_loc = NULL; 2336 1.1 mrg if (VAR_LOC_1PAUX (var)) 2337 1.1 mrg VAR_LOC_FROM (var) = NULL; 2338 1.1 mrg } 2339 1.1 mrg delete loc; 2340 1.1 mrg } 2341 1.1 mrg 2342 1.1 mrg if (!var->var_part[0].loc_chain) 2343 1.1 mrg { 2344 1.1 mrg var->n_var_parts--; 2345 1.1 mrg changed = true; 2346 1.1 mrg } 2347 1.1 mrg if (changed) 2348 1.1 mrg variable_was_changed (var, set); 2349 1.1 mrg } 2350 1.1 mrg 2351 1.1 mrg return 1; 2352 1.1 mrg } 2353 1.1 mrg 2354 1.1 mrg /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */ 2355 1.1 mrg 2356 1.1 mrg static void 2357 1.1 mrg clobber_overlapping_mems (dataflow_set *set, rtx loc) 2358 1.1 mrg { 2359 1.1 mrg struct overlapping_mems coms; 2360 1.1 mrg 2361 1.1 mrg gcc_checking_assert (GET_CODE (loc) == MEM); 2362 1.1 mrg 2363 1.1 mrg coms.set = set; 2364 1.1 mrg coms.loc = canon_rtx (loc); 2365 1.1 mrg coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0)); 2366 1.1 mrg 2367 1.1 mrg set->traversed_vars = set->vars; 2368 1.1 mrg shared_hash_htab (set->vars) 2369 1.1 mrg ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms); 2370 1.1 mrg set->traversed_vars = NULL; 2371 1.1 mrg } 2372 1.1 mrg 2373 1.1 mrg /* Set the location of DV, OFFSET as the MEM LOC. */ 2374 1.1 mrg 2375 1.1 mrg static void 2376 1.1 mrg var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized, 2377 1.1 mrg decl_or_value dv, HOST_WIDE_INT offset, rtx set_src, 2378 1.1 mrg enum insert_option iopt) 2379 1.1 mrg { 2380 1.1 mrg if (dv_is_decl_p (dv)) 2381 1.1 mrg dv = dv_from_decl (var_debug_decl (dv_as_decl (dv))); 2382 1.1 mrg 2383 1.1 mrg set_variable_part (set, loc, dv, offset, initialized, set_src, iopt); 2384 1.1 mrg } 2385 1.1 mrg 2386 1.1 mrg /* Set the location part of variable MEM_EXPR (LOC) in dataflow set 2387 1.1 mrg SET to LOC. 
2388 1.1 mrg Adjust the address first if it is stack pointer based. */ 2389 1.1 mrg 2390 1.1 mrg static void 2391 1.1 mrg var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized, 2392 1.1 mrg rtx set_src) 2393 1.1 mrg { 2394 1.1 mrg tree decl = MEM_EXPR (loc); 2395 1.1 mrg HOST_WIDE_INT offset = int_mem_offset (loc); 2396 1.1 mrg 2397 1.1 mrg var_mem_decl_set (set, loc, initialized, 2398 1.1 mrg dv_from_decl (decl), offset, set_src, INSERT); 2399 1.1 mrg } 2400 1.1 mrg 2401 1.1 mrg /* Delete and set the location part of variable MEM_EXPR (LOC) in 2402 1.1 mrg dataflow set SET to LOC. If MODIFY is true, any other live copies 2403 1.1 mrg of the same variable part are also deleted from the dataflow set, 2404 1.1 mrg otherwise the variable part is assumed to be copied from another 2405 1.1 mrg location holding the same part. 2406 1.1 mrg Adjust the address first if it is stack pointer based. */ 2407 1.1 mrg 2408 1.1 mrg static void 2409 1.1 mrg var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify, 2410 1.1 mrg enum var_init_status initialized, rtx set_src) 2411 1.1 mrg { 2412 1.1 mrg tree decl = MEM_EXPR (loc); 2413 1.1 mrg HOST_WIDE_INT offset = int_mem_offset (loc); 2414 1.1 mrg 2415 1.1 mrg clobber_overlapping_mems (set, loc); 2416 1.1 mrg decl = var_debug_decl (decl); 2417 1.1 mrg 2418 1.1 mrg if (initialized == VAR_INIT_STATUS_UNKNOWN) 2419 1.1 mrg initialized = get_init_value (set, loc, dv_from_decl (decl)); 2420 1.1 mrg 2421 1.1 mrg if (modify) 2422 1.1 mrg clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src); 2423 1.1 mrg var_mem_set (set, loc, initialized, set_src); 2424 1.1 mrg } 2425 1.1 mrg 2426 1.1 mrg /* Delete the location part LOC from dataflow set SET. If CLOBBER is 2427 1.1 mrg true, also delete any other live copies of the same variable part. 2428 1.1 mrg Adjust the address first if it is stack pointer based. 
*/ 2429 1.1 mrg 2430 1.1 mrg static void 2431 1.1 mrg var_mem_delete (dataflow_set *set, rtx loc, bool clobber) 2432 1.1 mrg { 2433 1.1 mrg tree decl = MEM_EXPR (loc); 2434 1.1 mrg HOST_WIDE_INT offset = int_mem_offset (loc); 2435 1.1 mrg 2436 1.1 mrg clobber_overlapping_mems (set, loc); 2437 1.1 mrg decl = var_debug_decl (decl); 2438 1.1 mrg if (clobber) 2439 1.1 mrg clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL); 2440 1.1 mrg delete_variable_part (set, loc, dv_from_decl (decl), offset); 2441 1.1 mrg } 2442 1.1 mrg 2443 1.1 mrg /* Return true if LOC should not be expanded for location expressions, 2444 1.1 mrg or used in them. */ 2445 1.1 mrg 2446 1.1 mrg static inline bool 2447 1.1 mrg unsuitable_loc (rtx loc) 2448 1.1 mrg { 2449 1.1 mrg switch (GET_CODE (loc)) 2450 1.1 mrg { 2451 1.1 mrg case PC: 2452 1.1 mrg case SCRATCH: 2453 1.1 mrg case ASM_INPUT: 2454 1.1 mrg case ASM_OPERANDS: 2455 1.1 mrg return true; 2456 1.1 mrg 2457 1.1 mrg default: 2458 1.1 mrg return false; 2459 1.1 mrg } 2460 1.1 mrg } 2461 1.1 mrg 2462 1.1 mrg /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values 2463 1.1 mrg bound to it. 
*/ 2464 1.1 mrg 2465 1.1 mrg static inline void 2466 1.1 mrg val_bind (dataflow_set *set, rtx val, rtx loc, bool modified) 2467 1.1 mrg { 2468 1.1 mrg if (REG_P (loc)) 2469 1.1 mrg { 2470 1.1 mrg if (modified) 2471 1.1 mrg var_regno_delete (set, REGNO (loc)); 2472 1.1 mrg var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED, 2473 1.1 mrg dv_from_value (val), 0, NULL_RTX, INSERT); 2474 1.1 mrg } 2475 1.1 mrg else if (MEM_P (loc)) 2476 1.1 mrg { 2477 1.1 mrg struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs; 2478 1.1 mrg 2479 1.1 mrg if (modified) 2480 1.1 mrg clobber_overlapping_mems (set, loc); 2481 1.1 mrg 2482 1.1 mrg if (l && GET_CODE (l->loc) == VALUE) 2483 1.1 mrg l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs; 2484 1.1 mrg 2485 1.1 mrg /* If this MEM is a global constant, we don't need it in the 2486 1.1 mrg dynamic tables. ??? We should test this before emitting the 2487 1.1 mrg micro-op in the first place. */ 2488 1.1 mrg while (l) 2489 1.1 mrg if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0)) 2490 1.1 mrg break; 2491 1.1 mrg else 2492 1.1 mrg l = l->next; 2493 1.1 mrg 2494 1.1 mrg if (!l) 2495 1.1 mrg var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED, 2496 1.1 mrg dv_from_value (val), 0, NULL_RTX, INSERT); 2497 1.1 mrg } 2498 1.1 mrg else 2499 1.1 mrg { 2500 1.1 mrg /* Other kinds of equivalences are necessarily static, at least 2501 1.1 mrg so long as we do not perform substitutions while merging 2502 1.1 mrg expressions. */ 2503 1.1 mrg gcc_unreachable (); 2504 1.1 mrg set_variable_part (set, loc, dv_from_value (val), 0, 2505 1.1 mrg VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT); 2506 1.1 mrg } 2507 1.1 mrg } 2508 1.1 mrg 2509 1.1 mrg /* Bind a value to a location it was just stored in. If MODIFIED 2510 1.1 mrg holds, assume the location was modified, detaching it from any 2511 1.1 mrg values bound to it. 
*/ 2512 1.1 mrg 2513 1.1 mrg static void 2514 1.1 mrg val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn, 2515 1.1 mrg bool modified) 2516 1.1 mrg { 2517 1.1 mrg cselib_val *v = CSELIB_VAL_PTR (val); 2518 1.1 mrg 2519 1.1 mrg gcc_assert (cselib_preserved_value_p (v)); 2520 1.1 mrg 2521 1.1 mrg if (dump_file) 2522 1.1 mrg { 2523 1.1 mrg fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0); 2524 1.1 mrg print_inline_rtx (dump_file, loc, 0); 2525 1.1 mrg fprintf (dump_file, " evaluates to "); 2526 1.1 mrg print_inline_rtx (dump_file, val, 0); 2527 1.1 mrg if (v->locs) 2528 1.1 mrg { 2529 1.1 mrg struct elt_loc_list *l; 2530 1.1 mrg for (l = v->locs; l; l = l->next) 2531 1.1 mrg { 2532 1.1 mrg fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn)); 2533 1.1 mrg print_inline_rtx (dump_file, l->loc, 0); 2534 1.1 mrg } 2535 1.1 mrg } 2536 1.1 mrg fprintf (dump_file, "\n"); 2537 1.1 mrg } 2538 1.1 mrg 2539 1.1 mrg gcc_checking_assert (!unsuitable_loc (loc)); 2540 1.1 mrg 2541 1.1 mrg val_bind (set, val, loc, modified); 2542 1.1 mrg } 2543 1.1 mrg 2544 1.1 mrg /* Clear (canonical address) slots that reference X. */ 2545 1.1 mrg 2546 1.1 mrg bool 2547 1.1 mrg local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x) 2548 1.1 mrg { 2549 1.1 mrg if (vt_get_canonicalize_base (*slot) == x) 2550 1.1 mrg *slot = NULL; 2551 1.1 mrg return true; 2552 1.1 mrg } 2553 1.1 mrg 2554 1.1 mrg /* Reset this node, detaching all its equivalences. Return the slot 2555 1.1 mrg in the variable hash table that holds dv, if there is one. 
*/ 2556 1.1 mrg 2557 1.1 mrg static void 2558 1.1 mrg val_reset (dataflow_set *set, decl_or_value dv) 2559 1.1 mrg { 2560 1.1 mrg variable *var = shared_hash_find (set->vars, dv) ; 2561 1.1 mrg location_chain *node; 2562 1.1 mrg rtx cval; 2563 1.1 mrg 2564 1.1 mrg if (!var || !var->n_var_parts) 2565 1.1 mrg return; 2566 1.1 mrg 2567 1.1 mrg gcc_assert (var->n_var_parts == 1); 2568 1.1 mrg 2569 1.1 mrg if (var->onepart == ONEPART_VALUE) 2570 1.1 mrg { 2571 1.1 mrg rtx x = dv_as_value (dv); 2572 1.1 mrg 2573 1.1 mrg /* Relationships in the global cache don't change, so reset the 2574 1.1 mrg local cache entry only. */ 2575 1.1 mrg rtx *slot = local_get_addr_cache->get (x); 2576 1.1 mrg if (slot) 2577 1.1 mrg { 2578 1.1 mrg /* If the value resolved back to itself, odds are that other 2579 1.1 mrg values may have cached it too. These entries now refer 2580 1.1 mrg to the old X, so detach them too. Entries that used the 2581 1.1 mrg old X but resolved to something else remain ok as long as 2582 1.1 mrg that something else isn't also reset. */ 2583 1.1 mrg if (*slot == x) 2584 1.1 mrg local_get_addr_cache 2585 1.1 mrg ->traverse<rtx, local_get_addr_clear_given_value> (x); 2586 1.1 mrg *slot = NULL; 2587 1.1 mrg } 2588 1.1 mrg } 2589 1.1 mrg 2590 1.1 mrg cval = NULL; 2591 1.1 mrg for (node = var->var_part[0].loc_chain; node; node = node->next) 2592 1.1 mrg if (GET_CODE (node->loc) == VALUE 2593 1.1 mrg && canon_value_cmp (node->loc, cval)) 2594 1.1 mrg cval = node->loc; 2595 1.1 mrg 2596 1.1 mrg for (node = var->var_part[0].loc_chain; node; node = node->next) 2597 1.1 mrg if (GET_CODE (node->loc) == VALUE && cval != node->loc) 2598 1.1 mrg { 2599 1.1 mrg /* Redirect the equivalence link to the new canonical 2600 1.1 mrg value, or simply remove it if it would point at 2601 1.1 mrg itself. 
*/ 2602 1.1 mrg if (cval) 2603 1.1 mrg set_variable_part (set, cval, dv_from_value (node->loc), 2604 1.1 mrg 0, node->init, node->set_src, NO_INSERT); 2605 1.1 mrg delete_variable_part (set, dv_as_value (dv), 2606 1.1 mrg dv_from_value (node->loc), 0); 2607 1.1 mrg } 2608 1.1 mrg 2609 1.1 mrg if (cval) 2610 1.1 mrg { 2611 1.1 mrg decl_or_value cdv = dv_from_value (cval); 2612 1.1 mrg 2613 1.1 mrg /* Keep the remaining values connected, accumulating links 2614 1.1 mrg in the canonical value. */ 2615 1.1 mrg for (node = var->var_part[0].loc_chain; node; node = node->next) 2616 1.1 mrg { 2617 1.1 mrg if (node->loc == cval) 2618 1.1 mrg continue; 2619 1.1 mrg else if (GET_CODE (node->loc) == REG) 2620 1.1 mrg var_reg_decl_set (set, node->loc, node->init, cdv, 0, 2621 1.1 mrg node->set_src, NO_INSERT); 2622 1.1 mrg else if (GET_CODE (node->loc) == MEM) 2623 1.1 mrg var_mem_decl_set (set, node->loc, node->init, cdv, 0, 2624 1.1 mrg node->set_src, NO_INSERT); 2625 1.1 mrg else 2626 1.1 mrg set_variable_part (set, node->loc, cdv, 0, 2627 1.1 mrg node->init, node->set_src, NO_INSERT); 2628 1.1 mrg } 2629 1.1 mrg } 2630 1.1 mrg 2631 1.1 mrg /* We remove this last, to make sure that the canonical value is not 2632 1.1 mrg removed to the point of requiring reinsertion. */ 2633 1.1 mrg if (cval) 2634 1.1 mrg delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0); 2635 1.1 mrg 2636 1.1 mrg clobber_variable_part (set, NULL, dv, 0, NULL); 2637 1.1 mrg } 2638 1.1 mrg 2639 1.1 mrg /* Find the values in a given location and map the val to another 2640 1.1 mrg value, if it is unique, or add the location as one holding the 2641 1.1 mrg value. 
*/ 2642 1.1 mrg 2643 1.1 mrg static void 2644 1.1 mrg val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn) 2645 1.1 mrg { 2646 1.1 mrg decl_or_value dv = dv_from_value (val); 2647 1.1 mrg 2648 1.1 mrg if (dump_file && (dump_flags & TDF_DETAILS)) 2649 1.1 mrg { 2650 1.1 mrg if (insn) 2651 1.1 mrg fprintf (dump_file, "%i: ", INSN_UID (insn)); 2652 1.1 mrg else 2653 1.1 mrg fprintf (dump_file, "head: "); 2654 1.1 mrg print_inline_rtx (dump_file, val, 0); 2655 1.1 mrg fputs (" is at ", dump_file); 2656 1.1 mrg print_inline_rtx (dump_file, loc, 0); 2657 1.1 mrg fputc ('\n', dump_file); 2658 1.1 mrg } 2659 1.1 mrg 2660 1.1 mrg val_reset (set, dv); 2661 1.1 mrg 2662 1.1 mrg gcc_checking_assert (!unsuitable_loc (loc)); 2663 1.1 mrg 2664 1.1 mrg if (REG_P (loc)) 2665 1.1 mrg { 2666 1.1 mrg attrs *node, *found = NULL; 2667 1.1 mrg 2668 1.1 mrg for (node = set->regs[REGNO (loc)]; node; node = node->next) 2669 1.1 mrg if (dv_is_value_p (node->dv) 2670 1.1 mrg && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc)) 2671 1.1 mrg { 2672 1.1 mrg found = node; 2673 1.1 mrg 2674 1.1 mrg /* Map incoming equivalences. ??? Wouldn't it be nice if 2675 1.1 mrg we just started sharing the location lists? Maybe a 2676 1.1 mrg circular list ending at the value itself or some 2677 1.1 mrg such. */ 2678 1.1 mrg set_variable_part (set, dv_as_value (node->dv), 2679 1.1 mrg dv_from_value (val), node->offset, 2680 1.1 mrg VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT); 2681 1.1 mrg set_variable_part (set, val, node->dv, node->offset, 2682 1.1 mrg VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT); 2683 1.1 mrg } 2684 1.1 mrg 2685 1.1 mrg /* If we didn't find any equivalence, we need to remember that 2686 1.1 mrg this value is held in the named register. */ 2687 1.1 mrg if (found) 2688 1.1 mrg return; 2689 1.1 mrg } 2690 1.1 mrg /* ??? Attempt to find and merge equivalent MEMs or other 2691 1.1 mrg expressions too. 
*/ 2692 1.1 mrg 2693 1.1 mrg val_bind (set, val, loc, false); 2694 1.1 mrg } 2695 1.1 mrg 2696 1.1 mrg /* Initialize dataflow set SET to be empty. 2697 1.1 mrg VARS_SIZE is the initial size of hash table VARS. */ 2698 1.1 mrg 2699 1.1 mrg static void 2700 1.1 mrg dataflow_set_init (dataflow_set *set) 2701 1.1 mrg { 2702 1.1 mrg init_attrs_list_set (set->regs); 2703 1.1 mrg set->vars = shared_hash_copy (empty_shared_hash); 2704 1.1 mrg set->stack_adjust = 0; 2705 1.1 mrg set->traversed_vars = NULL; 2706 1.1 mrg } 2707 1.1 mrg 2708 1.1 mrg /* Delete the contents of dataflow set SET. */ 2709 1.1 mrg 2710 1.1 mrg static void 2711 1.1 mrg dataflow_set_clear (dataflow_set *set) 2712 1.1 mrg { 2713 1.1 mrg int i; 2714 1.1 mrg 2715 1.1 mrg for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 2716 1.1 mrg attrs_list_clear (&set->regs[i]); 2717 1.1 mrg 2718 1.1 mrg shared_hash_destroy (set->vars); 2719 1.1 mrg set->vars = shared_hash_copy (empty_shared_hash); 2720 1.1 mrg } 2721 1.1 mrg 2722 1.1 mrg /* Copy the contents of dataflow set SRC to DST. */ 2723 1.1 mrg 2724 1.1 mrg static void 2725 1.1 mrg dataflow_set_copy (dataflow_set *dst, dataflow_set *src) 2726 1.1 mrg { 2727 1.1 mrg int i; 2728 1.1 mrg 2729 1.1 mrg for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 2730 1.1 mrg attrs_list_copy (&dst->regs[i], src->regs[i]); 2731 1.1 mrg 2732 1.1 mrg shared_hash_destroy (dst->vars); 2733 1.1 mrg dst->vars = shared_hash_copy (src->vars); 2734 1.1 mrg dst->stack_adjust = src->stack_adjust; 2735 1.1 mrg } 2736 1.1 mrg 2737 1.1 mrg /* Information for merging lists of locations for a given offset of variable. 2738 1.1 mrg */ 2739 1.1 mrg struct variable_union_info 2740 1.1 mrg { 2741 1.1 mrg /* Node of the location chain. */ 2742 1.1 mrg location_chain *lc; 2743 1.1 mrg 2744 1.1 mrg /* The sum of positions in the input chains. */ 2745 1.1 mrg int pos; 2746 1.1 mrg 2747 1.1 mrg /* The position in the chain of DST dataflow set. 
*/ 2748 1.1 mrg int pos_dst; 2749 1.1 mrg }; 2750 1.1 mrg 2751 1.1 mrg /* Buffer for location list sorting and its allocated size. */ 2752 1.1 mrg static struct variable_union_info *vui_vec; 2753 1.1 mrg static int vui_allocated; 2754 1.1 mrg 2755 1.1 mrg /* Compare function for qsort, order the structures by POS element. */ 2756 1.1 mrg 2757 1.1 mrg static int 2758 1.1 mrg variable_union_info_cmp_pos (const void *n1, const void *n2) 2759 1.1 mrg { 2760 1.1 mrg const struct variable_union_info *const i1 = 2761 1.1 mrg (const struct variable_union_info *) n1; 2762 1.1 mrg const struct variable_union_info *const i2 = 2763 1.1 mrg ( const struct variable_union_info *) n2; 2764 1.1 mrg 2765 1.1 mrg if (i1->pos != i2->pos) 2766 1.1 mrg return i1->pos - i2->pos; 2767 1.1 mrg 2768 1.1 mrg return (i1->pos_dst - i2->pos_dst); 2769 1.1 mrg } 2770 1.1 mrg 2771 1.1 mrg /* Compute union of location parts of variable *SLOT and the same variable 2772 1.1 mrg from hash table DATA. Compute "sorted" union of the location chains 2773 1.1 mrg for common offsets, i.e. the locations of a variable part are sorted by 2774 1.1 mrg a priority where the priority is the sum of the positions in the 2 chains 2775 1.1 mrg (if a location is only in one list the position in the second list is 2776 1.1 mrg defined to be larger than the length of the chains). 2777 1.1 mrg When we are updating the location parts the newest location is in the 2778 1.1 mrg beginning of the chain, so when we do the described "sorted" union 2779 1.1 mrg we keep the newest locations in the beginning. 
*/ 2780 1.1 mrg 2781 1.1 mrg static int 2782 1.1 mrg variable_union (variable *src, dataflow_set *set) 2783 1.1 mrg { 2784 1.1 mrg variable *dst; 2785 1.1 mrg variable **dstp; 2786 1.1 mrg int i, j, k; 2787 1.1 mrg 2788 1.1 mrg dstp = shared_hash_find_slot (set->vars, src->dv); 2789 1.1 mrg if (!dstp || !*dstp) 2790 1.1 mrg { 2791 1.1 mrg src->refcount++; 2792 1.1 mrg 2793 1.1 mrg dst_can_be_shared = false; 2794 1.1 mrg if (!dstp) 2795 1.1 mrg dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT); 2796 1.1 mrg 2797 1.1 mrg *dstp = src; 2798 1.1 mrg 2799 1.1 mrg /* Continue traversing the hash table. */ 2800 1.1 mrg return 1; 2801 1.1 mrg } 2802 1.1 mrg else 2803 1.1 mrg dst = *dstp; 2804 1.1 mrg 2805 1.1 mrg gcc_assert (src->n_var_parts); 2806 1.1 mrg gcc_checking_assert (src->onepart == dst->onepart); 2807 1.1 mrg 2808 1.1 mrg /* We can combine one-part variables very efficiently, because their 2809 1.1 mrg entries are in canonical order. */ 2810 1.1 mrg if (src->onepart) 2811 1.1 mrg { 2812 1.1 mrg location_chain **nodep, *dnode, *snode; 2813 1.1 mrg 2814 1.1 mrg gcc_assert (src->n_var_parts == 1 2815 1.1 mrg && dst->n_var_parts == 1); 2816 1.1 mrg 2817 1.1 mrg snode = src->var_part[0].loc_chain; 2818 1.1 mrg gcc_assert (snode); 2819 1.1 mrg 2820 1.1 mrg restart_onepart_unshared: 2821 1.1 mrg nodep = &dst->var_part[0].loc_chain; 2822 1.1 mrg dnode = *nodep; 2823 1.1 mrg gcc_assert (dnode); 2824 1.1 mrg 2825 1.1 mrg while (snode) 2826 1.1 mrg { 2827 1.1 mrg int r = dnode ? 
loc_cmp (dnode->loc, snode->loc) : 1; 2828 1.1 mrg 2829 1.1 mrg if (r > 0) 2830 1.1 mrg { 2831 1.1 mrg location_chain *nnode; 2832 1.1 mrg 2833 1.1 mrg if (shared_var_p (dst, set->vars)) 2834 1.1 mrg { 2835 1.1 mrg dstp = unshare_variable (set, dstp, dst, 2836 1.1 mrg VAR_INIT_STATUS_INITIALIZED); 2837 1.1 mrg dst = *dstp; 2838 1.1 mrg goto restart_onepart_unshared; 2839 1.1 mrg } 2840 1.1 mrg 2841 1.1 mrg *nodep = nnode = new location_chain; 2842 1.1 mrg nnode->loc = snode->loc; 2843 1.1 mrg nnode->init = snode->init; 2844 1.1 mrg if (!snode->set_src || MEM_P (snode->set_src)) 2845 1.1 mrg nnode->set_src = NULL; 2846 1.1 mrg else 2847 1.1 mrg nnode->set_src = snode->set_src; 2848 1.1 mrg nnode->next = dnode; 2849 1.1 mrg dnode = nnode; 2850 1.1 mrg } 2851 1.1 mrg else if (r == 0) 2852 1.1 mrg gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc)); 2853 1.1 mrg 2854 1.1 mrg if (r >= 0) 2855 1.1 mrg snode = snode->next; 2856 1.1 mrg 2857 1.1 mrg nodep = &dnode->next; 2858 1.1 mrg dnode = *nodep; 2859 1.1 mrg } 2860 1.1 mrg 2861 1.1 mrg return 1; 2862 1.1 mrg } 2863 1.1 mrg 2864 1.1 mrg gcc_checking_assert (!src->onepart); 2865 1.1 mrg 2866 1.1 mrg /* Count the number of location parts, result is K. */ 2867 1.1 mrg for (i = 0, j = 0, k = 0; 2868 1.1 mrg i < src->n_var_parts && j < dst->n_var_parts; k++) 2869 1.1 mrg { 2870 1.1 mrg if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j)) 2871 1.1 mrg { 2872 1.1 mrg i++; 2873 1.1 mrg j++; 2874 1.1 mrg } 2875 1.1 mrg else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j)) 2876 1.1 mrg i++; 2877 1.1 mrg else 2878 1.1 mrg j++; 2879 1.1 mrg } 2880 1.1 mrg k += src->n_var_parts - i; 2881 1.1 mrg k += dst->n_var_parts - j; 2882 1.1 mrg 2883 1.1 mrg /* We track only variables whose size is <= MAX_VAR_PARTS bytes 2884 1.1 mrg thus there are at most MAX_VAR_PARTS different offsets. */ 2885 1.1 mrg gcc_checking_assert (dst->onepart ? 
k == 1 : k <= MAX_VAR_PARTS); 2886 1.1 mrg 2887 1.1 mrg if (dst->n_var_parts != k && shared_var_p (dst, set->vars)) 2888 1.1 mrg { 2889 1.1 mrg dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN); 2890 1.1 mrg dst = *dstp; 2891 1.1 mrg } 2892 1.1 mrg 2893 1.1 mrg i = src->n_var_parts - 1; 2894 1.1 mrg j = dst->n_var_parts - 1; 2895 1.1 mrg dst->n_var_parts = k; 2896 1.1 mrg 2897 1.1 mrg for (k--; k >= 0; k--) 2898 1.1 mrg { 2899 1.1 mrg location_chain *node, *node2; 2900 1.1 mrg 2901 1.1 mrg if (i >= 0 && j >= 0 2902 1.1 mrg && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j)) 2903 1.1 mrg { 2904 1.1 mrg /* Compute the "sorted" union of the chains, i.e. the locations which 2905 1.1 mrg are in both chains go first, they are sorted by the sum of 2906 1.1 mrg positions in the chains. */ 2907 1.1 mrg int dst_l, src_l; 2908 1.1 mrg int ii, jj, n; 2909 1.1 mrg struct variable_union_info *vui; 2910 1.1 mrg 2911 1.1 mrg /* If DST is shared compare the location chains. 2912 1.1 mrg If they are different we will modify the chain in DST with 2913 1.1 mrg high probability so make a copy of DST. 
*/ 2914 1.1 mrg if (shared_var_p (dst, set->vars)) 2915 1.1 mrg { 2916 1.1 mrg for (node = src->var_part[i].loc_chain, 2917 1.1 mrg node2 = dst->var_part[j].loc_chain; node && node2; 2918 1.1 mrg node = node->next, node2 = node2->next) 2919 1.1 mrg { 2920 1.1 mrg if (!((REG_P (node2->loc) 2921 1.1 mrg && REG_P (node->loc) 2922 1.1 mrg && REGNO (node2->loc) == REGNO (node->loc)) 2923 1.1 mrg || rtx_equal_p (node2->loc, node->loc))) 2924 1.1 mrg { 2925 1.1 mrg if (node2->init < node->init) 2926 1.1 mrg node2->init = node->init; 2927 1.1 mrg break; 2928 1.1 mrg } 2929 1.1 mrg } 2930 1.1 mrg if (node || node2) 2931 1.1 mrg { 2932 1.1 mrg dstp = unshare_variable (set, dstp, dst, 2933 1.1 mrg VAR_INIT_STATUS_UNKNOWN); 2934 1.1 mrg dst = (variable *)*dstp; 2935 1.1 mrg } 2936 1.1 mrg } 2937 1.1 mrg 2938 1.1 mrg src_l = 0; 2939 1.1 mrg for (node = src->var_part[i].loc_chain; node; node = node->next) 2940 1.1 mrg src_l++; 2941 1.1 mrg dst_l = 0; 2942 1.1 mrg for (node = dst->var_part[j].loc_chain; node; node = node->next) 2943 1.1 mrg dst_l++; 2944 1.1 mrg 2945 1.1 mrg if (dst_l == 1) 2946 1.1 mrg { 2947 1.1 mrg /* The most common case, much simpler, no qsort is needed. */ 2948 1.1 mrg location_chain *dstnode = dst->var_part[j].loc_chain; 2949 1.1 mrg dst->var_part[k].loc_chain = dstnode; 2950 1.1 mrg VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j); 2951 1.1 mrg node2 = dstnode; 2952 1.1 mrg for (node = src->var_part[i].loc_chain; node; node = node->next) 2953 1.1 mrg if (!((REG_P (dstnode->loc) 2954 1.1 mrg && REG_P (node->loc) 2955 1.1 mrg && REGNO (dstnode->loc) == REGNO (node->loc)) 2956 1.1 mrg || rtx_equal_p (dstnode->loc, node->loc))) 2957 1.1 mrg { 2958 1.1 mrg location_chain *new_node; 2959 1.1 mrg 2960 1.1 mrg /* Copy the location from SRC. 
*/ 2961 1.1 mrg new_node = new location_chain; 2962 1.1 mrg new_node->loc = node->loc; 2963 1.1 mrg new_node->init = node->init; 2964 1.1 mrg if (!node->set_src || MEM_P (node->set_src)) 2965 1.1 mrg new_node->set_src = NULL; 2966 1.1 mrg else 2967 1.1 mrg new_node->set_src = node->set_src; 2968 1.1 mrg node2->next = new_node; 2969 1.1 mrg node2 = new_node; 2970 1.1 mrg } 2971 1.1 mrg node2->next = NULL; 2972 1.1 mrg } 2973 1.1 mrg else 2974 1.1 mrg { 2975 1.1 mrg if (src_l + dst_l > vui_allocated) 2976 1.1 mrg { 2977 1.1 mrg vui_allocated = MAX (vui_allocated * 2, src_l + dst_l); 2978 1.1 mrg vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec, 2979 1.1 mrg vui_allocated); 2980 1.1 mrg } 2981 1.1 mrg vui = vui_vec; 2982 1.1 mrg 2983 1.1 mrg /* Fill in the locations from DST. */ 2984 1.1 mrg for (node = dst->var_part[j].loc_chain, jj = 0; node; 2985 1.1 mrg node = node->next, jj++) 2986 1.1 mrg { 2987 1.1 mrg vui[jj].lc = node; 2988 1.1 mrg vui[jj].pos_dst = jj; 2989 1.1 mrg 2990 1.1 mrg /* Pos plus value larger than a sum of 2 valid positions. */ 2991 1.1 mrg vui[jj].pos = jj + src_l + dst_l; 2992 1.1 mrg } 2993 1.1 mrg 2994 1.1 mrg /* Fill in the locations from SRC. */ 2995 1.1 mrg n = dst_l; 2996 1.1 mrg for (node = src->var_part[i].loc_chain, ii = 0; node; 2997 1.1 mrg node = node->next, ii++) 2998 1.1 mrg { 2999 1.1 mrg /* Find location from NODE. */ 3000 1.1 mrg for (jj = 0; jj < dst_l; jj++) 3001 1.1 mrg { 3002 1.1 mrg if ((REG_P (vui[jj].lc->loc) 3003 1.1 mrg && REG_P (node->loc) 3004 1.1 mrg && REGNO (vui[jj].lc->loc) == REGNO (node->loc)) 3005 1.1 mrg || rtx_equal_p (vui[jj].lc->loc, node->loc)) 3006 1.1 mrg { 3007 1.1 mrg vui[jj].pos = jj + ii; 3008 1.1 mrg break; 3009 1.1 mrg } 3010 1.1 mrg } 3011 1.1 mrg if (jj >= dst_l) /* The location has not been found. */ 3012 1.1 mrg { 3013 1.1 mrg location_chain *new_node; 3014 1.1 mrg 3015 1.1 mrg /* Copy the location from SRC. 
*/ 3016 1.1 mrg new_node = new location_chain; 3017 1.1 mrg new_node->loc = node->loc; 3018 1.1 mrg new_node->init = node->init; 3019 1.1 mrg if (!node->set_src || MEM_P (node->set_src)) 3020 1.1 mrg new_node->set_src = NULL; 3021 1.1 mrg else 3022 1.1 mrg new_node->set_src = node->set_src; 3023 1.1 mrg vui[n].lc = new_node; 3024 1.1 mrg vui[n].pos_dst = src_l + dst_l; 3025 1.1 mrg vui[n].pos = ii + src_l + dst_l; 3026 1.1 mrg n++; 3027 1.1 mrg } 3028 1.1 mrg } 3029 1.1 mrg 3030 1.1 mrg if (dst_l == 2) 3031 1.1 mrg { 3032 1.1 mrg /* Special case still very common case. For dst_l == 2 3033 1.1 mrg all entries dst_l ... n-1 are sorted, with for i >= dst_l 3034 1.1 mrg vui[i].pos == i + src_l + dst_l. */ 3035 1.1 mrg if (vui[0].pos > vui[1].pos) 3036 1.1 mrg { 3037 1.1 mrg /* Order should be 1, 0, 2... */ 3038 1.1 mrg dst->var_part[k].loc_chain = vui[1].lc; 3039 1.1 mrg vui[1].lc->next = vui[0].lc; 3040 1.1 mrg if (n >= 3) 3041 1.1 mrg { 3042 1.1 mrg vui[0].lc->next = vui[2].lc; 3043 1.1 mrg vui[n - 1].lc->next = NULL; 3044 1.1 mrg } 3045 1.1 mrg else 3046 1.1 mrg vui[0].lc->next = NULL; 3047 1.1 mrg ii = 3; 3048 1.1 mrg } 3049 1.1 mrg else 3050 1.1 mrg { 3051 1.1 mrg dst->var_part[k].loc_chain = vui[0].lc; 3052 1.1 mrg if (n >= 3 && vui[2].pos < vui[1].pos) 3053 1.1 mrg { 3054 1.1 mrg /* Order should be 0, 2, 1, 3... */ 3055 1.1 mrg vui[0].lc->next = vui[2].lc; 3056 1.1 mrg vui[2].lc->next = vui[1].lc; 3057 1.1 mrg if (n >= 4) 3058 1.1 mrg { 3059 1.1 mrg vui[1].lc->next = vui[3].lc; 3060 1.1 mrg vui[n - 1].lc->next = NULL; 3061 1.1 mrg } 3062 1.1 mrg else 3063 1.1 mrg vui[1].lc->next = NULL; 3064 1.1 mrg ii = 4; 3065 1.1 mrg } 3066 1.1 mrg else 3067 1.1 mrg { 3068 1.1 mrg /* Order should be 0, 1, 2... 
*/ 3069 1.1 mrg ii = 1; 3070 1.1 mrg vui[n - 1].lc->next = NULL; 3071 1.1 mrg } 3072 1.1 mrg } 3073 1.1 mrg for (; ii < n; ii++) 3074 1.1 mrg vui[ii - 1].lc->next = vui[ii].lc; 3075 1.1 mrg } 3076 1.1 mrg else 3077 1.1 mrg { 3078 1.1 mrg qsort (vui, n, sizeof (struct variable_union_info), 3079 1.1 mrg variable_union_info_cmp_pos); 3080 1.1 mrg 3081 1.1 mrg /* Reconnect the nodes in sorted order. */ 3082 1.1 mrg for (ii = 1; ii < n; ii++) 3083 1.1 mrg vui[ii - 1].lc->next = vui[ii].lc; 3084 1.1 mrg vui[n - 1].lc->next = NULL; 3085 1.1 mrg dst->var_part[k].loc_chain = vui[0].lc; 3086 1.1 mrg } 3087 1.1 mrg 3088 1.1 mrg VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j); 3089 1.1 mrg } 3090 1.1 mrg i--; 3091 1.1 mrg j--; 3092 1.1 mrg } 3093 1.1 mrg else if ((i >= 0 && j >= 0 3094 1.1 mrg && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j)) 3095 1.1 mrg || i < 0) 3096 1.1 mrg { 3097 1.1 mrg dst->var_part[k] = dst->var_part[j]; 3098 1.1 mrg j--; 3099 1.1 mrg } 3100 1.1 mrg else if ((i >= 0 && j >= 0 3101 1.1 mrg && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j)) 3102 1.1 mrg || j < 0) 3103 1.1 mrg { 3104 1.1 mrg location_chain **nextp; 3105 1.1 mrg 3106 1.1 mrg /* Copy the chain from SRC. 
*/ 3107 1.1 mrg nextp = &dst->var_part[k].loc_chain; 3108 1.1 mrg for (node = src->var_part[i].loc_chain; node; node = node->next) 3109 1.1 mrg { 3110 1.1 mrg location_chain *new_lc; 3111 1.1 mrg 3112 1.1 mrg new_lc = new location_chain; 3113 1.1 mrg new_lc->next = NULL; 3114 1.1 mrg new_lc->init = node->init; 3115 1.1 mrg if (!node->set_src || MEM_P (node->set_src)) 3116 1.1 mrg new_lc->set_src = NULL; 3117 1.1 mrg else 3118 1.1 mrg new_lc->set_src = node->set_src; 3119 1.1 mrg new_lc->loc = node->loc; 3120 1.1 mrg 3121 1.1 mrg *nextp = new_lc; 3122 1.1 mrg nextp = &new_lc->next; 3123 1.1 mrg } 3124 1.1 mrg 3125 1.1 mrg VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i); 3126 1.1 mrg i--; 3127 1.1 mrg } 3128 1.1 mrg dst->var_part[k].cur_loc = NULL; 3129 1.1 mrg } 3130 1.1 mrg 3131 1.1 mrg if (flag_var_tracking_uninit) 3132 1.1 mrg for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++) 3133 1.1 mrg { 3134 1.1 mrg location_chain *node, *node2; 3135 1.1 mrg for (node = src->var_part[i].loc_chain; node; node = node->next) 3136 1.1 mrg for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next) 3137 1.1 mrg if (rtx_equal_p (node->loc, node2->loc)) 3138 1.1 mrg { 3139 1.1 mrg if (node->init > node2->init) 3140 1.1 mrg node2->init = node->init; 3141 1.1 mrg } 3142 1.1 mrg } 3143 1.1 mrg 3144 1.1 mrg /* Continue traversing the hash table. */ 3145 1.1 mrg return 1; 3146 1.1 mrg } 3147 1.1 mrg 3148 1.1 mrg /* Compute union of dataflow sets SRC and DST and store it to DST. 
*/ 3149 1.1 mrg 3150 1.1 mrg static void 3151 1.1 mrg dataflow_set_union (dataflow_set *dst, dataflow_set *src) 3152 1.1 mrg { 3153 1.1 mrg int i; 3154 1.1 mrg 3155 1.1 mrg for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 3156 1.1 mrg attrs_list_union (&dst->regs[i], src->regs[i]); 3157 1.1 mrg 3158 1.1 mrg if (dst->vars == empty_shared_hash) 3159 1.1 mrg { 3160 1.1 mrg shared_hash_destroy (dst->vars); 3161 1.1 mrg dst->vars = shared_hash_copy (src->vars); 3162 1.1 mrg } 3163 1.1 mrg else 3164 1.1 mrg { 3165 1.1 mrg variable_iterator_type hi; 3166 1.1 mrg variable *var; 3167 1.1 mrg 3168 1.1 mrg FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars), 3169 1.1 mrg var, variable, hi) 3170 1.1 mrg variable_union (var, dst); 3171 1.1 mrg } 3172 1.1 mrg } 3173 1.1 mrg 3174 1.1 mrg /* Whether the value is currently being expanded. */ 3175 1.1 mrg #define VALUE_RECURSED_INTO(x) \ 3176 1.1 mrg (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used) 3177 1.1 mrg 3178 1.1 mrg /* Whether no expansion was found, saving useless lookups. 3179 1.1 mrg It must only be set when VALUE_CHANGED is clear. */ 3180 1.1 mrg #define NO_LOC_P(x) \ 3181 1.1 mrg (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val) 3182 1.1 mrg 3183 1.1 mrg /* Whether cur_loc in the value needs to be (re)computed. */ 3184 1.1 mrg #define VALUE_CHANGED(x) \ 3185 1.1 mrg (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related) 3186 1.1 mrg /* Whether cur_loc in the decl needs to be (re)computed. */ 3187 1.1 mrg #define DECL_CHANGED(x) TREE_VISITED (x) 3188 1.1 mrg 3189 1.1 mrg /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For 3190 1.1 mrg user DECLs, this means they're in changed_variables. Values and 3191 1.1 mrg debug exprs may be left with this flag set if no user variable 3192 1.1 mrg requires them to be evaluated. 
*/ 3193 1.1 mrg 3194 1.1 mrg static inline void 3195 1.1 mrg set_dv_changed (decl_or_value dv, bool newv) 3196 1.1 mrg { 3197 1.1 mrg switch (dv_onepart_p (dv)) 3198 1.1 mrg { 3199 1.1 mrg case ONEPART_VALUE: 3200 1.1 mrg if (newv) 3201 1.1 mrg NO_LOC_P (dv_as_value (dv)) = false; 3202 1.1 mrg VALUE_CHANGED (dv_as_value (dv)) = newv; 3203 1.1 mrg break; 3204 1.1 mrg 3205 1.1 mrg case ONEPART_DEXPR: 3206 1.1 mrg if (newv) 3207 1.1 mrg NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false; 3208 1.1 mrg /* Fall through. */ 3209 1.1 mrg 3210 1.1 mrg default: 3211 1.1 mrg DECL_CHANGED (dv_as_decl (dv)) = newv; 3212 1.1 mrg break; 3213 1.1 mrg } 3214 1.1 mrg } 3215 1.1 mrg 3216 1.1 mrg /* Return true if DV needs to have its cur_loc recomputed. */ 3217 1.1 mrg 3218 1.1 mrg static inline bool 3219 1.1 mrg dv_changed_p (decl_or_value dv) 3220 1.1 mrg { 3221 1.1 mrg return (dv_is_value_p (dv) 3222 1.1 mrg ? VALUE_CHANGED (dv_as_value (dv)) 3223 1.1 mrg : DECL_CHANGED (dv_as_decl (dv))); 3224 1.1 mrg } 3225 1.1 mrg 3226 1.1 mrg /* Return a location list node whose loc is rtx_equal to LOC, in the 3227 1.1 mrg location list of a one-part variable or value VAR, or in that of 3228 1.1 mrg any values recursively mentioned in the location lists. VARS must 3229 1.1 mrg be in star-canonical form. 
*/ 3230 1.1 mrg 3231 1.1 mrg static location_chain * 3232 1.1 mrg find_loc_in_1pdv (rtx loc, variable *var, variable_table_type *vars) 3233 1.1 mrg { 3234 1.1 mrg location_chain *node; 3235 1.1 mrg enum rtx_code loc_code; 3236 1.1 mrg 3237 1.1 mrg if (!var) 3238 1.1 mrg return NULL; 3239 1.1 mrg 3240 1.1 mrg gcc_checking_assert (var->onepart); 3241 1.1 mrg 3242 1.1 mrg if (!var->n_var_parts) 3243 1.1 mrg return NULL; 3244 1.1 mrg 3245 1.1 mrg gcc_checking_assert (loc != dv_as_opaque (var->dv)); 3246 1.1 mrg 3247 1.1 mrg loc_code = GET_CODE (loc); 3248 1.1 mrg for (node = var->var_part[0].loc_chain; node; node = node->next) 3249 1.1 mrg { 3250 1.1 mrg decl_or_value dv; 3251 1.1 mrg variable *rvar; 3252 1.1 mrg 3253 1.1 mrg if (GET_CODE (node->loc) != loc_code) 3254 1.1 mrg { 3255 1.1 mrg if (GET_CODE (node->loc) != VALUE) 3256 1.1 mrg continue; 3257 1.1 mrg } 3258 1.1 mrg else if (loc == node->loc) 3259 1.1 mrg return node; 3260 1.1 mrg else if (loc_code != VALUE) 3261 1.1 mrg { 3262 1.1 mrg if (rtx_equal_p (loc, node->loc)) 3263 1.1 mrg return node; 3264 1.1 mrg continue; 3265 1.1 mrg } 3266 1.1 mrg 3267 1.1 mrg /* Since we're in star-canonical form, we don't need to visit 3268 1.1 mrg non-canonical nodes: one-part variables and non-canonical 3269 1.1 mrg values would only point back to the canonical node. */ 3270 1.1 mrg if (dv_is_value_p (var->dv) 3271 1.1 mrg && !canon_value_cmp (node->loc, dv_as_value (var->dv))) 3272 1.1 mrg { 3273 1.1 mrg /* Skip all subsequent VALUEs. 
*/ 3274 1.1 mrg while (node->next && GET_CODE (node->next->loc) == VALUE) 3275 1.1 mrg { 3276 1.1 mrg node = node->next; 3277 1.1 mrg gcc_checking_assert (!canon_value_cmp (node->loc, 3278 1.1 mrg dv_as_value (var->dv))); 3279 1.1 mrg if (loc == node->loc) 3280 1.1 mrg return node; 3281 1.1 mrg } 3282 1.1 mrg continue; 3283 1.1 mrg } 3284 1.1 mrg 3285 1.1 mrg gcc_checking_assert (node == var->var_part[0].loc_chain); 3286 1.1 mrg gcc_checking_assert (!node->next); 3287 1.1 mrg 3288 1.1 mrg dv = dv_from_value (node->loc); 3289 1.1 mrg rvar = vars->find_with_hash (dv, dv_htab_hash (dv)); 3290 1.1 mrg return find_loc_in_1pdv (loc, rvar, vars); 3291 1.1 mrg } 3292 1.1 mrg 3293 1.1 mrg /* ??? Gotta look in cselib_val locations too. */ 3294 1.1 mrg 3295 1.1 mrg return NULL; 3296 1.1 mrg } 3297 1.1 mrg 3298 1.1 mrg /* Hash table iteration argument passed to variable_merge. */ 3299 1.1 mrg struct dfset_merge 3300 1.1 mrg { 3301 1.1 mrg /* The set in which the merge is to be inserted. */ 3302 1.1 mrg dataflow_set *dst; 3303 1.1 mrg /* The set that we're iterating in. */ 3304 1.1 mrg dataflow_set *cur; 3305 1.1 mrg /* The set that may contain the other dv we are to merge with. */ 3306 1.1 mrg dataflow_set *src; 3307 1.1 mrg /* Number of onepart dvs in src. */ 3308 1.1 mrg int src_onepart_cnt; 3309 1.1 mrg }; 3310 1.1 mrg 3311 1.1 mrg /* Insert LOC in *DNODE, if it's not there yet. The list must be in 3312 1.1 mrg loc_cmp order, and it is maintained as such. 
*/

static void
insert_into_intersection (location_chain **nodep, rtx loc,
			  enum var_init_status status)
{
  /* Walk the loc_cmp-sorted list until we find LOC or pass the point
     where it belongs.  */
  while (location_chain *cur = *nodep)
    {
      int cmp = loc_cmp (cur->loc, loc);
      if (cmp == 0)
	{
	  /* Already present: just weaken the initialization status.  */
	  cur->init = MIN (cur->init, status);
	  return;
	}
      if (cmp > 0)
	break;
      nodep = &cur->next;
    }

  /* Splice a fresh node in at the insertion point.  */
  location_chain *fresh = new location_chain;
  fresh->loc = loc;
  fresh->set_src = NULL;
  fresh->init = status;
  fresh->next = *nodep;
  *nodep = fresh;
}

/* Insert in DEST the intersection of the locations present in both
   S1NODE and S2VAR, directly or indirectly.  S1NODE is from a
   variable in DSM->cur, whereas S2VAR is from DSM->src.  dvar is in
   DSM->dst.
*/

static void
intersect_loc_chains (rtx val, location_chain **dest, struct dfset_merge *dsm,
		      location_chain *s1node, variable *s2var)
{
  dataflow_set *s1set = dsm->cur;
  dataflow_set *s2set = dsm->src;
  location_chain *found;

  if (s2var)
    {
      location_chain *s2node;

      gcc_checking_assert (s2var->onepart);

      if (s2var->n_var_parts)
	{
	  s2node = s2var->var_part[0].loc_chain;

	  /* Fast path: both chains are in loc_cmp order, so walk them
	     in lockstep for as long as they agree node-for-node.  */
	  for (; s1node && s2node;
	       s1node = s1node->next, s2node = s2node->next)
	    if (s1node->loc != s2node->loc)
	      break;
	    else if (s1node->loc == val)
	      continue;
	    else
	      insert_into_intersection (dest, s1node->loc,
					MIN (s1node->init, s2node->init));
	}
    }

  /* Slow path: for each remaining location in S1NODE, search for it
     (directly or through value equivalences) in S2VAR.  */
  for (; s1node; s1node = s1node->next)
    {
      if (s1node->loc == val)
	continue;

      if ((found = find_loc_in_1pdv (s1node->loc, s2var,
				     shared_hash_htab (s2set->vars))))
	{
	  insert_into_intersection (dest, s1node->loc,
				    MIN (s1node->init, found->init));
	  continue;
	}

      if (GET_CODE (s1node->loc) == VALUE
	  && !VALUE_RECURSED_INTO (s1node->loc))
	{
	  /* Follow the equivalence chain of this VALUE in S1SET,
	     using VALUE_RECURSED_INTO as a cycle guard.  */
	  decl_or_value dv = dv_from_value (s1node->loc);
	  variable *svar = shared_hash_find (s1set->vars, dv);
	  if (svar)
	    {
	      if (svar->n_var_parts == 1)
		{
		  VALUE_RECURSED_INTO (s1node->loc) = true;
		  intersect_loc_chains (val, dest, dsm,
					svar->var_part[0].loc_chain,
					s2var);
		  VALUE_RECURSED_INTO (s1node->loc) = false;
		}
	    }
	}

      /* ??? gotta look in cselib_val locations too.  */

      /* ??? if the location is equivalent to any location in src,
	 searched recursively

	   add to dst the values needed to represent the equivalence

	 telling whether locations S is equivalent to another dv's
	 location list:

	   for each location D in the list

	     if S and D satisfy rtx_equal_p, then it is present

	     else if D is a value, recurse without cycles

	     else if S and D have the same CODE and MODE

	       for each operand oS and the corresponding oD

		 if oS and oD are not equivalent, then S an D are not equivalent

		 else if they are RTX vectors

		   if any vector oS element is not equivalent to its respective oD,
		   then S and D are not equivalent

      */


    }
}

/* Return -1 if X should be before Y in a location list for a 1-part
   variable, 1 if Y should be before X, and 0 if they're equivalent
   and should not appear in the list.
*/ 3441 1.1 mrg 3442 1.1 mrg static int 3443 1.1 mrg loc_cmp (rtx x, rtx y) 3444 1.1 mrg { 3445 1.1 mrg int i, j, r; 3446 1.1 mrg RTX_CODE code = GET_CODE (x); 3447 1.1 mrg const char *fmt; 3448 1.1 mrg 3449 1.1 mrg if (x == y) 3450 1.1 mrg return 0; 3451 1.1 mrg 3452 1.1 mrg if (REG_P (x)) 3453 1.1 mrg { 3454 1.1 mrg if (!REG_P (y)) 3455 1.1 mrg return -1; 3456 1.1 mrg gcc_assert (GET_MODE (x) == GET_MODE (y)); 3457 1.1 mrg if (REGNO (x) == REGNO (y)) 3458 1.1 mrg return 0; 3459 1.1 mrg else if (REGNO (x) < REGNO (y)) 3460 1.1 mrg return -1; 3461 1.1 mrg else 3462 1.1 mrg return 1; 3463 1.1 mrg } 3464 1.1 mrg 3465 1.1 mrg if (REG_P (y)) 3466 1.1 mrg return 1; 3467 1.1 mrg 3468 1.1 mrg if (MEM_P (x)) 3469 1.1 mrg { 3470 1.1 mrg if (!MEM_P (y)) 3471 1.1 mrg return -1; 3472 1.1 mrg gcc_assert (GET_MODE (x) == GET_MODE (y)); 3473 1.1 mrg return loc_cmp (XEXP (x, 0), XEXP (y, 0)); 3474 1.1 mrg } 3475 1.1 mrg 3476 1.1 mrg if (MEM_P (y)) 3477 1.1 mrg return 1; 3478 1.1 mrg 3479 1.1 mrg if (GET_CODE (x) == VALUE) 3480 1.1 mrg { 3481 1.1 mrg if (GET_CODE (y) != VALUE) 3482 1.1 mrg return -1; 3483 1.1 mrg /* Don't assert the modes are the same, that is true only 3484 1.1 mrg when not recursing. (subreg:QI (value:SI 1:1) 0) 3485 1.1 mrg and (subreg:QI (value:DI 2:2) 0) can be compared, 3486 1.1 mrg even when the modes are different. */ 3487 1.1 mrg if (canon_value_cmp (x, y)) 3488 1.1 mrg return -1; 3489 1.1 mrg else 3490 1.1 mrg return 1; 3491 1.1 mrg } 3492 1.1 mrg 3493 1.1 mrg if (GET_CODE (y) == VALUE) 3494 1.1 mrg return 1; 3495 1.1 mrg 3496 1.1 mrg /* Entry value is the least preferable kind of expression. 
*/ 3497 1.1 mrg if (GET_CODE (x) == ENTRY_VALUE) 3498 1.1 mrg { 3499 1.1 mrg if (GET_CODE (y) != ENTRY_VALUE) 3500 1.1 mrg return 1; 3501 1.1 mrg gcc_assert (GET_MODE (x) == GET_MODE (y)); 3502 1.1 mrg return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y)); 3503 1.1 mrg } 3504 1.1 mrg 3505 1.1 mrg if (GET_CODE (y) == ENTRY_VALUE) 3506 1.1 mrg return -1; 3507 1.1 mrg 3508 1.1 mrg if (GET_CODE (x) == GET_CODE (y)) 3509 1.1 mrg /* Compare operands below. */; 3510 1.1 mrg else if (GET_CODE (x) < GET_CODE (y)) 3511 1.1 mrg return -1; 3512 1.1 mrg else 3513 1.1 mrg return 1; 3514 1.1 mrg 3515 1.1 mrg gcc_assert (GET_MODE (x) == GET_MODE (y)); 3516 1.1 mrg 3517 1.1 mrg if (GET_CODE (x) == DEBUG_EXPR) 3518 1.1 mrg { 3519 1.1 mrg if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x)) 3520 1.1 mrg < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y))) 3521 1.1 mrg return -1; 3522 1.1 mrg gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x)) 3523 1.1 mrg > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y))); 3524 1.1 mrg return 1; 3525 1.1 mrg } 3526 1.1 mrg 3527 1.1 mrg fmt = GET_RTX_FORMAT (code); 3528 1.1 mrg for (i = 0; i < GET_RTX_LENGTH (code); i++) 3529 1.1 mrg switch (fmt[i]) 3530 1.1 mrg { 3531 1.1 mrg case 'w': 3532 1.1 mrg if (XWINT (x, i) == XWINT (y, i)) 3533 1.1 mrg break; 3534 1.1 mrg else if (XWINT (x, i) < XWINT (y, i)) 3535 1.1 mrg return -1; 3536 1.1 mrg else 3537 1.1 mrg return 1; 3538 1.1 mrg 3539 1.1 mrg case 'n': 3540 1.1 mrg case 'i': 3541 1.1 mrg if (XINT (x, i) == XINT (y, i)) 3542 1.1 mrg break; 3543 1.1 mrg else if (XINT (x, i) < XINT (y, i)) 3544 1.1 mrg return -1; 3545 1.1 mrg else 3546 1.1 mrg return 1; 3547 1.1 mrg 3548 1.1 mrg case 'p': 3549 1.1 mrg r = compare_sizes_for_sort (SUBREG_BYTE (x), SUBREG_BYTE (y)); 3550 1.1 mrg if (r != 0) 3551 1.1 mrg return r; 3552 1.1 mrg break; 3553 1.1 mrg 3554 1.1 mrg case 'V': 3555 1.1 mrg case 'E': 3556 1.1 mrg /* Compare the vector length first. 
*/ 3557 1.1 mrg if (XVECLEN (x, i) == XVECLEN (y, i)) 3558 1.1 mrg /* Compare the vectors elements. */; 3559 1.1 mrg else if (XVECLEN (x, i) < XVECLEN (y, i)) 3560 1.1 mrg return -1; 3561 1.1 mrg else 3562 1.1 mrg return 1; 3563 1.1 mrg 3564 1.1 mrg for (j = 0; j < XVECLEN (x, i); j++) 3565 1.1 mrg if ((r = loc_cmp (XVECEXP (x, i, j), 3566 1.1 mrg XVECEXP (y, i, j)))) 3567 1.1 mrg return r; 3568 1.1 mrg break; 3569 1.1 mrg 3570 1.1 mrg case 'e': 3571 1.1 mrg if ((r = loc_cmp (XEXP (x, i), XEXP (y, i)))) 3572 1.1 mrg return r; 3573 1.1 mrg break; 3574 1.1 mrg 3575 1.1 mrg case 'S': 3576 1.1 mrg case 's': 3577 1.1 mrg if (XSTR (x, i) == XSTR (y, i)) 3578 1.1 mrg break; 3579 1.1 mrg if (!XSTR (x, i)) 3580 1.1 mrg return -1; 3581 1.1 mrg if (!XSTR (y, i)) 3582 1.1 mrg return 1; 3583 1.1 mrg if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0) 3584 1.1 mrg break; 3585 1.1 mrg else if (r < 0) 3586 1.1 mrg return -1; 3587 1.1 mrg else 3588 1.1 mrg return 1; 3589 1.1 mrg 3590 1.1 mrg case 'u': 3591 1.1 mrg /* These are just backpointers, so they don't matter. */ 3592 1.1 mrg break; 3593 1.1 mrg 3594 1.1 mrg case '0': 3595 1.1 mrg case 't': 3596 1.1 mrg break; 3597 1.1 mrg 3598 1.1 mrg /* It is believed that rtx's at this level will never 3599 1.1 mrg contain anything but integers and other rtx's, 3600 1.1 mrg except for within LABEL_REFs and SYMBOL_REFs. */ 3601 1.1 mrg default: 3602 1.1 mrg gcc_unreachable (); 3603 1.1 mrg } 3604 1.1 mrg if (CONST_WIDE_INT_P (x)) 3605 1.1 mrg { 3606 1.1 mrg /* Compare the vector length first. */ 3607 1.1 mrg if (CONST_WIDE_INT_NUNITS (x) >= CONST_WIDE_INT_NUNITS (y)) 3608 1.1 mrg return 1; 3609 1.1 mrg else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y)) 3610 1.1 mrg return -1; 3611 1.1 mrg 3612 1.1 mrg /* Compare the vectors elements. 
*/; 3613 1.1 mrg for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--) 3614 1.1 mrg { 3615 1.1 mrg if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j)) 3616 1.1 mrg return -1; 3617 1.1 mrg if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j)) 3618 1.1 mrg return 1; 3619 1.1 mrg } 3620 1.1 mrg } 3621 1.1 mrg 3622 1.1 mrg return 0; 3623 1.1 mrg } 3624 1.1 mrg 3625 1.1 mrg /* Check the order of entries in one-part variables. */ 3626 1.1 mrg 3627 1.1 mrg int 3628 1.1 mrg canonicalize_loc_order_check (variable **slot, 3629 1.1 mrg dataflow_set *data ATTRIBUTE_UNUSED) 3630 1.1 mrg { 3631 1.1 mrg variable *var = *slot; 3632 1.1 mrg location_chain *node, *next; 3633 1.1 mrg 3634 1.1 mrg #ifdef ENABLE_RTL_CHECKING 3635 1.1 mrg int i; 3636 1.1 mrg for (i = 0; i < var->n_var_parts; i++) 3637 1.1 mrg gcc_assert (var->var_part[0].cur_loc == NULL); 3638 1.1 mrg gcc_assert (!var->in_changed_variables); 3639 1.1 mrg #endif 3640 1.1 mrg 3641 1.1 mrg if (!var->onepart) 3642 1.1 mrg return 1; 3643 1.1 mrg 3644 1.1 mrg gcc_assert (var->n_var_parts == 1); 3645 1.1 mrg node = var->var_part[0].loc_chain; 3646 1.1 mrg gcc_assert (node); 3647 1.1 mrg 3648 1.1 mrg while ((next = node->next)) 3649 1.1 mrg { 3650 1.1 mrg gcc_assert (loc_cmp (node->loc, next->loc) < 0); 3651 1.1 mrg node = next; 3652 1.1 mrg } 3653 1.1 mrg 3654 1.1 mrg return 1; 3655 1.1 mrg } 3656 1.1 mrg 3657 1.1 mrg /* Mark with VALUE_RECURSED_INTO values that have neighbors that are 3658 1.1 mrg more likely to be chosen as canonical for an equivalence set. 3659 1.1 mrg Ensure less likely values can reach more likely neighbors, making 3660 1.1 mrg the connections bidirectional. 
*/

int
canonicalize_values_mark (variable **slot, dataflow_set *set)
{
  variable *var = *slot;
  decl_or_value dv = var->dv;
  rtx val;
  location_chain *node;

  /* Only VALUE-keyed entries participate in equivalence marking.  */
  if (!dv_is_value_p (dv))
    return 1;

  gcc_checking_assert (var->n_var_parts == 1);

  val = dv_as_value (dv);

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE)
      {
	if (canon_value_cmp (node->loc, val))
	  /* A more-canonical neighbor exists: mark VAL for a later
	     canonicalization pass.  */
	  VALUE_RECURSED_INTO (val) = true;
	else
	  {
	    /* NODE->loc is less canonical than VAL: record the
	       reverse link VAL in NODE->loc's own chain so the
	       equivalence is reachable from both sides, and mark
	       NODE->loc for canonicalization.  */
	    decl_or_value odv = dv_from_value (node->loc);
	    variable **oslot;
	    oslot = shared_hash_find_slot_noinsert (set->vars, odv);

	    set_slot_part (set, val, oslot, odv, 0,
			   node->init, NULL_RTX);

	    VALUE_RECURSED_INTO (node->loc) = true;
	  }
      }

  return 1;
}

/* Remove redundant entries from equivalence lists in onepart
   variables, canonicalizing equivalence sets into star shapes.
*/

int
canonicalize_values_star (variable **slot, dataflow_set *set)
{
  variable *var = *slot;
  decl_or_value dv = var->dv;
  location_chain *node;
  decl_or_value cdv;
  rtx val, cval;
  variable **cslot;
  bool has_value;
  bool has_marks;

  if (!var->onepart)
    return 1;

  gcc_checking_assert (var->n_var_parts == 1);

  if (dv_is_value_p (dv))
    {
      cval = dv_as_value (dv);
      /* Only values marked by canonicalize_values_mark (or by a push
	 below) need (re)visiting.  */
      if (!VALUE_RECURSED_INTO (cval))
	return 1;
      VALUE_RECURSED_INTO (cval) = false;
    }
  else
    cval = NULL_RTX;

 restart:
  val = cval;
  has_value = false;
  has_marks = false;

  gcc_assert (var->n_var_parts == 1);

  /* Find the most canonical VALUE in the chain.  */
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE)
      {
	has_value = true;
	if (VALUE_RECURSED_INTO (node->loc))
	  has_marks = true;
	if (canon_value_cmp (node->loc, cval))
	  cval = node->loc;
      }

  if (!has_value)
    return 1;

  if (cval == val)
    {
      /* This entry is already canonical; but marked neighbors of a
	 VALUE entry may still need processing.  */
      if (!has_marks || dv_is_decl_p (dv))
	return 1;

      /* Keep it marked so that we revisit it, either after visiting a
	 child node, or after visiting a new parent that might be
	 found out.  */
      VALUE_RECURSED_INTO (val) = true;

      for (node = var->var_part[0].loc_chain; node; node = node->next)
	if (GET_CODE (node->loc) == VALUE
	    && VALUE_RECURSED_INTO (node->loc))
	  {
	    cval = node->loc;
	  restart_with_cval:
	    VALUE_RECURSED_INTO (cval) = false;
	    dv = dv_from_value (cval);
	    slot = shared_hash_find_slot_noinsert (set->vars, dv);
	    if (!slot)
	      {
		gcc_assert (dv_is_decl_p (var->dv));
		/* The canonical value was reset and dropped.
		   Remove it.  */
		clobber_variable_part (set, NULL, var->dv, 0, NULL);
		return 1;
	      }
	    var = *slot;
	    gcc_assert (dv_is_value_p (var->dv));
	    if (var->n_var_parts == 0)
	      return 1;
	    gcc_assert (var->n_var_parts == 1);
	    goto restart;
	  }

      VALUE_RECURSED_INTO (val) = false;

      return 1;
    }

  /* Push values to the canonical one.  */
  cdv = dv_from_value (cval);
  cslot = shared_hash_find_slot_noinsert (set->vars, cdv);

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (node->loc != cval)
      {
	cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
			       node->init, NULL_RTX);
	if (GET_CODE (node->loc) == VALUE)
	  {
	    decl_or_value ndv = dv_from_value (node->loc);

	    set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
			       NO_INSERT);

	    if (canon_value_cmp (node->loc, val))
	      {
		/* If it could have been a local minimum, it's not any more,
		   since it's now neighbor to cval, so it may have to push
		   to it.  Conversely, if it wouldn't have prevailed over
		   val, then whatever mark it has is fine: if it was to
		   push, it will now push to a more canonical node, but if
		   it wasn't, then it has already pushed any values it might
		   have to.  */
		VALUE_RECURSED_INTO (node->loc) = true;
		/* Make sure we visit node->loc by ensuring we cval is
		   visited too.  */
		VALUE_RECURSED_INTO (cval) = true;
	      }
	    else if (!VALUE_RECURSED_INTO (node->loc))
	      /* If we have no need to "recurse" into this node, it's
		 already "canonicalized", so drop the link to the old
		 parent.  */
	      clobber_variable_part (set, cval, ndv, 0, NULL);
	  }
	else if (GET_CODE (node->loc) == REG)
	  {
	    attrs *list = set->regs[REGNO (node->loc)], **listp;

	    /* Change an existing attribute referring to dv so that it
	       refers to cdv, removing any duplicate this might
	       introduce, and checking that no previous duplicates
	       existed, all in a single pass.  */

	    while (list)
	      {
		if (list->offset == 0
		    && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
			|| dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
		  break;

		list = list->next;
	      }

	    gcc_assert (list);
	    if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
	      {
		/* Retarget the DV attribute to CDV, then drop any
		   pre-existing CDV duplicate further down the list.  */
		list->dv = cdv;
		for (listp = &list->next; (list = *listp); listp = &list->next)
		  {
		    if (list->offset)
		      continue;

		    if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
		      {
			*listp = list->next;
			delete list;
			list = *listp;
			break;
		      }

		    gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
		  }
	      }
	    else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
	      {
		/* CDV came first: drop the now-redundant DV entry.  */
		for (listp = &list->next; (list = *listp); listp = &list->next)
		  {
		    if (list->offset)
		      continue;

		    if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
		      {
			*listp = list->next;
			delete list;
			list = *listp;
			break;
		      }

		    gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
		  }
	      }
	    else
	      gcc_unreachable ();

	    /* Verify no further DV/CDV entries remain at offset 0.  */
	    if (flag_checking)
	      while (list)
		{
		  if (list->offset == 0
		      && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
			  || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
		    gcc_unreachable ();

		  list = list->next;
		}
	  }
      }

  if (val)
    set_slot_part (set, val, cslot, cdv, 0,
		   VAR_INIT_STATUS_INITIALIZED, NULL_RTX);

  slot = clobber_slot_part (set, cval, slot, 0, NULL);

  /* Variable may have been unshared.  */
  var = *slot;
  gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
		       && var->var_part[0].loc_chain->next == NULL);

  if (VALUE_RECURSED_INTO (cval))
    goto restart_with_cval;

  return 1;
}

/* Bind one-part variables to the canonical value in an equivalence
   set.  Not doing this causes dataflow convergence failure in rare
   circumstances, see PR42873.  Unfortunately we can't do this
   efficiently as part of canonicalize_values_star, since we may not
   have determined or even seen the canonical value of a set when we
   get to a variable that references another member of the set.  */

int
canonicalize_vars_star (variable **slot, dataflow_set *set)
{
  variable *var = *slot;
  decl_or_value dv = var->dv;
  location_chain *node;
  rtx cval;
  decl_or_value cdv;
  variable **cslot;
  variable *cvar;
  location_chain *cnode;

  if (!var->onepart || var->onepart == ONEPART_VALUE)
    return 1;

  gcc_assert (var->n_var_parts == 1);

  node = var->var_part[0].loc_chain;

  if (GET_CODE (node->loc) != VALUE)
    return 1;

  /* A onepart variable bound to a VALUE has exactly that one entry.  */
  gcc_assert (!node->next);
  cval = node->loc;

  /* Push values to the canonical one.  */
  cdv = dv_from_value (cval);
  cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
  if (!cslot)
    return 1;
  cvar = *cslot;
  gcc_assert (cvar->n_var_parts == 1);

  cnode = cvar->var_part[0].loc_chain;

  /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
     that are not more canonical than it.  */
  if (GET_CODE (cnode->loc) != VALUE
      || !canon_value_cmp (cnode->loc, cval))
    return 1;

  /* CVAL was found to be non-canonical.  Change the variable to point
     to the canonical VALUE.  */
  gcc_assert (!cnode->next);
  cval = cnode->loc;

  slot = set_slot_part (set, cval, slot, dv, 0,
			node->init, node->set_src);
  clobber_slot_part (set, cval, slot, 0, node->set_src);

  return 1;
}

/* Combine variable or value in *S1SLOT (in DSM->cur) with the
   corresponding entry in DSM->src.  Multi-part variables are combined
   with variable_union, whereas onepart dvs are combined with
   intersection.  */

static int
variable_merge_over_cur (variable *s1var, struct dfset_merge *dsm)
{
  dataflow_set *dst = dsm->dst;
  variable **dstslot;
  variable *s2var, *dvar = NULL;
  decl_or_value dv = s1var->dv;
  onepart_enum onepart = s1var->onepart;
  rtx val;
  hashval_t dvhash;
  location_chain *node, **nodep;

  /* If the incoming onepart variable has an empty location list, then
     the intersection will be just as empty.  For other variables,
     it's always union.  */
  gcc_checking_assert (s1var->n_var_parts
		       && s1var->var_part[0].loc_chain);

  if (!onepart)
    return variable_union (s1var, dst);

  gcc_checking_assert (s1var->n_var_parts == 1);

  dvhash = dv_htab_hash (dv);
  if (dv_is_value_p (dv))
    val = dv_as_value (dv);
  else
    val = NULL;

  s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
  if (!s2var)
    {
      /* Absent from SRC: the intersection is empty; DST loses an
	 entry relative to CUR, so it cannot share CUR's table.  */
      dst_can_be_shared = false;
      return 1;
    }

  dsm->src_onepart_cnt--;
  gcc_assert (s2var->var_part[0].loc_chain
	      && s2var->onepart == onepart
	      && s2var->n_var_parts == 1);

  dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
  if (dstslot)
    {
      dvar = *dstslot;
      gcc_assert (dvar->refcount == 1
		  && dvar->onepart == onepart
		  && dvar->n_var_parts == 1);
      nodep = &dvar->var_part[0].loc_chain;
    }
  else
    {
      /* Accumulate the intersection in a local chain until we know
	 whether a DST entry is needed at all.  */
      nodep = &node;
      node = NULL;
    }

  if (!dstslot && !onepart_variable_different_p (s1var, s2var))
    {
      /* CUR and SRC agree exactly: share S2VAR in DST.  */
      dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
						 dvhash, INSERT);
      *dstslot = dvar = s2var;
      dvar->refcount++;
    }
  else
    {
      dst_can_be_shared = false;

      intersect_loc_chains (val, nodep, dsm,
			    s1var->var_part[0].loc_chain, s2var);

      if (!dstslot)
	{
	  if (node)
	    {
	      /* Materialize a fresh DST variable for the non-empty
		 intersection.  */
	      dvar = onepart_pool_allocate (onepart);
	      dvar->dv = dv;
	      dvar->refcount = 1;
	      dvar->n_var_parts = 1;
	      dvar->onepart = onepart;
	      dvar->in_changed_variables = false;
	      dvar->var_part[0].loc_chain = node;
	      dvar->var_part[0].cur_loc = NULL;
	      if (onepart)
		VAR_LOC_1PAUX (dvar) = NULL;
	      else
		VAR_PART_OFFSET (dvar, 0) = 0;

	      dstslot
		= shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
						   INSERT);
	      gcc_assert (!*dstslot);
	      *dstslot = dvar;
	    }
	  else
	    return 1;
	}
    }

  nodep = &dvar->var_part[0].loc_chain;
  while ((node = *nodep))
    {
      location_chain **nextp = &node->next;

      if (GET_CODE (node->loc) == REG)
	{
	  attrs *list;

	  for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
	    if (GET_MODE (node->loc) == GET_MODE (list->loc)
		&& dv_is_value_p (list->dv))
	      break;

	  if (!list)
	    attrs_list_insert (&dst->regs[REGNO (node->loc)],
			       dv, 0, node->loc);
	  /* If this value became canonical for another value that had
	     this register, we want to leave it alone.  */
	  else if (dv_as_value (list->dv) != val)
	    {
	      dstslot = set_slot_part (dst, dv_as_value (list->dv),
				       dstslot, dv, 0,
				       node->init, NULL_RTX);
	      dstslot = delete_slot_part (dst, node->loc, dstslot, 0);

	      /* Since nextp points into the removed node, we can't
		 use it.  The pointer to the next node moved to nodep.
		 However, if the variable we're walking is unshared
		 during our walk, we'll keep walking the location list
		 of the previously-shared variable, in which case the
		 node won't have been removed, and we'll want to skip
		 it.  That's why we test *nodep here.  */
	      if (*nodep != node)
		nextp = nodep;
	    }
	}
      else
	/* Canonicalization puts registers first, so we don't have to
	   walk it all.  */
	break;
      nodep = nextp;
    }

  if (dvar != *dstslot)
    dvar = *dstslot;
  nodep = &dvar->var_part[0].loc_chain;

  if (val)
    {
      /* Mark all referenced nodes for canonicalization, and make sure
	 we have mutual equivalence links.  */
      VALUE_RECURSED_INTO (val) = true;
      for (node = *nodep; node; node = node->next)
	if (GET_CODE (node->loc) == VALUE)
	  {
	    VALUE_RECURSED_INTO (node->loc) = true;
	    set_variable_part (dst, val, dv_from_value (node->loc), 0,
			       node->init, NULL, INSERT);
	  }

      dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
      gcc_assert (*dstslot == dvar);
      canonicalize_values_star (dstslot, dst);
      gcc_checking_assert (dstslot
			   == shared_hash_find_slot_noinsert_1 (dst->vars,
								dv, dvhash));
      dvar = *dstslot;
    }
  else
    {
      bool has_value = false, has_other = false;

      /* If we have one value and anything else, we're going to
	 canonicalize this, so make sure all values have an entry in
	 the table and are marked for canonicalization.  */
      for (node = *nodep; node; node = node->next)
	{
	  if (GET_CODE (node->loc) == VALUE)
	    {
	      /* If this was marked during register canonicalization,
		 we know we have to canonicalize values.  */
	      if (has_value)
		has_other = true;
	      has_value = true;
	      if (has_other)
		break;
	    }
	  else
	    {
	      has_other = true;
	      if (has_value)
		break;
	    }
	}

      if (has_value && has_other)
	{
	  for (node = *nodep; node; node = node->next)
	    {
	      if (GET_CODE (node->loc) == VALUE)
		{
		  decl_or_value dv = dv_from_value (node->loc);
		  variable **slot = NULL;

		  if (shared_hash_shared (dst->vars))
		    slot = shared_hash_find_slot_noinsert (dst->vars, dv);
		  if (!slot)
		    slot = shared_hash_find_slot_unshare (&dst->vars, dv,
							  INSERT);
		  if (!*slot)
		    {
		      /* Create an empty entry so the VALUE exists in
			 the table before canonicalization.  */
		      variable *var = onepart_pool_allocate (ONEPART_VALUE);
		      var->dv = dv;
		      var->refcount = 1;
		      var->n_var_parts = 1;
		      var->onepart = ONEPART_VALUE;
		      var->in_changed_variables = false;
		      var->var_part[0].loc_chain = NULL;
		      var->var_part[0].cur_loc = NULL;
		      VAR_LOC_1PAUX (var) = NULL;
		      *slot = var;
		    }

		  VALUE_RECURSED_INTO (node->loc) = true;
		}
	    }

	  dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
	  gcc_assert (*dstslot == dvar);
	  canonicalize_values_star (dstslot, dst);
	  gcc_checking_assert (dstslot
			       == shared_hash_find_slot_noinsert_1 (dst->vars,
								    dv, dvhash));
	  dvar = *dstslot;
	}
    }

  /* If the result equals one of the inputs, share that input instead
     of keeping a private copy.  */
  if (!onepart_variable_different_p (dvar, s2var))
    {
      variable_htab_free (dvar);
      *dstslot = dvar = s2var;
      dvar->refcount++;
    }
  else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
    {
      variable_htab_free (dvar);
      *dstslot = dvar = s1var;
      dvar->refcount++;
      dst_can_be_shared = false;
    }
  else
    dst_can_be_shared = false;

  return 1;
}

/* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
   multi-part variable.  Unions of multi-part variables and
   intersections of one-part ones will be handled in
   variable_merge_over_cur().  */

static int
variable_merge_over_src (variable *s2var, struct dfset_merge *dsm)
{
  dataflow_set *dst = dsm->dst;
  decl_or_value dv = s2var->dv;

  if (!s2var->onepart)
    {
      variable **dstp = shared_hash_find_slot (dst->vars, dv);
      *dstp = s2var;
      s2var->refcount++;
      return 1;
    }

  /* Onepart dvs are intersected later from the CUR side; just count
     them so the caller knows whether DST can share CUR's table.  */
  dsm->src_onepart_cnt++;
  return 1;
}

/* Combine dataflow set information from SRC2 into DST; the previous
   contents of DST participate in the merge as the current set.
/* Combine dataflow set information from SRC2 into DST.  DST's
   previous contents serve as the first merge input (SRC1).
   NOTE(review): the historical comment mentions a PDST argument, but
   none is taken here — presumably cross-pass carry-over happens in
   dataflow_post_merge_adjust via its PERMP argument; confirm.  */

static void
dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
{
  /* Move DST's old contents aside so DST can be rebuilt from scratch
     as the union/intersection of CUR (old DST) and SRC2.  */
  dataflow_set cur = *dst;
  dataflow_set *src1 = &cur;
  struct dfset_merge dsm;
  int i;
  size_t src1_elems, src2_elems;
  variable_iterator_type hi;
  variable *var;

  src1_elems = shared_hash_htab (src1->vars)->elements ();
  src2_elems = shared_hash_htab (src2->vars)->elements ();
  dataflow_set_init (dst);
  dst->stack_adjust = cur.stack_adjust;
  /* Replace the freshly initialized shared hash with a private one
     sized for the larger input, to avoid rehashing during the merge.  */
  shared_hash_destroy (dst->vars);
  dst->vars = new shared_hash;
  dst->vars->refcount = 1;
  dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);

  dsm.dst = dst;
  dsm.src = src2;
  dsm.cur = src1;
  dsm.src_onepart_cnt = 0;

  /* First pass: copy multi-part variables from SRC2 (one-part ones
     are only counted); second pass: merge CUR's variables in.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
			       var, variable, hi)
    variable_merge_over_src (var, &dsm);
  FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
			       var, variable, hi)
    variable_merge_over_cur (var, &dsm);

  /* One-part variables present only in SRC2 were dropped by the
     intersection, so DST cannot alias either input.  */
  if (dsm.src_onepart_cnt)
    dst_can_be_shared = false;

  dataflow_set_destroy (src1);
}

/* Mark register equivalences.  */
/* Mark register equivalences.  For each hard register with multiple
   0-offset VALUEs bound to it, pick a canonical VALUE per machine
   mode and record two-way equivalences between it and the other
   entries, then star-canonicalize the affected variables.  */

static void
dataflow_set_equiv_regs (dataflow_set *set)
{
  int i;
  attrs *list, **listp;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      /* Canonical VALUE chosen for this register, per mode.  */
      rtx canon[NUM_MACHINE_MODES];

      /* If the list is empty or one entry, no need to canonicalize
	 anything.  */
      if (set->regs[i] == NULL || set->regs[i]->next == NULL)
	continue;

      memset (canon, 0, sizeof (canon));

      /* Pass 1: elect the canonical VALUE for each mode.  */
      for (list = set->regs[i]; list; list = list->next)
	if (list->offset == 0 && dv_is_value_p (list->dv))
	  {
	    rtx val = dv_as_value (list->dv);
	    rtx *cvalp = &canon[(int)GET_MODE (val)];
	    rtx cval = *cvalp;

	    if (canon_value_cmp (val, cval))
	      *cvalp = val;
	  }

      /* Pass 2: record equivalences between each entry and the
	 canonical VALUE, marking both sides with
	 VALUE_RECURSED_INTO for the canonicalization pass below.  */
      for (list = set->regs[i]; list; list = list->next)
	if (list->offset == 0 && dv_onepart_p (list->dv))
	  {
	    rtx cval = canon[(int)GET_MODE (list->loc)];

	    if (!cval)
	      continue;

	    if (dv_is_value_p (list->dv))
	      {
		rtx val = dv_as_value (list->dv);

		if (val == cval)
		  continue;

		VALUE_RECURSED_INTO (val) = true;
		set_variable_part (set, val, dv_from_value (cval), 0,
				   VAR_INIT_STATUS_INITIALIZED,
				   NULL, NO_INSERT);
	      }

	    VALUE_RECURSED_INTO (cval) = true;
	    set_variable_part (set, cval, list->dv, 0,
			       VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
	  }

      /* Pass 3: star-canonicalize every marked entry.
	 canonicalize_values_star may remove LIST from the attrs
	 chain; if *LISTP changed, LIST is dead, so null it out to
	 keep LISTP in place for the next iteration.  */
      for (listp = &set->regs[i]; (list = *listp);
	   listp = list ? &list->next : listp)
	if (list->offset == 0 && dv_onepart_p (list->dv))
	  {
	    rtx cval = canon[(int)GET_MODE (list->loc)];
	    variable **slot;

	    if (!cval)
	      continue;

	    if (dv_is_value_p (list->dv))
	      {
		rtx val = dv_as_value (list->dv);
		if (!VALUE_RECURSED_INTO (val))
		  continue;
	      }

	    slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
	    canonicalize_values_star (slot, set);
	    if (*listp != list)
	      list = NULL;
	  }
    }
}

/* Remove any redundant values in the location list of VAR, which must
   be unshared and 1-part.  */

static void
remove_duplicate_values (variable *var)
{
  location_chain *node, **nodep;

  gcc_assert (var->onepart);
  gcc_assert (var->n_var_parts == 1);
  gcc_assert (var->refcount == 1);

  /* First walk: mark each VALUE the first time it is seen; a VALUE
     already marked is a duplicate and is unlinked.  */
  for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
    {
      if (GET_CODE (node->loc) == VALUE)
	{
	  if (VALUE_RECURSED_INTO (node->loc))
	    {
	      /* Remove duplicate value node.  */
	      *nodep = node->next;
	      delete node;
	      continue;
	    }
	  else
	    VALUE_RECURSED_INTO (node->loc) = true;
	}
      nodep = &node->next;
    }

  /* Second walk: clear the marks so the flag is free for reuse.  */
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE)
      {
	gcc_assert (VALUE_RECURSED_INTO (node->loc));
	VALUE_RECURSED_INTO (node->loc) = false;
      }
}


/* Hash table iteration argument passed to variable_post_merge.  */
struct dfset_post_merge
{
  /* The new input set for the current block.  */
  dataflow_set *set;
  /* Pointer to the permanent input set for the current block, or
     NULL.  */
  dataflow_set **permp;
};

/* Create values for incoming expressions associated with one-part
   variables that don't have value numbers for them.  */
/* Create values for incoming expressions associated with one-part
   variables that don't have value numbers for them.  Hash-table
   traversal callback; always returns 1 to continue.  */

int
variable_post_merge_new_vals (variable **slot, dfset_post_merge *dfpm)
{
  dataflow_set *set = dfpm->set;
  variable *var = *slot;
  location_chain *node;

  if (!var->onepart || !var->n_var_parts)
    return 1;

  gcc_assert (var->n_var_parts == 1);

  if (dv_is_decl_p (var->dv))
    {
      bool check_dupes = false;

    restart:
      for (node = var->var_part[0].loc_chain; node; node = node->next)
	{
	  if (GET_CODE (node->loc) == VALUE)
	    gcc_assert (!VALUE_RECURSED_INTO (node->loc));
	  else if (GET_CODE (node->loc) == REG)
	    {
	      attrs *att, **attp, **curp = NULL;

	      /* We are about to modify the location chain, so make
		 sure we own it; restart since unsharing copied it.  */
	      if (var->refcount != 1)
		{
		  slot = unshare_variable (set, slot, var,
					   VAR_INIT_STATUS_INITIALIZED);
		  var = *slot;
		  goto restart;
		}

	      /* Look for a same-mode VALUE already bound to this
		 register; remember the attr for our own decl.  */
	      for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
		   attp = &att->next)
		if (att->offset == 0
		    && GET_MODE (att->loc) == GET_MODE (node->loc))
		  {
		    if (dv_is_value_p (att->dv))
		      {
			rtx cval = dv_as_value (att->dv);
			node->loc = cval;
			check_dupes = true;
			break;
		      }
		    else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
		      curp = attp;
		  }

	      /* If the decl's own attr wasn't seen before the break
		 above, find it in the rest of the list.  */
	      if (!curp)
		{
		  curp = attp;
		  while (*curp)
		    if ((*curp)->offset == 0
			&& GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
			&& dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
		      break;
		    else
		      curp = &(*curp)->next;
		  gcc_assert (*curp);
		}

	      /* No VALUE for this register yet: reuse one from the
		 permanent set, or create a fresh cselib value.  */
	      if (!att)
		{
		  decl_or_value cdv;
		  rtx cval;

		  if (!*dfpm->permp)
		    {
		      *dfpm->permp = XNEW (dataflow_set);
		      dataflow_set_init (*dfpm->permp);
		    }

		  for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
		       att; att = att->next)
		    if (GET_MODE (att->loc) == GET_MODE (node->loc))
		      {
			gcc_assert (att->offset == 0
				    && dv_is_value_p (att->dv));
			val_reset (set, att->dv);
			break;
		      }

		  if (att)
		    {
		      cdv = att->dv;
		      cval = dv_as_value (cdv);
		    }
		  else
		    {
		      /* Create a unique value to hold this register,
			 that ought to be found and reused in
			 subsequent rounds.  */
		      cselib_val *v;
		      gcc_assert (!cselib_lookup (node->loc,
						  GET_MODE (node->loc), 0,
						  VOIDmode));
		      v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
					 VOIDmode);
		      cselib_preserve_value (v);
		      cselib_invalidate_rtx (node->loc);
		      cval = v->val_rtx;
		      cdv = dv_from_value (cval);
		      if (dump_file)
			fprintf (dump_file,
				 "Created new value %u:%u for reg %i\n",
				 v->uid, v->hash, REGNO (node->loc));
		    }

		  var_reg_decl_set (*dfpm->permp, node->loc,
				    VAR_INIT_STATUS_INITIALIZED,
				    cdv, 0, NULL, INSERT);

		  node->loc = cval;
		  check_dupes = true;
		}

	      /* Remove attribute referring to the decl, which now
		 uses the value for the register, already existing or
		 to be added when we bring perm in.  */
	      att = *curp;
	      *curp = att->next;
	      delete att;
	    }
	}

      if (check_dupes)
	remove_duplicate_values (var);
    }

  return 1;
}

/* Reset values in the permanent set that are not associated with the
   chosen expression.  Hash-table traversal callback; always returns
   1 to continue.  */

int
variable_post_merge_perm_vals (variable **pslot, dfset_post_merge *dfpm)
{
  dataflow_set *set = dfpm->set;
  variable *pvar = *pslot, *var;
  location_chain *pnode;
  decl_or_value dv;
  attrs *att;

  /* Permanent-set entries are VALUEs with exactly one REG location.  */
  gcc_assert (dv_is_value_p (pvar->dv)
	      && pvar->n_var_parts == 1);
  pnode = pvar->var_part[0].loc_chain;
  gcc_assert (pnode
	      && !pnode->next
	      && REG_P (pnode->loc));

  dv = pvar->dv;

  var = shared_hash_find (set->vars, dv);
  if (var)
    {
      /* Although variable_post_merge_new_vals may have made decls
	 non-star-canonical, values that pre-existed in canonical form
	 remain canonical, and newly-created values reference a single
	 REG, so they are canonical as well.  Since VAR has the
	 location list for a VALUE, using find_loc_in_1pdv for it is
	 fine, since VALUEs don't map back to DECLs.  */
      if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
	return 1;
      val_reset (set, dv);
    }

  for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
    if (att->offset == 0
	&& GET_MODE (att->loc) == GET_MODE (pnode->loc)
	&& dv_is_value_p (att->dv))
      break;

  /* If there is a value associated with this register already, create
     an equivalence.  */
  if (att && dv_as_value (att->dv) != dv_as_value (dv))
    {
      rtx cval = dv_as_value (att->dv);
      set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
      set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
			 NULL, INSERT);
    }
  else if (!att)
    {
      /* Otherwise bind the permanent VALUE to the register here too.  */
      attrs_list_insert (&set->regs[REGNO (pnode->loc)],
			 dv, 0, pnode->loc);
      variable_union (pvar, set);
    }

  return 1;
}

/* Just checking stuff and registering register attributes for
   now.  */
/* Just checking stuff and registering register attributes for
   now.  Runs the post-merge fixups over SET: create VALUEs for
   decl-bound registers, sync with the permanent set *PERMP (if any),
   then star-canonicalize values and variables.  */

static void
dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
{
  struct dfset_post_merge dfpm;

  dfpm.set = set;
  dfpm.permp = permp;

  /* Order matters: new values must exist before the permanent set is
     reconciled, and canonicalization runs last over the result.  */
  shared_hash_htab (set->vars)
    ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
  if (*permp)
    shared_hash_htab ((*permp)->vars)
      ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
  shared_hash_htab (set->vars)
    ->traverse <dataflow_set *, canonicalize_values_star> (set);
  shared_hash_htab (set->vars)
    ->traverse <dataflow_set *, canonicalize_vars_star> (set);
}

/* Return a node whose loc is a MEM that refers to EXPR in the
   location list of a one-part variable or value VAR, or in that of
   any values recursively mentioned in the location lists.  */
/* Return a node whose loc is a MEM that refers to EXPR in the
   location list of a one-part variable or value VAL, or in that of
   any values recursively mentioned in the location lists.  VARS maps
   VALUEs to their variables.  Returns NULL if no such MEM is found.  */

static location_chain *
find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
{
  location_chain *node;
  decl_or_value dv;
  variable *var;
  location_chain *where = NULL;

  if (!val)
    return NULL;

  gcc_assert (GET_CODE (val) == VALUE
	      && !VALUE_RECURSED_INTO (val));

  dv = dv_from_value (val);
  var = vars->find_with_hash (dv, dv_htab_hash (dv));

  if (!var)
    return NULL;

  gcc_assert (var->onepart);

  if (!var->n_var_parts)
    return NULL;

  /* Guard against cycles among mutually-referencing VALUEs while we
     recurse; cleared again before returning.  */
  VALUE_RECURSED_INTO (val) = true;

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (MEM_P (node->loc)
	&& MEM_EXPR (node->loc) == expr
	&& int_mem_offset (node->loc) == 0)
      {
	where = node;
	break;
      }
    else if (GET_CODE (node->loc) == VALUE
	     && !VALUE_RECURSED_INTO (node->loc)
	     && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
      break;

  VALUE_RECURSED_INTO (val) = false;

  return where;
}

/* Return TRUE if the value of MEM may vary across a call.  */
/* Return TRUE if the value of MEM may vary across a call.  A MEM
   without a known, non-aliased, function-local base decl is assumed
   to die.  */

static bool
mem_dies_at_call (rtx mem)
{
  tree expr = MEM_EXPR (mem);
  tree decl;

  if (!expr)
    return true;

  decl = get_base_address (expr);

  if (!decl)
    return true;

  if (!DECL_P (decl))
    return true;

  return (may_be_aliased (decl)
	  || (!TREE_READONLY (decl) && is_global_var (decl)));
}

/* Remove all MEMs from the location list of a hash table entry for a
   one-part variable, except those whose MEM attributes map back to
   the variable itself, directly or within a VALUE.  Hash-table
   traversal callback; always returns 1 to continue.  */

int
dataflow_set_preserve_mem_locs (variable **slot, dataflow_set *set)
{
  variable *var = *slot;

  if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
    {
      tree decl = dv_as_decl (var->dv);
      location_chain *loc, **locp;
      bool changed = false;

      if (!var->n_var_parts)
	return 1;

      gcc_assert (var->n_var_parts == 1);

      if (shared_var_p (var, set->vars))
	{
	  /* Scan first: only unshare the variable if some location
	     will actually be removed or replaced.  */
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    {
	      /* We want to remove dying MEMs that don't refer to DECL.  */
	      if (GET_CODE (loc->loc) == MEM
		  && (MEM_EXPR (loc->loc) != decl
		      || int_mem_offset (loc->loc) != 0)
		  && mem_dies_at_call (loc->loc))
		break;
	      /* We want to move here MEMs that do refer to DECL.  */
	      else if (GET_CODE (loc->loc) == VALUE
		       && find_mem_expr_in_1pdv (decl, loc->loc,
						 shared_hash_htab (set->vars)))
		break;
	    }

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
	  var = *slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  rtx old_loc = loc->loc;
	  if (GET_CODE (old_loc) == VALUE)
	    {
	      location_chain *mem_node
		= find_mem_expr_in_1pdv (decl, loc->loc,
					 shared_hash_htab (set->vars));

	      /* ??? This picks up only one out of multiple MEMs that
		 refer to the same variable.  Do we ever need to be
		 concerned about dealing with more than one, or, given
		 that they should all map to the same variable
		 location, their addresses will have been merged and
		 they will be regarded as equivalent?  */
	      if (mem_node)
		{
		  loc->loc = mem_node->loc;
		  loc->set_src = mem_node->set_src;
		  loc->init = MIN (loc->init, mem_node->init);
		}
	    }

	  /* Keep the location unless it is a dying MEM that does not
	     refer to DECL at offset 0.  */
	  if (GET_CODE (loc->loc) != MEM
	      || (MEM_EXPR (loc->loc) == decl
		  && int_mem_offset (loc->loc) == 0)
	      || !mem_dies_at_call (loc->loc))
	    {
	      if (old_loc != loc->loc && emit_notes)
		{
		  if (old_loc == var->var_part[0].cur_loc)
		    {
		      changed = true;
		      var->var_part[0].cur_loc = NULL;
		    }
		}
	      locp = &loc->next;
	      continue;
	    }

	  if (emit_notes)
	    {
	      if (old_loc == var->var_part[0].cur_loc)
		{
		  changed = true;
		  var->var_part[0].cur_loc = NULL;
		}
	    }
	  *locp = loc->next;
	  delete loc;
	}

      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return 1;
}

/* Remove all MEMs from the location list of a hash table entry for a
   onepart variable.  */
/* Remove all MEMs from the location list of a hash table entry for a
   onepart variable.  Hash-table traversal callback; always returns 1
   to continue.  */

int
dataflow_set_remove_mem_locs (variable **slot, dataflow_set *set)
{
  variable *var = *slot;

  if (var->onepart != NOT_ONEPART)
    {
      location_chain *loc, **locp;
      bool changed = false;
      rtx cur_loc;

      gcc_assert (var->n_var_parts == 1);

      if (shared_var_p (var, set->vars))
	{
	  /* Scan first: only unshare if a dying MEM is present.  */
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    if (GET_CODE (loc->loc) == MEM
		&& mem_dies_at_call (loc->loc))
	      break;

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
	  var = *slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      if (VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[0].cur_loc;

      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  if (GET_CODE (loc->loc) != MEM
	      || !mem_dies_at_call (loc->loc))
	    {
	      locp = &loc->next;
	      continue;
	    }

	  *locp = loc->next;
	  /* If we have deleted the location which was last emitted
	     we have to emit new location so add the variable to set
	     of changed variables.  */
	  if (cur_loc == loc->loc)
	    {
	      changed = true;
	      var->var_part[0].cur_loc = NULL;
	      if (VAR_LOC_1PAUX (var))
		VAR_LOC_FROM (var) = NULL;
	    }
	  delete loc;
	}

      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return 1;
}

/* Remove all variable-location information about call-clobbered
   registers, as well as associations between MEMs and VALUEs.  */

static void
dataflow_set_clear_at_call (dataflow_set *set, rtx_insn *call_insn)
{
  unsigned int r;
  hard_reg_set_iterator hrsi;

  HARD_REG_SET callee_clobbers
    = insn_callee_abi (call_insn).full_reg_clobbers ();

  EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, r, hrsi)
    var_regno_delete (set, r);

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      /* TRAVERSED_VARS is set around each traversal so callbacks can
	 tell whether SET->vars is being iterated (see shared_var_p
	 users); cleared again afterwards.  */
      set->traversed_vars = set->vars;
      shared_hash_htab (set->vars)
	->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
      set->traversed_vars = set->vars;
      shared_hash_htab (set->vars)
	->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
      set->traversed_vars = NULL;
    }
}

/* Return true if some location in VP1 has no equivalent (same REGNO
   for registers, or rtx_equal_p otherwise) in VP2; i.e. VP1's
   locations are not a subset of VP2's.  */

static bool
variable_part_different_p (variable_part *vp1, variable_part *vp2)
{
  location_chain *lc1, *lc2;

  for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
    {
      for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
	{
	  if (REG_P (lc1->loc) && REG_P (lc2->loc))
	    {
	      if (REGNO (lc1->loc) == REGNO (lc2->loc))
		break;
	    }
	  if (rtx_equal_p (lc1->loc, lc2->loc))
	    break;
	}
      if (!lc2)
	return true;
    }
  return false;
}

/* Return true if one-part variables VAR1 and VAR2 are different.
   They must be in canonical order.  */

static bool
onepart_variable_different_p (variable *var1, variable *var2)
{
  location_chain *lc1, *lc2;

  if (var1 == var2)
    return false;

  gcc_assert (var1->n_var_parts == 1
	      && var2->n_var_parts == 1);

  lc1 = var1->var_part[0].loc_chain;
  lc2 = var2->var_part[0].loc_chain;

  gcc_assert (lc1 && lc2);

  /* Canonical order allows a single lockstep walk; any mismatch or
     length difference means the variables differ.  */
  while (lc1 && lc2)
    {
      if (loc_cmp (lc1->loc, lc2->loc))
	return true;
      lc1 = lc1->next;
      lc2 = lc2->next;
    }

  return lc1 != lc2;
}

/* Dump to dump_file the differences between the location lists of
   one-part variables VAR1 and VAR2.  They must be in canonical
   order.  */
*/ 4984 1.1 mrg 4985 1.1 mrg static void 4986 1.1 mrg dump_onepart_variable_differences (variable *var1, variable *var2) 4987 1.1 mrg { 4988 1.1 mrg location_chain *lc1, *lc2; 4989 1.1 mrg 4990 1.1 mrg gcc_assert (var1 != var2); 4991 1.1 mrg gcc_assert (dump_file); 4992 1.1 mrg gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)); 4993 1.1 mrg gcc_assert (var1->n_var_parts == 1 4994 1.1 mrg && var2->n_var_parts == 1); 4995 1.1 mrg 4996 1.1 mrg lc1 = var1->var_part[0].loc_chain; 4997 1.1 mrg lc2 = var2->var_part[0].loc_chain; 4998 1.1 mrg 4999 1.1 mrg gcc_assert (lc1 && lc2); 5000 1.1 mrg 5001 1.1 mrg while (lc1 && lc2) 5002 1.1 mrg { 5003 1.1 mrg switch (loc_cmp (lc1->loc, lc2->loc)) 5004 1.1 mrg { 5005 1.1 mrg case -1: 5006 1.1 mrg fprintf (dump_file, "removed: "); 5007 1.1 mrg print_rtl_single (dump_file, lc1->loc); 5008 1.1 mrg lc1 = lc1->next; 5009 1.1 mrg continue; 5010 1.1 mrg case 0: 5011 1.1 mrg break; 5012 1.1 mrg case 1: 5013 1.1 mrg fprintf (dump_file, "added: "); 5014 1.1 mrg print_rtl_single (dump_file, lc2->loc); 5015 1.1 mrg lc2 = lc2->next; 5016 1.1 mrg continue; 5017 1.1 mrg default: 5018 1.1 mrg gcc_unreachable (); 5019 1.1 mrg } 5020 1.1 mrg lc1 = lc1->next; 5021 1.1 mrg lc2 = lc2->next; 5022 1.1 mrg } 5023 1.1 mrg 5024 1.1 mrg while (lc1) 5025 1.1 mrg { 5026 1.1 mrg fprintf (dump_file, "removed: "); 5027 1.1 mrg print_rtl_single (dump_file, lc1->loc); 5028 1.1 mrg lc1 = lc1->next; 5029 1.1 mrg } 5030 1.1 mrg 5031 1.1 mrg while (lc2) 5032 1.1 mrg { 5033 1.1 mrg fprintf (dump_file, "added: "); 5034 1.1 mrg print_rtl_single (dump_file, lc2->loc); 5035 1.1 mrg lc2 = lc2->next; 5036 1.1 mrg } 5037 1.1 mrg } 5038 1.1 mrg 5039 1.1 mrg /* Return true if variables VAR1 and VAR2 are different. 
*/ 5040 1.1 mrg 5041 1.1 mrg static bool 5042 1.1 mrg variable_different_p (variable *var1, variable *var2) 5043 1.1 mrg { 5044 1.1 mrg int i; 5045 1.1 mrg 5046 1.1 mrg if (var1 == var2) 5047 1.1 mrg return false; 5048 1.1 mrg 5049 1.1 mrg if (var1->onepart != var2->onepart) 5050 1.1 mrg return true; 5051 1.1 mrg 5052 1.1 mrg if (var1->n_var_parts != var2->n_var_parts) 5053 1.1 mrg return true; 5054 1.1 mrg 5055 1.1 mrg if (var1->onepart && var1->n_var_parts) 5056 1.1 mrg { 5057 1.1 mrg gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv) 5058 1.1 mrg && var1->n_var_parts == 1); 5059 1.1 mrg /* One-part values have locations in a canonical order. */ 5060 1.1 mrg return onepart_variable_different_p (var1, var2); 5061 1.1 mrg } 5062 1.1 mrg 5063 1.1 mrg for (i = 0; i < var1->n_var_parts; i++) 5064 1.1 mrg { 5065 1.1 mrg if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i)) 5066 1.1 mrg return true; 5067 1.1 mrg if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i])) 5068 1.1 mrg return true; 5069 1.1 mrg if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i])) 5070 1.1 mrg return true; 5071 1.1 mrg } 5072 1.1 mrg return false; 5073 1.1 mrg } 5074 1.1 mrg 5075 1.1 mrg /* Return true if dataflow sets OLD_SET and NEW_SET differ. 
/* Return true if dataflow sets OLD_SET and NEW_SET differ.  When
   dumping details, keeps going after the first difference so every
   difference gets reported.  */

static bool
dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
{
  variable_iterator_type hi;
  variable *var1;
  bool diffound = false;
  bool details = (dump_file && (dump_flags & TDF_DETAILS));

/* Record a difference: return immediately unless details are being
   dumped, in which case remember it and continue scanning.  */
#define RETRUE					\
  do						\
    {						\
      if (!details)				\
	return true;				\
      else					\
	diffound = true;			\
    }						\
  while (0)

  if (old_set->vars == new_set->vars)
    return false;

  if (shared_hash_htab (old_set->vars)->elements ()
      != shared_hash_htab (new_set->vars)->elements ())
    RETRUE;

  FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
			       var1, variable, hi)
    {
      variable_table_type *htab = shared_hash_htab (new_set->vars);
      variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));

      if (!var2)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "dataflow difference found: removal of:\n");
	      dump_var (var1);
	    }
	  RETRUE;
	}
      else if (variable_different_p (var1, var2))
	{
	  if (details)
	    {
	      fprintf (dump_file, "dataflow difference found: "
		       "old and new follow:\n");
	      dump_var (var1);
	      if (dv_onepart_p (var1->dv))
		dump_onepart_variable_differences (var1, var2);
	      dump_var (var2);
	    }
	  RETRUE;
	}
    }

  /* There's no need to traverse the second hashtab unless we want to
     print the details.  If both have the same number of elements and
     the second one had all entries found in the first one, then the
     second can't have any extra entries.  */
  if (!details)
    return diffound;

  FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (new_set->vars),
			       var1, variable, hi)
    {
      variable_table_type *htab = shared_hash_htab (old_set->vars);
      variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
      if (!var2)
	{
	  if (details)
	    {
	      fprintf (dump_file, "dataflow difference found: addition of:\n");
	      dump_var (var1);
	    }
	  RETRUE;
	}
    }

#undef RETRUE

  return diffound;
}

/* Free the contents of dataflow set SET.  */

static void
dataflow_set_destroy (dataflow_set *set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    attrs_list_clear (&set->regs[i]);

  shared_hash_destroy (set->vars);
  set->vars = NULL;
}

/* Return true if T is a tracked parameter with non-degenerate record type.  */
 */

static bool
tracked_record_parameter_p (tree t)
{
  /* Only PARM_DECLs qualify.  */
  if (TREE_CODE (t) != PARM_DECL)
    return false;

  /* BLKmode parameters are not tracked as a whole.  */
  if (DECL_MODE (t) == BLKmode)
    return false;

  tree type = TREE_TYPE (t);
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* "Non-degenerate" means the record has at least two fields; a
     zero- or one-field record is tracked like a scalar instead.  */
  if (TYPE_FIELDS (type) == NULL_TREE
      || DECL_CHAIN (TYPE_FIELDS (type)) == NULL_TREE)
    return false;

  return true;
}

/* Shall EXPR be tracked?  NEED_RTL requires EXPR to have a name and a
   DECL_RTL before it qualifies.  As a side effect, clears the
   DECL_CHANGED flag on EXPR (and its ultimate debug decl) when the
   answer is yes.  */

static bool
track_expr_p (tree expr, bool need_rtl)
{
  rtx decl_rtl;
  tree realdecl;

  if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
    return DECL_RTL_SET_P (expr);

  /* If EXPR is not a parameter or a variable do not track it.  */
  if (!VAR_P (expr) && TREE_CODE (expr) != PARM_DECL)
    return 0;

  /* It also must have a name...  */
  if (!DECL_NAME (expr) && need_rtl)
    return 0;

  /* ... and a RTL assigned to it.  */
  decl_rtl = DECL_RTL_IF_SET (expr);
  if (!decl_rtl && need_rtl)
    return 0;

  /* If this expression is really a debug alias of some other declaration, we
     don't need to track this expression if the ultimate declaration is
     ignored.  */
  realdecl = expr;
  if (VAR_P (realdecl) && DECL_HAS_DEBUG_EXPR_P (realdecl))
    {
      realdecl = DECL_DEBUG_EXPR (realdecl);
      if (!DECL_P (realdecl))
	{
	  if (handled_component_p (realdecl)
	      || (TREE_CODE (realdecl) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
	    {
	      HOST_WIDE_INT bitsize, bitpos;
	      bool reverse;
	      tree innerdecl
		= get_ref_base_and_extent_hwi (realdecl, &bitpos,
					       &bitsize, &reverse);
	      /* The 256-bit cap below bounds how far into the base
		 object a tracked alias may reach; larger components
		 are simply not tracked.  */
	      if (!innerdecl
		  || !DECL_P (innerdecl)
		  || DECL_IGNORED_P (innerdecl)
		  /* Do not track declarations for parts of tracked record
		     parameters since we want to track them as a whole.  */
		  || tracked_record_parameter_p (innerdecl)
		  || TREE_STATIC (innerdecl)
		  || bitsize == 0
		  || bitpos + bitsize > 256)
		return 0;
	      else
		realdecl = expr;
	    }
	  else
	    return 0;
	}
    }

  /* Do not track EXPR if REALDECL should be ignored for debugging
     purposes.  */
  if (DECL_IGNORED_P (realdecl))
    return 0;

  /* Do not track global variables until we are able to emit correct location
     list for them.  */
  if (TREE_STATIC (realdecl))
    return 0;

  /* When the EXPR is a DECL for alias of some variable (see example)
     the TREE_STATIC flag is not used.  Disable tracking all DECLs whose
     DECL_RTL contains SYMBOL_REF.

     Example:
     extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
     char **_dl_argv;
  */
  if (decl_rtl && MEM_P (decl_rtl)
      && contains_symbol_ref_p (XEXP (decl_rtl, 0)))
    return 0;

  /* If RTX is a memory it should not be very large (because it would be
     an array or struct).  */
  if (decl_rtl && MEM_P (decl_rtl))
    {
      /* Do not track structures and arrays.  */
      if ((GET_MODE (decl_rtl) == BLKmode
	   || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
	  && !tracked_record_parameter_p (realdecl))
	return 0;
      if (MEM_SIZE_KNOWN_P (decl_rtl)
	  && maybe_gt (MEM_SIZE (decl_rtl), MAX_VAR_PARTS))
	return 0;
    }

  /* EXPR qualifies; reset the "changed" flags so note emission starts
     from a clean state.  */
  DECL_CHANGED (expr) = 0;
  DECL_CHANGED (realdecl) = 0;
  return 1;
}

/* Determine whether a given LOC refers to the same variable part as
   EXPR+OFFSET.  */

static bool
same_variable_part_p (rtx loc, tree expr, poly_int64 offset)
{
  tree expr2;
  poly_int64 offset2;

  if (! DECL_P (expr))
    return false;

  /* Pull the decl/offset attributes recorded on the REG or MEM.  */
  if (REG_P (loc))
    {
      expr2 = REG_EXPR (loc);
      offset2 = REG_OFFSET (loc);
    }
  else if (MEM_P (loc))
    {
      expr2 = MEM_EXPR (loc);
      offset2 = int_mem_offset (loc);
    }
  else
    return false;

  if (! expr2 || ! DECL_P (expr2))
    return false;

  /* Compare through debug aliases so that a decl and its debug-expr
     stand-in count as the same variable.  */
  expr = var_debug_decl (expr);
  expr2 = var_debug_decl (expr2);

  return (expr == expr2 && known_eq (offset, offset2));
}

/* LOC is a REG or MEM that we would like to track if possible.
   If EXPR is null, we don't know what expression LOC refers to,
   otherwise it refers to EXPR + OFFSET.  STORE_REG_P is true if
   LOC is an lvalue register.

   Return true if EXPR is nonnull and if LOC, or some lowpart of it,
   is something we can track.  When returning true, store the mode of
   the lowpart we can track in *MODE_OUT (if nonnull) and its offset
   from EXPR in *OFFSET_OUT (if nonnull).  */

static bool
track_loc_p (rtx loc, tree expr, poly_int64 offset, bool store_reg_p,
	     machine_mode *mode_out, HOST_WIDE_INT *offset_out)
{
  machine_mode mode;

  if (expr == NULL || !track_expr_p (expr, true))
    return false;

  /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
     whole subreg, but only the old inner part is really relevant.  */
  mode = GET_MODE (loc);
  if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
    {
      machine_mode pseudo_mode;

      pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
      if (paradoxical_subreg_p (mode, pseudo_mode))
	{
	  offset += byte_lowpart_offset (pseudo_mode, mode);
	  mode = pseudo_mode;
	}
    }

  /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
     Do the same if we are storing to a register and EXPR occupies
     the whole of register LOC; in that case, the whole of EXPR is
     being changed.  We exclude complex modes from the second case
     because the real and imaginary parts are represented as separate
     pseudo registers, even if the whole complex value fits into one
     hard register.  */
  if ((paradoxical_subreg_p (mode, DECL_MODE (expr))
       || (store_reg_p
	   && !COMPLEX_MODE_P (DECL_MODE (expr))
	   && hard_regno_nregs (REGNO (loc), DECL_MODE (expr)) == 1))
      && known_eq (offset + byte_lowpart_offset (DECL_MODE (expr), mode), 0))
    {
      mode = DECL_MODE (expr);
      offset = 0;
    }

  /* The offset must be representable as a constant HOST_WIDE_INT for
     the variable-part machinery.  */
  HOST_WIDE_INT const_offset;
  if (!track_offset_p (offset, &const_offset))
    return false;

  if (mode_out)
    *mode_out = mode;
  if (offset_out)
    *offset_out = const_offset;
  return true;
}

/* Return the MODE lowpart of LOC, or null if LOC is not something we
   want to track.  When returning nonnull, make sure that the attributes
   on the returned value are updated.
 */

static rtx
var_lowpart (machine_mode mode, rtx loc)
{
  unsigned int regno;

  /* Already the right mode: nothing to do.  */
  if (GET_MODE (loc) == mode)
    return loc;

  /* Only REGs and MEMs have trackable lowparts.  */
  if (!REG_P (loc) && !MEM_P (loc))
    return NULL;

  poly_uint64 offset = byte_lowpart_offset (mode, GET_MODE (loc));

  if (MEM_P (loc))
    return adjust_address_nv (loc, mode, offset);

  /* For a REG, translate the lowpart byte offset into a hard-register
     number adjustment, then rebuild the REG with updated attributes.  */
  poly_uint64 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
  regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
					     reg_offset, mode);
  return gen_rtx_REG_offset (loc, mode, regno, offset);
}

/* Carry information about uses and stores while walking rtx.  */

struct count_use_info
{
  /* The insn where the RTX is.  */
  rtx_insn *insn;

  /* The basic block where insn is.  */
  basic_block bb;

  /* The array of n_sets sets in the insn, as determined by cselib.  */
  struct cselib_set *sets;
  int n_sets;

  /* True if we're counting stores, false otherwise.  */
  bool store_p;
};

/* Find a VALUE corresponding to X.  */

static inline cselib_val *
find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
{
  int i;

  if (cui->sets)
    {
      /* This is called after uses are set up and before stores are
	 processed by cselib, so it's safe to look up srcs, but not
	 dsts.  So we look up expressions that appear in srcs or in
	 dest expressions, but we search the sets array for dests of
	 stores.  */
      if (cui->store_p)
	{
	  /* Some targets represent memset and memcpy patterns
	     by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
	     (set (mem:BLK ...) (const_int ...)) or
	     (set (mem:BLK ...) (mem:BLK ...)).  Don't return anything
	     in that case, otherwise we end up with mode mismatches.  */
	  if (mode == BLKmode && MEM_P (x))
	    return NULL;
	  /* Dests must be matched against the sets recorded by cselib,
	     not looked up directly (see comment above).  */
	  for (i = 0; i < cui->n_sets; i++)
	    if (cui->sets[i].dest == x)
	      return cui->sets[i].src_elt;
	}
      else
	return cselib_lookup (x, mode, 0, VOIDmode);
    }

  return NULL;
}

/* Replace all registers and addresses in an expression with VALUE
   expressions that map back to them, unless the expression is a
   register.  If no mapping is or can be performed, returns NULL.  */

static rtx
replace_expr_with_values (rtx loc)
{
  /* Bare registers (and ENTRY_VALUEs) are deliberately left alone.  */
  if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
    return NULL;
  else if (MEM_P (loc))
    {
      /* Replace only the address; the MEM itself is preserved.  */
      cselib_val *addr = cselib_lookup (XEXP (loc, 0),
					get_address_mode (loc), 0,
					GET_MODE (loc));
      if (addr)
	return replace_equiv_address_nv (loc, addr->val_rtx);
      else
	return NULL;
    }
  else
    return cselib_subst_to_values (loc, VOIDmode);
}

/* Return true if X contains a DEBUG_EXPR.
 */

static bool
rtx_debug_expr_p (const_rtx x)
{
  /* Walk every sub-rtx of X looking for a DEBUG_EXPR leaf.  */
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (GET_CODE (*iter) == DEBUG_EXPR)
      return true;
  return false;
}

/* Determine what kind of micro operation to choose for a USE.  Return
   MO_CLOBBER if no micro operation is to be generated.  When MODEP is
   nonnull it receives the mode relevant for the returned kind.  */

static enum micro_operation_type
use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
{
  tree expr;

  /* With cselib sets available we can classify value-tracking
     micro-operations (MO_VAL_*); without them only the plain
     MO_USE/MO_USE_NO_VAR/MO_CLOBBER kinds below apply.  */
  if (cui && cui->sets)
    {
      if (GET_CODE (loc) == VAR_LOCATION)
	{
	  if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
	    {
	      rtx ploc = PAT_VAR_LOCATION_LOC (loc);
	      if (! VAR_LOC_UNKNOWN_P (ploc))
		{
		  cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
						   VOIDmode);

		  /* ??? flag_float_store and volatile mems are never
		     given values, but we could in theory use them for
		     locations.  */
		  /* The assert below is intentionally vacuous: it only
		     keeps VAL "used" while documenting that a null VAL
		     is acceptable here.  */
		  gcc_assert (val || 1);
		}
	      return MO_VAL_LOC;
	    }
	  else
	    return MO_CLOBBER;
	}

      if (REG_P (loc) || MEM_P (loc))
	{
	  if (modep)
	    *modep = GET_MODE (loc);
	  if (cui->store_p)
	    {
	      if (REG_P (loc)
		  || (find_use_val (loc, GET_MODE (loc), cui)
		      && cselib_lookup (XEXP (loc, 0),
					get_address_mode (loc), 0,
					GET_MODE (loc))))
		return MO_VAL_SET;
	    }
	  else
	    {
	      cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);

	      if (val && !cselib_preserved_value_p (val))
		return MO_VAL_USE;
	    }
	}
    }

  if (REG_P (loc))
    {
      /* Only hard registers remain at this point.  */
      gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);

      if (loc == cfa_base_rtx)
	return MO_CLOBBER;
      expr = REG_EXPR (loc);

      if (!expr)
	return MO_USE_NO_VAR;
      else if (target_for_debug_bind (var_debug_decl (expr)))
	return MO_CLOBBER;
      else if (track_loc_p (loc, expr, REG_OFFSET (loc),
			    false, modep, NULL))
	return MO_USE;
      else
	return MO_USE_NO_VAR;
    }
  else if (MEM_P (loc))
    {
      expr = MEM_EXPR (loc);

      if (!expr)
	return MO_CLOBBER;
      else if (target_for_debug_bind (var_debug_decl (expr)))
	return MO_CLOBBER;
      else if (track_loc_p (loc, expr, int_mem_offset (loc),
			    false, modep, NULL)
	       /* Multi-part variables shouldn't refer to one-part
		  variable names such as VALUEs (never happens) or
		  DEBUG_EXPRs (only happens in the presence of debug
		  insns).  */
	       && (!MAY_HAVE_DEBUG_BIND_INSNS
		   || !rtx_debug_expr_p (XEXP (loc, 0))))
	return MO_USE;
      else
	return MO_CLOBBER;
    }

  return MO_CLOBBER;
}

/* Log to OUT information about micro-operation MOPT involving X in
   INSN of BB.  */

static inline void
log_op_type (rtx x, basic_block bb, rtx_insn *insn,
	     enum micro_operation_type mopt, FILE *out)
{
  fprintf (out, "bb %i op %i insn %i %s ",
	   bb->index, VTI (bb)->mos.length (),
	   INSN_UID (insn), micro_operation_type_name[mopt]);
  print_inline_rtx (out, x, 2);
  fputc ('\n', out);
}

/* The following four macros overlay bookkeeping flags onto otherwise
   unused RTL flag bits of a CONCAT (volatil/used/jump/unchanging).  */

/* Tell whether the CONCAT used to hold a VALUE and its location
   needs value resolution, i.e., an attempt of mapping the location
   back to other incoming values.  */
#define VAL_NEEDS_RESOLUTION(x) \
  (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
/* Whether the location in the CONCAT is a tracked expression, that
   should also be handled like a MO_USE.  */
#define VAL_HOLDS_TRACK_EXPR(x) \
  (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
/* Whether the location in the CONCAT should be handled like a MO_COPY
   as well.  */
#define VAL_EXPR_IS_COPIED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
/* Whether the location in the CONCAT should be handled like a
   MO_CLOBBER as well.  */
#define VAL_EXPR_IS_CLOBBERED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)

/* All preserved VALUEs.
 */
static vec<rtx> preserved_values;

/* Ensure VAL is preserved and remember it in a vector for vt_emit_notes.  */

static void
preserve_value (cselib_val *val)
{
  cselib_preserve_value (val);
  preserved_values.safe_push (val->val_rtx);
}

/* Helper function for MO_VAL_LOC handling.  Return non-zero if
   any rtxes not suitable for CONST use not replaced by VALUEs
   are discovered.  */

static bool
non_suitable_const (const_rtx x)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    {
      /* Intentionally shadows the parameter: each sub-rtx is examined
	 in turn.  */
      const_rtx x = *iter;
      switch (GET_CODE (x))
	{
	case REG:
	case DEBUG_EXPR:
	case PC:
	case SCRATCH:
	case ASM_INPUT:
	case ASM_OPERANDS:
	  return true;
	case MEM:
	  /* Writable memory cannot participate in a CONST.  */
	  if (!MEM_READONLY_P (x))
	    return true;
	  break;
	default:
	  break;
	}
    }
  return false;
}

/* Add uses (register and memory references) LOC which will be tracked
   to VTI (bb)->mos.  */

static void
add_uses (rtx loc, struct count_use_info *cui)
{
  machine_mode mode = VOIDmode;
  enum micro_operation_type type = use_type (loc, cui, &mode);

  if (type != MO_CLOBBER)
    {
      basic_block bb = cui->bb;
      micro_operation mo;

      mo.type = type;
      mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
      mo.insn = cui->insn;

      if (type == MO_VAL_LOC)
	{
	  rtx oloc = loc;
	  rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
	  cselib_val *val;

	  gcc_assert (cui->sets);

	  /* If the location is a MEM with a non-trivial address,
	     preserve the VALUE of that address so it survives into
	     note emission.  */
	  if (MEM_P (vloc)
	      && !REG_P (XEXP (vloc, 0))
	      && !MEM_P (XEXP (vloc, 0)))
	    {
	      rtx mloc = vloc;
	      machine_mode address_mode = get_address_mode (mloc);
	      /* Shadows the outer VAL: this one is only for the
		 address.  */
	      cselib_val *val
		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
				 GET_MODE (mloc));

	      if (val && !cselib_preserved_value_p (val))
		preserve_value (val);
	    }

	  if (CONSTANT_P (vloc)
	      && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
	    /* For constants don't look up any value.  */;
	  else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
		   && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
	    {
	      machine_mode mode2;
	      enum micro_operation_type type2;
	      rtx nloc = NULL;
	      bool resolvable = REG_P (vloc) || MEM_P (vloc);

	      if (resolvable)
		nloc = replace_expr_with_values (vloc);

	      if (nloc)
		{
		  oloc = shallow_copy_rtx (oloc);
		  PAT_VAR_LOCATION_LOC (oloc) = nloc;
		}

	      /* Pair the VALUE with the (possibly rewritten)
		 VAR_LOCATION in a CONCAT; consumers split it apart.  */
	      oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);

	      type2 = use_type (vloc, 0, &mode2);

	      gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
			  || type2 == MO_CLOBBER);

	      if (type2 == MO_CLOBBER
		  && !cselib_preserved_value_p (val))
		{
		  VAL_NEEDS_RESOLUTION (oloc) = resolvable;
		  preserve_value (val);
		}
	    }
	  else if (!VAR_LOC_UNKNOWN_P (vloc))
	    {
	      /* No usable VALUE: degrade the note to "location
		 unknown".  */
	      oloc = shallow_copy_rtx (oloc);
	      PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
	    }

	  mo.u.loc = oloc;
	}
      else if (type == MO_VAL_USE)
	{
	  machine_mode mode2 = VOIDmode;
	  enum micro_operation_type type2;
	  cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
	  rtx vloc, oloc = loc, nloc;

	  gcc_assert (cui->sets);

	  /* As above, keep the address VALUE of a complex MEM address
	     alive.  */
	  if (MEM_P (oloc)
	      && !REG_P (XEXP (oloc, 0))
	      && !MEM_P (XEXP (oloc, 0)))
	    {
	      rtx mloc = oloc;
	      machine_mode address_mode = get_address_mode (mloc);
	      /* Shadows the outer VAL: address lookup only.  */
	      cselib_val *val
		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
				 GET_MODE (mloc));

	      if (val && !cselib_preserved_value_p (val))
		preserve_value (val);
	    }

	  type2 = use_type (loc, 0, &mode2);

	  gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
		      || type2 == MO_CLOBBER);

	  if (type2 == MO_USE)
	    vloc = var_lowpart (mode2, loc);
	  else
	    vloc = oloc;

	  /* The loc of a MO_VAL_USE may have two forms:

	     (concat val src): val is at src, a value-based
	     representation.

	     (concat (concat val use) src): same as above, with use as
	     the MO_USE tracked value, if it differs from src.

	  */

	  gcc_checking_assert (REG_P (loc) || MEM_P (loc));
	  nloc = replace_expr_with_values (loc);
	  if (!nloc)
	    nloc = oloc;

	  if (vloc != nloc)
	    oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
	  else
	    oloc = val->val_rtx;

	  mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);

	  if (type2 == MO_USE)
	    VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
	  if (!cselib_preserved_value_p (val))
	    {
	      VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
	      preserve_value (val);
	    }
	}
      else
	gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);

      if (dump_file && (dump_flags & TDF_DETAILS))
	log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
      VTI (bb)->mos.safe_push (mo);
    }
}

/* Helper function for finding all uses of REG/MEM in X in insn INSN.  */

static void
add_uses_1 (rtx *x, void *cui)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
    add_uses (*iter, (struct count_use_info *) cui);
}

/* This is the value used during expansion of locations.  We want it
   to be unbounded, so that variables expanded deep in a recursion
   nest are fully evaluated, so that their values are cached
   correctly.  We avoid recursion cycles through other means, and we
   don't unshare RTL, so excess complexity is not a problem.  */
#define EXPR_DEPTH (INT_MAX)
/* We use this to keep too-complex expressions from being emitted as
   location notes, and then to debug information.
   Users can trade
   compile time for ridiculously complex expressions, although they're
   seldom useful, and they may often have to be discarded as not
   representable anyway.  */
#define EXPR_USE_DEPTH (param_max_vartrack_expr_depth)

/* Attempt to reverse the EXPR operation in the debug info and record
   it in the cselib table.  Say for reg1 = reg2 + 6 even when reg2 is
   no longer live we can express its value as VAL - 6.  */

static void
reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
{
  rtx src, arg, ret;
  cselib_val *v;
  struct elt_loc_list *l;
  enum rtx_code code;
  int count;

  if (GET_CODE (expr) != SET)
    return;

  if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
    return;

  /* Only a small set of invertible operations is handled; anything
     else bails out.  */
  src = SET_SRC (expr);
  switch (GET_CODE (src))
    {
    case PLUS:
    case MINUS:
    case XOR:
    case NOT:
    case NEG:
      if (!REG_P (XEXP (src, 0)))
	return;
      break;
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
	return;
      break;
    default:
      return;
    }

  if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
    return;

  /* The operand's VALUE must exist and be preserved, otherwise the
     reverse expression would dangle.  */
  v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
  if (!v || !cselib_preserved_value_p (v))
    return;

  /* Use canonical V to avoid creating multiple redundant expressions
     for different VALUES equivalent to V.  */
  v = canonical_cselib_val (v);

  /* Adding a reverse op isn't useful if V already has an always valid
     location.  Ignore ENTRY_VALUE, while it is always constant, we should
     prefer non-ENTRY_VALUE locations whenever possible.  */
  for (l = v->locs, count = 0; l; l = l->next, count++)
    if (CONSTANT_P (l->loc)
	&& (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
      return;
    /* Avoid creating too large locs lists.  */
    else if (count == param_max_vartrack_reverse_op_size)
      return;

  /* Build the inverse expression in terms of VAL.  */
  switch (GET_CODE (src))
    {
    case NOT:
    case NEG:
      /* NOT and NEG are their own inverses.  */
      if (GET_MODE (v->val_rtx) != GET_MODE (val))
	return;
      ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
      break;
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      /* The inverse of an extension is a lowpart truncation.  */
      ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
      break;
    case XOR:
      code = XOR;
      goto binary;
    case PLUS:
      code = MINUS;
      goto binary;
    case MINUS:
      code = PLUS;
      goto binary;
    binary:
      if (GET_MODE (v->val_rtx) != GET_MODE (val))
	return;
      arg = XEXP (src, 1);
      /* The second operand must reduce to a constant; try expanding
	 it through cselib if it isn't one already.  */
      if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
	{
	  arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
	  if (arg == NULL_RTX)
	    return;
	  if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
	    return;
	}
      ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
      break;
    default:
      gcc_unreachable ();
    }

  cselib_add_permanent_equiv (v, ret, insn);
}
1.1 mrg 5957 1.1 mrg /* Add stores (register and memory references) LOC which will be tracked 5958 1.1 mrg to VTI (bb)->mos. EXPR is the RTL expression containing the store. 5959 1.1 mrg CUIP->insn is instruction which the LOC is part of. */ 5960 1.1 mrg 5961 1.1 mrg static void 5962 1.1 mrg add_stores (rtx loc, const_rtx expr, void *cuip) 5963 1.1 mrg { 5964 1.1 mrg machine_mode mode = VOIDmode, mode2; 5965 1.1 mrg struct count_use_info *cui = (struct count_use_info *)cuip; 5966 1.1 mrg basic_block bb = cui->bb; 5967 1.1 mrg micro_operation mo; 5968 1.1 mrg rtx oloc = loc, nloc, src = NULL; 5969 1.1 mrg enum micro_operation_type type = use_type (loc, cui, &mode); 5970 1.1 mrg bool track_p = false; 5971 1.1 mrg cselib_val *v; 5972 1.1 mrg bool resolve, preserve; 5973 1.1 mrg 5974 1.1 mrg if (type == MO_CLOBBER) 5975 1.1 mrg return; 5976 1.1 mrg 5977 1.1 mrg mode2 = mode; 5978 1.1 mrg 5979 1.1 mrg if (REG_P (loc)) 5980 1.1 mrg { 5981 1.1 mrg gcc_assert (loc != cfa_base_rtx); 5982 1.1 mrg if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET) 5983 1.1 mrg || !(track_p = use_type (loc, NULL, &mode2) == MO_USE) 5984 1.1 mrg || GET_CODE (expr) == CLOBBER) 5985 1.1 mrg { 5986 1.1 mrg mo.type = MO_CLOBBER; 5987 1.1 mrg mo.u.loc = loc; 5988 1.1 mrg if (GET_CODE (expr) == SET 5989 1.1 mrg && (SET_DEST (expr) == loc 5990 1.1 mrg || (GET_CODE (SET_DEST (expr)) == STRICT_LOW_PART 5991 1.1 mrg && XEXP (SET_DEST (expr), 0) == loc)) 5992 1.1 mrg && !unsuitable_loc (SET_SRC (expr)) 5993 1.1 mrg && find_use_val (loc, mode, cui)) 5994 1.1 mrg { 5995 1.1 mrg gcc_checking_assert (type == MO_VAL_SET); 5996 1.1 mrg mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr)); 5997 1.1 mrg } 5998 1.1 mrg } 5999 1.1 mrg else 6000 1.1 mrg { 6001 1.1 mrg if (GET_CODE (expr) == SET 6002 1.1 mrg && SET_DEST (expr) == loc 6003 1.1 mrg && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS) 6004 1.1 mrg src = var_lowpart (mode2, SET_SRC (expr)); 6005 1.1 mrg loc = var_lowpart (mode2, loc); 6006 1.1 mrg 6007 1.1 mrg if 
(src == NULL) 6008 1.1 mrg { 6009 1.1 mrg mo.type = MO_SET; 6010 1.1 mrg mo.u.loc = loc; 6011 1.1 mrg } 6012 1.1 mrg else 6013 1.1 mrg { 6014 1.1 mrg rtx xexpr = gen_rtx_SET (loc, src); 6015 1.1 mrg if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc))) 6016 1.1 mrg { 6017 1.1 mrg /* If this is an instruction copying (part of) a parameter 6018 1.1 mrg passed by invisible reference to its register location, 6019 1.1 mrg pretend it's a SET so that the initial memory location 6020 1.1 mrg is discarded, as the parameter register can be reused 6021 1.1 mrg for other purposes and we do not track locations based 6022 1.1 mrg on generic registers. */ 6023 1.1 mrg if (MEM_P (src) 6024 1.1 mrg && REG_EXPR (loc) 6025 1.1 mrg && TREE_CODE (REG_EXPR (loc)) == PARM_DECL 6026 1.1 mrg && DECL_MODE (REG_EXPR (loc)) != BLKmode 6027 1.1 mrg && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc))) 6028 1.1 mrg && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) 6029 1.1 mrg != arg_pointer_rtx) 6030 1.1 mrg mo.type = MO_SET; 6031 1.1 mrg else 6032 1.1 mrg mo.type = MO_COPY; 6033 1.1 mrg } 6034 1.1 mrg else 6035 1.1 mrg mo.type = MO_SET; 6036 1.1 mrg mo.u.loc = xexpr; 6037 1.1 mrg } 6038 1.1 mrg } 6039 1.1 mrg mo.insn = cui->insn; 6040 1.1 mrg } 6041 1.1 mrg else if (MEM_P (loc) 6042 1.1 mrg && ((track_p = use_type (loc, NULL, &mode2) == MO_USE) 6043 1.1 mrg || cui->sets)) 6044 1.1 mrg { 6045 1.1 mrg if (MEM_P (loc) && type == MO_VAL_SET 6046 1.1 mrg && !REG_P (XEXP (loc, 0)) 6047 1.1 mrg && !MEM_P (XEXP (loc, 0))) 6048 1.1 mrg { 6049 1.1 mrg rtx mloc = loc; 6050 1.1 mrg machine_mode address_mode = get_address_mode (mloc); 6051 1.1 mrg cselib_val *val = cselib_lookup (XEXP (mloc, 0), 6052 1.1 mrg address_mode, 0, 6053 1.1 mrg GET_MODE (mloc)); 6054 1.1 mrg 6055 1.1 mrg if (val && !cselib_preserved_value_p (val)) 6056 1.1 mrg preserve_value (val); 6057 1.1 mrg } 6058 1.1 mrg 6059 1.1 mrg if (GET_CODE (expr) == CLOBBER || !track_p) 6060 1.1 mrg { 6061 1.1 mrg mo.type = MO_CLOBBER; 6062 1.1 
mrg mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc; 6063 1.1 mrg } 6064 1.1 mrg else 6065 1.1 mrg { 6066 1.1 mrg if (GET_CODE (expr) == SET 6067 1.1 mrg && SET_DEST (expr) == loc 6068 1.1 mrg && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS) 6069 1.1 mrg src = var_lowpart (mode2, SET_SRC (expr)); 6070 1.1 mrg loc = var_lowpart (mode2, loc); 6071 1.1 mrg 6072 1.1 mrg if (src == NULL) 6073 1.1 mrg { 6074 1.1 mrg mo.type = MO_SET; 6075 1.1 mrg mo.u.loc = loc; 6076 1.1 mrg } 6077 1.1 mrg else 6078 1.1 mrg { 6079 1.1 mrg rtx xexpr = gen_rtx_SET (loc, src); 6080 1.1 mrg if (same_variable_part_p (SET_SRC (xexpr), 6081 1.1 mrg MEM_EXPR (loc), 6082 1.1 mrg int_mem_offset (loc))) 6083 1.1 mrg mo.type = MO_COPY; 6084 1.1 mrg else 6085 1.1 mrg mo.type = MO_SET; 6086 1.1 mrg mo.u.loc = xexpr; 6087 1.1 mrg } 6088 1.1 mrg } 6089 1.1 mrg mo.insn = cui->insn; 6090 1.1 mrg } 6091 1.1 mrg else 6092 1.1 mrg return; 6093 1.1 mrg 6094 1.1 mrg if (type != MO_VAL_SET) 6095 1.1 mrg goto log_and_return; 6096 1.1 mrg 6097 1.1 mrg v = find_use_val (oloc, mode, cui); 6098 1.1 mrg 6099 1.1 mrg if (!v) 6100 1.1 mrg goto log_and_return; 6101 1.1 mrg 6102 1.1 mrg resolve = preserve = !cselib_preserved_value_p (v); 6103 1.1 mrg 6104 1.1 mrg /* We cannot track values for multiple-part variables, so we track only 6105 1.1 mrg locations for tracked record parameters. */ 6106 1.1 mrg if (track_p 6107 1.1 mrg && REG_P (loc) 6108 1.1 mrg && REG_EXPR (loc) 6109 1.1 mrg && tracked_record_parameter_p (REG_EXPR (loc))) 6110 1.1 mrg { 6111 1.1 mrg /* Although we don't use the value here, it could be used later by the 6112 1.1 mrg mere virtue of its existence as the operand of the reverse operation 6113 1.1 mrg that gave rise to it (typically extension/truncation). Make sure it 6114 1.1 mrg is preserved as required by vt_expand_var_loc_chain. 
*/ 6115 1.1 mrg if (preserve) 6116 1.1 mrg preserve_value (v); 6117 1.1 mrg goto log_and_return; 6118 1.1 mrg } 6119 1.1 mrg 6120 1.1 mrg if (loc == stack_pointer_rtx 6121 1.1 mrg && (maybe_ne (hard_frame_pointer_adjustment, -1) 6122 1.1 mrg || (!frame_pointer_needed && !ACCUMULATE_OUTGOING_ARGS)) 6123 1.1 mrg && preserve) 6124 1.1 mrg cselib_set_value_sp_based (v); 6125 1.1 mrg 6126 1.1 mrg /* Don't record MO_VAL_SET for VALUEs that can be described using 6127 1.1 mrg cfa_base_rtx or cfa_base_rtx + CONST_INT, cselib already knows 6128 1.1 mrg all the needed equivalences and they shouldn't change depending 6129 1.1 mrg on which register holds that VALUE in some instruction. */ 6130 1.1 mrg if (!frame_pointer_needed 6131 1.1 mrg && cfa_base_rtx 6132 1.1 mrg && cselib_sp_derived_value_p (v) 6133 1.1 mrg && loc == stack_pointer_rtx) 6134 1.1 mrg { 6135 1.1 mrg if (preserve) 6136 1.1 mrg preserve_value (v); 6137 1.1 mrg return; 6138 1.1 mrg } 6139 1.1 mrg 6140 1.1 mrg nloc = replace_expr_with_values (oloc); 6141 1.1 mrg if (nloc) 6142 1.1 mrg oloc = nloc; 6143 1.1 mrg 6144 1.1 mrg if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC) 6145 1.1 mrg { 6146 1.1 mrg cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode); 6147 1.1 mrg 6148 1.1 mrg if (oval == v) 6149 1.1 mrg return; 6150 1.1 mrg gcc_assert (REG_P (oloc) || MEM_P (oloc)); 6151 1.1 mrg 6152 1.1 mrg if (oval && !cselib_preserved_value_p (oval)) 6153 1.1 mrg { 6154 1.1 mrg micro_operation moa; 6155 1.1 mrg 6156 1.1 mrg preserve_value (oval); 6157 1.1 mrg 6158 1.1 mrg moa.type = MO_VAL_USE; 6159 1.1 mrg moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc); 6160 1.1 mrg VAL_NEEDS_RESOLUTION (moa.u.loc) = 1; 6161 1.1 mrg moa.insn = cui->insn; 6162 1.1 mrg 6163 1.1 mrg if (dump_file && (dump_flags & TDF_DETAILS)) 6164 1.1 mrg log_op_type (moa.u.loc, cui->bb, cui->insn, 6165 1.1 mrg moa.type, dump_file); 6166 1.1 mrg VTI (bb)->mos.safe_push (moa); 6167 1.1 mrg } 6168 1.1 mrg 6169 1.1 mrg resolve = false; 
6170 1.1 mrg } 6171 1.1 mrg else if (resolve && GET_CODE (mo.u.loc) == SET) 6172 1.1 mrg { 6173 1.1 mrg if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr))) 6174 1.1 mrg nloc = replace_expr_with_values (SET_SRC (expr)); 6175 1.1 mrg else 6176 1.1 mrg nloc = NULL_RTX; 6177 1.1 mrg 6178 1.1 mrg /* Avoid the mode mismatch between oexpr and expr. */ 6179 1.1 mrg if (!nloc && mode != mode2) 6180 1.1 mrg { 6181 1.1 mrg nloc = SET_SRC (expr); 6182 1.1 mrg gcc_assert (oloc == SET_DEST (expr)); 6183 1.1 mrg } 6184 1.1 mrg 6185 1.1 mrg if (nloc && nloc != SET_SRC (mo.u.loc)) 6186 1.1 mrg oloc = gen_rtx_SET (oloc, nloc); 6187 1.1 mrg else 6188 1.1 mrg { 6189 1.1 mrg if (oloc == SET_DEST (mo.u.loc)) 6190 1.1 mrg /* No point in duplicating. */ 6191 1.1 mrg oloc = mo.u.loc; 6192 1.1 mrg if (!REG_P (SET_SRC (mo.u.loc))) 6193 1.1 mrg resolve = false; 6194 1.1 mrg } 6195 1.1 mrg } 6196 1.1 mrg else if (!resolve) 6197 1.1 mrg { 6198 1.1 mrg if (GET_CODE (mo.u.loc) == SET 6199 1.1 mrg && oloc == SET_DEST (mo.u.loc)) 6200 1.1 mrg /* No point in duplicating. */ 6201 1.1 mrg oloc = mo.u.loc; 6202 1.1 mrg } 6203 1.1 mrg else 6204 1.1 mrg resolve = false; 6205 1.1 mrg 6206 1.1 mrg loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc); 6207 1.1 mrg 6208 1.1 mrg if (mo.u.loc != oloc) 6209 1.1 mrg loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc); 6210 1.1 mrg 6211 1.1 mrg /* The loc of a MO_VAL_SET may have various forms: 6212 1.1 mrg 6213 1.1 mrg (concat val dst): dst now holds val 6214 1.1 mrg 6215 1.1 mrg (concat val (set dst src)): dst now holds val, copied from src 6216 1.1 mrg 6217 1.1 mrg (concat (concat val dstv) dst): dst now holds val; dstv is dst 6218 1.1 mrg after replacing mems and non-top-level regs with values. 6219 1.1 mrg 6220 1.1 mrg (concat (concat val dstv) (set dst src)): dst now holds val, 6221 1.1 mrg copied from src. dstv is a value-based representation of dst, if 6222 1.1 mrg it differs from dst. 
If resolution is needed, src is a REG, and 6223 1.1 mrg its mode is the same as that of val. 6224 1.1 mrg 6225 1.1 mrg (concat (concat val (set dstv srcv)) (set dst src)): src 6226 1.1 mrg copied to dst, holding val. dstv and srcv are value-based 6227 1.1 mrg representations of dst and src, respectively. 6228 1.1 mrg 6229 1.1 mrg */ 6230 1.1 mrg 6231 1.1 mrg if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC) 6232 1.1 mrg reverse_op (v->val_rtx, expr, cui->insn); 6233 1.1 mrg 6234 1.1 mrg mo.u.loc = loc; 6235 1.1 mrg 6236 1.1 mrg if (track_p) 6237 1.1 mrg VAL_HOLDS_TRACK_EXPR (loc) = 1; 6238 1.1 mrg if (preserve) 6239 1.1 mrg { 6240 1.1 mrg VAL_NEEDS_RESOLUTION (loc) = resolve; 6241 1.1 mrg preserve_value (v); 6242 1.1 mrg } 6243 1.1 mrg if (mo.type == MO_CLOBBER) 6244 1.1 mrg VAL_EXPR_IS_CLOBBERED (loc) = 1; 6245 1.1 mrg if (mo.type == MO_COPY) 6246 1.1 mrg VAL_EXPR_IS_COPIED (loc) = 1; 6247 1.1 mrg 6248 1.1 mrg mo.type = MO_VAL_SET; 6249 1.1 mrg 6250 1.1 mrg log_and_return: 6251 1.1 mrg if (dump_file && (dump_flags & TDF_DETAILS)) 6252 1.1 mrg log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file); 6253 1.1 mrg VTI (bb)->mos.safe_push (mo); 6254 1.1 mrg } 6255 1.1 mrg 6256 1.1 mrg /* Arguments to the call. */ 6257 1.1 mrg static rtx call_arguments; 6258 1.1 mrg 6259 1.1 mrg /* Compute call_arguments. 
   The result is left in the file-static CALL_ARGUMENTS as an
   EXPR_LIST of CONCAT pairs, each pairing an argument location
   (register or stack memory) with a description of the value it
   holds at the call; add_with_sets later attaches the list to the
   MO_CALL micro operation for the insn.  BB is the block containing
   the call insn INSN.  */

static void
prepare_call_arguments (basic_block bb, rtx_insn *insn)
{
  rtx link, x, call;
  rtx prev, cur, next;
  rtx this_arg = NULL_RTX;
  tree type = NULL_TREE, t, fndecl = NULL_TREE;
  tree obj_type_ref = NULL_TREE;
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;

  memset (&args_so_far_v, 0, sizeof (args_so_far_v));
  args_so_far = pack_cumulative_args (&args_so_far_v);
  call = get_call_rtx_from (insn);
  if (call)
    {
      /* Identify the callee: prefer the SYMBOL_REF's decl, fall back
	 to the MEM_EXPR of the call address.  */
      if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
	{
	  rtx symbol = XEXP (XEXP (call, 0), 0);
	  if (SYMBOL_REF_DECL (symbol))
	    fndecl = SYMBOL_REF_DECL (symbol);
	}
      if (fndecl == NULL_TREE)
	fndecl = MEM_EXPR (XEXP (call, 0));
      if (fndecl
	  && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
	fndecl = NULL_TREE;
      if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
	type = TREE_TYPE (fndecl);
      if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
	{
	  /* An indirect call through an OBJ_TYPE_REF is a C++ virtual
	     call; remember the ref so the vtable slot can be recorded
	     below.  */
	  if (TREE_CODE (fndecl) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
	    obj_type_ref = TREE_OPERAND (fndecl, 0);
	  fndecl = NULL_TREE;
	}
      if (type)
	{
	  /* Only walk the argument registers below if some argument is
	     an integral passed by reference (whose pointee we may want
	     to read back), or this is a virtual call.  */
	  for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
	       t = TREE_CHAIN (t))
	    if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
		&& INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
	      break;
	  if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
	    type = NULL;
	  else
	    {
	      int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
	      link = CALL_INSN_FUNCTION_USAGE (insn);
#ifndef PCC_STATIC_STRUCT_RETURN
	      if (aggregate_value_p (TREE_TYPE (type), type)
		  && targetm.calls.struct_value_rtx (type, 0) == 0)
		{
		  /* The hidden return-slot pointer occupies the first
		     argument position; advance past it, and if it was
		     passed on the stack, also skip its USE in the
		     function usage list.  */
		  tree struct_addr = build_pointer_type (TREE_TYPE (type));
		  function_arg_info arg (struct_addr, /*named=*/true);
		  rtx reg;
		  INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
					nargs + 1);
		  reg = targetm.calls.function_arg (args_so_far, arg);
		  targetm.calls.function_arg_advance (args_so_far, arg);
		  if (reg == NULL_RTX)
		    {
		      for (; link; link = XEXP (link, 1))
			if (GET_CODE (XEXP (link, 0)) == USE
			    && MEM_P (XEXP (XEXP (link, 0), 0)))
			  {
			    link = XEXP (link, 1);
			    break;
			  }
		    }
		}
	      else
#endif
		INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
				      nargs);
	      if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
		{
		  /* Locate the THIS pointer of a virtual call: either
		     the register the target assigns to the first
		     argument, or the first stack USE.  */
		  t = TYPE_ARG_TYPES (type);
		  function_arg_info arg (TREE_VALUE (t), /*named=*/true);
		  this_arg = targetm.calls.function_arg (args_so_far, arg);
		  if (this_arg && !REG_P (this_arg))
		    this_arg = NULL_RTX;
		  else if (this_arg == NULL_RTX)
		    {
		      for (; link; link = XEXP (link, 1))
			if (GET_CODE (XEXP (link, 0)) == USE
			    && MEM_P (XEXP (XEXP (link, 0), 0)))
			  {
			    this_arg = XEXP (XEXP (link, 0), 0);
			    break;
			  }
		    }
		}
	    }
	}
    }
  t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;

  /* Walk the USEs in CALL_INSN_FUNCTION_USAGE and record, for each
     argument location, the preserved cselib VALUE it holds.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    if (GET_CODE (XEXP (link, 0)) == USE)
      {
	rtx item = NULL_RTX;
	x = XEXP (XEXP (link, 0), 0);
	if (GET_MODE (link) == VOIDmode
	    || GET_MODE (link) == BLKmode
	    || (GET_MODE (link) != GET_MODE (x)
		&& ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
		     && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
		    || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
			&& GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
	  /* Can't do anything for these, if the original type mode
	     isn't known or can't be converted.  */;
	else if (REG_P (x))
	  {
	    cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
	    scalar_int_mode mode;
	    if (val && cselib_preserved_value_p (val))
	      item = val->val_rtx;
	    else if (is_a <scalar_int_mode> (GET_MODE (x), &mode))
	      {
		/* No preserved value in the argument's own mode; retry
		   in each wider integer mode up to the word size.  */
		opt_scalar_int_mode mode_iter;
		FOR_EACH_WIDER_MODE (mode_iter, mode)
		  {
		    mode = mode_iter.require ();
		    if (GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
		      break;

		    rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
		    if (reg == NULL_RTX || !REG_P (reg))
		      continue;
		    val = cselib_lookup (reg, mode, 0, VOIDmode);
		    if (val && cselib_preserved_value_p (val))
		      {
			item = val->val_rtx;
			break;
		      }
		  }
	      }
	  }
	else if (MEM_P (x))
	  {
	    rtx mem = x;
	    cselib_val *val;

	    if (!frame_pointer_needed)
	      {
		/* Rewrite sp-based addresses to account for the
		   current stack adjustment of this block's exit.  */
		class adjust_mem_data amd;
		amd.mem_mode = VOIDmode;
		amd.stack_adjust = -VTI (bb)->out.stack_adjust;
		amd.store = true;
		mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
					       &amd);
		gcc_assert (amd.side_effects.is_empty ());
	      }
	    val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
	    if (val && cselib_preserved_value_p (val))
	      item = val->val_rtx;
	    else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
		     && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
	      {
		/* For non-integer stack argument see also if they weren't
		   initialized by integers.  */
		scalar_int_mode imode;
		if (int_mode_for_mode (GET_MODE (mem)).exists (&imode)
		    && imode != GET_MODE (mem))
		  {
		    val = cselib_lookup (adjust_address_nv (mem, imode, 0),
					 imode, 0, VOIDmode);
		    if (val && cselib_preserved_value_p (val))
		      item = lowpart_subreg (GET_MODE (x), val->val_rtx,
					     imode);
		  }
	      }
	  }
	if (item)
	  {
	    /* Record (concat location value), converted to the mode
	       of the original argument type when they differ.  */
	    rtx x2 = x;
	    if (GET_MODE (item) != GET_MODE (link))
	      item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
	    if (GET_MODE (x2) != GET_MODE (link))
	      x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
	    item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
	    call_arguments
	      = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
	  }
	if (t && t != void_list_node)
	  {
	    /* For an integral argument passed by reference in a
	       register, additionally try to record the pointed-to
	       value.  */
	    rtx reg;
	    function_arg_info arg (TREE_VALUE (t), /*named=*/true);
	    apply_pass_by_reference_rules (&args_so_far_v, arg);
	    reg = targetm.calls.function_arg (args_so_far, arg);
	    if (TREE_CODE (arg.type) == REFERENCE_TYPE
		&& INTEGRAL_TYPE_P (TREE_TYPE (arg.type))
		&& reg
		&& REG_P (reg)
		&& GET_MODE (reg) == arg.mode
		&& (GET_MODE_CLASS (arg.mode) == MODE_INT
		    || GET_MODE_CLASS (arg.mode) == MODE_PARTIAL_INT)
		&& REG_P (x)
		&& REGNO (x) == REGNO (reg)
		&& GET_MODE (x) == arg.mode
		&& item)
	      {
		machine_mode indmode
		  = TYPE_MODE (TREE_TYPE (arg.type));
		rtx mem = gen_rtx_MEM (indmode, x);
		cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
		if (val && cselib_preserved_value_p (val))
		  {
		    item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
		    call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
							call_arguments);
		  }
		else
		  {
		    struct elt_loc_list *l;
		    tree initial;

		    /* Try harder, when passing address of a constant
		       pool integer it can be easily read back.  */
		    item = XEXP (item, 1);
		    if (GET_CODE (item) == SUBREG)
		      item = SUBREG_REG (item);
		    gcc_assert (GET_CODE (item) == VALUE);
		    val = CSELIB_VAL_PTR (item);
		    for (l = val->locs; l; l = l->next)
		      if (GET_CODE (l->loc) == SYMBOL_REF
			  && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
			  && SYMBOL_REF_DECL (l->loc)
			  && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
			{
			  initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
			  if (tree_fits_shwi_p (initial))
			    {
			      item = GEN_INT (tree_to_shwi (initial));
			      item = gen_rtx_CONCAT (indmode, mem, item);
			      call_arguments
				= gen_rtx_EXPR_LIST (VOIDmode, item,
						     call_arguments);
			    }
			  break;
			}
		  }
	      }
	    targetm.calls.function_arg_advance (args_so_far, arg);
	    t = TREE_CHAIN (t);
	  }
      }

  /* Add debug arguments.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_HAS_DEBUG_ARGS_P (fndecl))
    {
      vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
      if (debug_args)
	{
	  unsigned int ix;
	  tree param;
	  /* Debug args come in (parameter, temporary decl) pairs;
	     record a DEBUG_PARAMETER_REF bound to the temporary's
	     known RTL.  */
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
	    {
	      rtx item;
	      tree dtemp = (**debug_args)[ix + 1];
	      machine_mode mode = DECL_MODE (dtemp);
	      item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
	      item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
	      call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
						  call_arguments);
	    }
	}
    }

  /* Reverse call_arguments chain.  */
  prev = NULL_RTX;
  for (cur = call_arguments; cur; cur = next)
    {
      next = XEXP (cur, 1);
      XEXP (cur, 1) = prev;
      prev = cur;
    }
  call_arguments = prev;

  /* Record the call target, paired with pc_rtx, unless it is a plain
     SYMBOL_REF (already identified above).  */
  x = get_call_rtx_from (insn);
  if (x)
    {
      x = XEXP (XEXP (x, 0), 0);
      if (GET_CODE (x) == SYMBOL_REF)
	/* Don't record anything.  */;
      else if (CONSTANT_P (x))
	{
	  x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
			      pc_rtx, x);
	  call_arguments
	    = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
	}
      else
	{
	  cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
	  if (val && cselib_preserved_value_p (val))
	    {
	      x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
	      call_arguments
		= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
	    }
	}
    }
  if (this_arg)
    {
      /* For a virtual call, record the vtable slot being called
	 through: *(*this + token * size), paired with a CLOBBER of
	 pc_rtx.  */
      machine_mode mode
	= TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
      rtx clobbered = gen_rtx_MEM (mode, this_arg);
      HOST_WIDE_INT token
	= tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
      if (token)
	clobbered = plus_constant (mode, clobbered,
				   token * GET_MODE_SIZE (mode));
      clobbered = gen_rtx_MEM (mode, clobbered);
      x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
      call_arguments
	= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
    }
}

/* Callback for cselib_record_sets_hook, that records as micro
   operations uses and stores in an insn after cselib_record_sets has
   analyzed the sets in an insn, but before it modifies the stored
   values in the internal tables, unless cselib_record_sets doesn't
   call it directly (perhaps because we're not doing cselib in the
   first place, in which case sets and n_sets will be 0).  */

/* INSN is the insn being processed; SETS/N_SETS are the sets cselib
   found in it.  The micro operations are pushed onto the mos vector
   of INSN's block and then partitioned into the canonical order
   documented at the top of the file.  */

static void
add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
{
  basic_block bb = BLOCK_FOR_INSN (insn);
  int n1, n2;
  struct count_use_info cui;
  micro_operation *mos;

  cselib_hook_called = true;

  cui.insn = insn;
  cui.bb = bb;
  cui.sets = sets;
  cui.n_sets = n_sets;

  /* First collect the uses; [n1, n2] delimits the micro operations
     just added for this insn.  */
  n1 = VTI (bb)->mos.length ();
  cui.store_p = false;
  note_uses (&PATTERN (insn), add_uses_1, &cui);
  n2 = VTI (bb)->mos.length () - 1;
  mos = VTI (bb)->mos.address ();

  /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
     MO_VAL_LOC last.  (Two-pointer partition, like quicksort's
     partitioning step.)  */
  while (n1 < n2)
    {
      while (n1 < n2 && mos[n1].type == MO_USE)
	n1++;
      while (n1 < n2 && mos[n2].type != MO_USE)
	n2--;
      if (n1 < n2)
	std::swap (mos[n1], mos[n2]);
    }

  n2 = VTI (bb)->mos.length () - 1;
  while (n1 < n2)
    {
      while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
	n1++;
      while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
	n2--;
      if (n1 < n2)
	std::swap (mos[n1], mos[n2]);
    }

  if (CALL_P (insn))
    {
      micro_operation mo;

      /* Attach the argument description prepare_call_arguments built,
	 and consume it.  */
      mo.type = MO_CALL;
      mo.insn = insn;
      mo.u.loc = call_arguments;
      call_arguments = NULL_RTX;

      if (dump_file && (dump_flags & TDF_DETAILS))
	log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
      VTI (bb)->mos.safe_push (mo);
    }

  n1 = VTI (bb)->mos.length ();
  /* This will record NEXT_INSN (insn), such that we can
     insert notes before it without worrying about any
     notes that MO_USEs might emit after the insn.  */
  cui.store_p = true;
  note_stores (insn, add_stores, &cui);
  n2 = VTI (bb)->mos.length () - 1;
  mos = VTI (bb)->mos.address ();

  /* Order the MO_VAL_USEs first (note_stores does nothing
     on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
     insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET.  */
  while (n1 < n2)
    {
      while (n1 < n2 && mos[n1].type == MO_VAL_USE)
	n1++;
      while (n1 < n2 && mos[n2].type != MO_VAL_USE)
	n2--;
      if (n1 < n2)
	std::swap (mos[n1], mos[n2]);
    }

  n2 = VTI (bb)->mos.length () - 1;
  while (n1 < n2)
    {
      while (n1 < n2 && mos[n1].type == MO_CLOBBER)
	n1++;
      while (n1 < n2 && mos[n2].type != MO_CLOBBER)
	n2--;
      if (n1 < n2)
	std::swap (mos[n1], mos[n2]);
    }
}

/* Return the initialization status of SRC in the dataflow set IN:
   look up the variable SRC's REG_EXPR/MEM_EXPR refers to and query
   its recorded status.  When -fno-var-tracking-uninit, everything is
   considered initialized.  */

static enum var_init_status
find_src_status (dataflow_set *in, rtx src)
{
  tree decl = NULL_TREE;
  enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;

  if (! flag_var_tracking_uninit)
    status = VAR_INIT_STATUS_INITIALIZED;

  if (src && REG_P (src))
    decl = var_debug_decl (REG_EXPR (src));
  else if (src && MEM_P (src))
    decl = var_debug_decl (MEM_EXPR (src));

  if (src && decl)
    status = get_init_value (in, src, dv_from_decl (decl));

  return status;
}

/* SRC is the source of an assignment.  Use SET to try to find what
   was ultimately assigned to SRC.  Return that value if known,
   otherwise return NULL_RTX.  (Note: despite historical comments,
   SRC itself is never returned; callers see NULL_RTX on failure.)  */

static rtx
find_src_set_src (dataflow_set *set, rtx src)
{
  tree decl = NULL_TREE;   /* The variable being copied around.  */
  rtx set_src = NULL_RTX;  /* The value for "decl" stored in "src".  */
  variable *var;
  location_chain *nextp;
  int i;
  bool found;

  if (src && REG_P (src))
    decl = var_debug_decl (REG_EXPR (src));
  else if (src && MEM_P (src))
    decl = var_debug_decl (MEM_EXPR (src));

  if (src && decl)
    {
      decl_or_value dv = dv_from_decl (decl);

      /* Scan every location chain of every part of the variable for
	 a location equal to SRC, and take its recorded set_src.  */
      var = shared_hash_find (set->vars, dv);
      if (var)
	{
	  found = false;
	  for (i = 0; i < var->n_var_parts && !found; i++)
	    for (nextp = var->var_part[i].loc_chain; nextp && !found;
		 nextp = nextp->next)
	      if (rtx_equal_p (nextp->loc, src))
		{
		  set_src = nextp->set_src;
		  found = true;
		}

	}
    }

  return set_src;
}

/* Compute the changes of variable locations in the basic block BB.
   Return whether the resulting OUT set differs from its previous
   contents.  */

static bool
compute_bb_dataflow (basic_block bb)
{
  unsigned int i;
  micro_operation *mo;
  bool changed;
  dataflow_set old_out;
  dataflow_set *in = &VTI (bb)->in;
  dataflow_set *out = &VTI (bb)->out;

  /* Save the previous OUT solution so a change can be detected, then
     restart OUT from a copy of IN.  */
  dataflow_set_init (&old_out);
  dataflow_set_copy (&old_out, out);
  dataflow_set_copy (out, in);

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    local_get_addr_cache = new hash_map<rtx, rtx>;

  /* Apply each micro operation of BB, in order, to the OUT set.  */
  FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
    {
      rtx_insn *insn = mo->insn;

      switch (mo->type)
	{
	  case MO_CALL:
	    dataflow_set_clear_at_call (out, insn);
	    break;

	  case MO_USE:
	    {
	      rtx loc = mo->u.loc;

	      if (REG_P (loc))
		var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
	      else if (MEM_P (loc))
		var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
	    }
	    break;

	  case MO_VAL_LOC:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc;
	      tree var;

	      /* The operand is either a bare VAR_LOCATION pattern or
		 (concat VALUE VAR_LOCATION).  */
	      if (GET_CODE (loc) == CONCAT)
		{
		  val = XEXP (loc, 0);
		  vloc = XEXP (loc, 1);
		}
	      else
		{
		  val = NULL_RTX;
		  vloc = loc;
		}

	      var = PAT_VAR_LOCATION_DECL (vloc);

	      /* Bind the annotated variable to the VALUE (or directly
		 to the location when no VALUE is available).  */
	      clobber_variable_part (out, NULL_RTX,
				     dv_from_decl (var), 0, NULL_RTX);
	      if (val)
		{
		  if (VAL_NEEDS_RESOLUTION (loc))
		    val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
		  set_variable_part (out, val, dv_from_decl (var), 0,
				     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				     INSERT);
		}
	      else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
		set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
				   dv_from_decl (var), 0,
				   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				   INSERT);
	    }
	    break;

	  case MO_VAL_USE:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc, uloc;

	      /* (concat VAL LOC) or (concat (concat VAL VLOC) ULOC).  */
	      vloc = uloc = XEXP (loc, 1);
	      val = XEXP (loc, 0);

	      if (GET_CODE (val) == CONCAT)
		{
		  uloc = XEXP (val, 1);
		  val = XEXP (val, 0);
		}

	      if (VAL_NEEDS_RESOLUTION (loc))
		val_resolve (out, val, vloc, insn);
	      else
		val_store (out, val, uloc, insn, false);

	      if (VAL_HOLDS_TRACK_EXPR (loc))
		{
		  if (GET_CODE (uloc) == REG)
		    var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
				 NULL);
		  else if (GET_CODE (uloc) == MEM)
		    var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
				 NULL);
		}
	    }
	    break;

	  case MO_VAL_SET:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc, uloc;
	      rtx dstv, srcv;

	      /* Decompose the operand; its possible shapes are listed
		 in the comment where MO_VAL_SET is constructed.  */
	      vloc = loc;
	      uloc = XEXP (vloc, 1);
	      val = XEXP (vloc, 0);
	      vloc = uloc;

	      if (GET_CODE (uloc) == SET)
		{
		  dstv = SET_DEST (uloc);
		  srcv = SET_SRC (uloc);
		}
	      else
		{
		  dstv = uloc;
		  srcv = NULL;
		}

	      if (GET_CODE (val) == CONCAT)
		{
		  /* Inner concat carries the value-based form of the
		     destination (and possibly the source).  */
		  dstv = vloc = XEXP (val, 1);
		  val = XEXP (val, 0);
		}

	      if (GET_CODE (vloc) == SET)
		{
		  srcv = SET_SRC (vloc);

		  gcc_assert (val != srcv);
		  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));

		  dstv = vloc = SET_DEST (vloc);

		  if (VAL_NEEDS_RESOLUTION (loc))
		    val_resolve (out, val, srcv, insn);
		}
	      else if (VAL_NEEDS_RESOLUTION (loc))
		{
		  gcc_assert (GET_CODE (uloc) == SET
			      && GET_CODE (SET_SRC (uloc)) == REG);
		  val_resolve (out, val, SET_SRC (uloc), insn);
		}

	      if (VAL_HOLDS_TRACK_EXPR (loc))
		{
		  if (VAL_EXPR_IS_CLOBBERED (loc))
		    {
		      /* Destination is clobbered: drop any variable
			 bound to it.  */
		      if (REG_P (uloc))
			var_reg_delete (out, uloc, true);
		      else if (MEM_P (uloc))
			{
			  gcc_assert (MEM_P (dstv));
			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
			  var_mem_delete (out, dstv, true);
			}
		    }
		  else
		    {
		      bool copied_p = VAL_EXPR_IS_COPIED (loc);
		      rtx src = NULL, dst = uloc;
		      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;

		      if (GET_CODE (uloc) == SET)
			{
			  src = SET_SRC (uloc);
			  dst = SET_DEST (uloc);
			}

		      if (copied_p)
			{
			  /* For a copy, propagate the source's
			     initialization status (and its own
			     recorded source, if any).  */
			  if (flag_var_tracking_uninit)
			    {
			      status = find_src_status (in, src);

			      if (status == VAR_INIT_STATUS_UNKNOWN)
				status = find_src_status (out, src);
			    }

			  src = find_src_set_src (in, src);
			}

		      if (REG_P (dst))
			var_reg_delete_and_set (out, dst, !copied_p,
						status, srcv);
		      else if (MEM_P (dst))
			{
			  gcc_assert (MEM_P (dstv));
			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
			  var_mem_delete_and_set (out, dstv, !copied_p,
						  status, srcv);
			}
		    }
		}
	      else if (REG_P (uloc))
		var_regno_delete (out, REGNO (uloc));
	      else if (MEM_P (uloc))
		{
		  gcc_checking_assert (GET_CODE (vloc) == MEM);
		  /* The assert fires only in checking builds; the
		     fallback below remains reachable otherwise.  */
		  gcc_checking_assert (dstv == vloc);
		  if (dstv != vloc)
		    clobber_overlapping_mems (out, vloc);
		}

	      val_store (out, val, dstv, insn, true);
	    }
	    break;

	  case MO_SET:
	    {
	      rtx loc = mo->u.loc;
	      rtx set_src = NULL;

	      if (GET_CODE (loc) == SET)
		{
		  set_src = SET_SRC (loc);
		  loc = SET_DEST (loc);
		}

	      if (REG_P (loc))
		var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
					set_src);
	      else if (MEM_P (loc))
		var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
					set_src);
	    }
	    break;

	  case MO_COPY:
	    {
	      rtx loc = mo->u.loc;
	      enum var_init_status src_status;
	      rtx set_src = NULL;

	      if (GET_CODE (loc) == SET)
		{
		  set_src = SET_SRC (loc);
		  loc = SET_DEST (loc);
		}

	      /* A copy keeps the old locations alive; carry over the
		 source's initialization status.  */
	      if (! flag_var_tracking_uninit)
		src_status = VAR_INIT_STATUS_INITIALIZED;
	      else
		{
		  src_status = find_src_status (in, set_src);

		  if (src_status == VAR_INIT_STATUS_UNKNOWN)
		    src_status = find_src_status (out, set_src);
		}

	      set_src = find_src_set_src (in, set_src);

	      if (REG_P (loc))
		var_reg_delete_and_set (out, loc, false, src_status, set_src);
	      else if (MEM_P (loc))
		var_mem_delete_and_set (out, loc, false, src_status, set_src);
	    }
	    break;

	  case MO_USE_NO_VAR:
	    {
	      rtx loc = mo->u.loc;

	      if (REG_P (loc))
		var_reg_delete (out, loc, false);
	      else if (MEM_P (loc))
		var_mem_delete (out, loc, false);
	    }
	    break;

	  case MO_CLOBBER:
	    {
	      rtx loc = mo->u.loc;

	      if (REG_P (loc))
		var_reg_delete (out, loc, true);
	      else if (MEM_P (loc))
		var_mem_delete (out, loc, true);
	    }
	    break;

	  case MO_ADJUST:
	    /* Track the running stack-pointer adjustment within BB.  */
	    out->stack_adjust += mo->u.adjust;
	    break;
	}
    }

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      delete local_get_addr_cache;
      local_get_addr_cache = NULL;

      /* Canonicalize the resulting set so that equivalent sets
	 compare equal in dataflow_set_different.  */
      dataflow_set_equiv_regs (out);
      shared_hash_htab (out->vars)
	->traverse <dataflow_set *, canonicalize_values_mark> (out);
      shared_hash_htab (out->vars)
	->traverse <dataflow_set *, canonicalize_values_star> (out);
      if (flag_checking)
	shared_hash_htab (out->vars)
	  ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
    }
  changed = dataflow_set_different (&old_out, out);
  dataflow_set_destroy (&old_out);
  return changed;
}

/* Find the locations of variables in the whole function.  Returns false
   (and informs the user) when the hash-table size budget set by
   param_max_vartrack_size is exceeded, so the caller can retry with
   less precise tracking.  */

static bool
vt_find_locations (void)
{
  /* Two alternating priority queues keyed on RPO position: the blocks
     being processed in this round and those postponed to the next.  */
  bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
  bb_heap_t *pending = new bb_heap_t (LONG_MIN);
  /* Membership bitmaps mirroring the two heaps, to avoid duplicate
     insertions.  */
  sbitmap in_worklist, in_pending;
  basic_block bb;
  edge e;
  int *bb_order;
  int *rc_order;
  int i;
  /* Running total of IN/OUT hash-table sizes across all blocks, checked
     against the param_max_vartrack_size budget below.  */
  int htabsz = 0;
  int htabmax = param_max_vartrack_size;
  bool success = true;
  unsigned int n_blocks_processed = 0;

  timevar_push (TV_VAR_TRACKING_DATAFLOW);
  /* Compute reverse completion order of depth first search of the CFG
     so that the data-flow runs faster.  */
  rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
  auto_bitmap exit_bbs;
  bitmap_set_bit (exit_bbs, EXIT_BLOCK);
  auto_vec<std::pair<int, int> > toplevel_scc_extents;
  int n = rev_post_order_and_mark_dfs_back_seme
    (cfun, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), exit_bbs, true,
     rc_order, &toplevel_scc_extents);
  /* bb_order maps a block index to its position in the RPO sequence.  */
  for (i = 0; i < n; i++)
    bb_order[rc_order[i]] = i;

  in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
  in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (in_worklist);
  bitmap_clear (in_pending);

  /* We're performing the dataflow iteration independently over the
     toplevel SCCs plus leading non-cyclic entry blocks and separately
     over the tail.  That ensures best memory locality and the least
     number of visited blocks.  */
  unsigned extent = 0;
  int curr_start = -1;
  int curr_end = -1;
  do
    {
      /* [curr_start, curr_end] is the RPO range iterated over in this
	 pass: either the next toplevel SCC extent or the remaining
	 tail of the function.  */
      curr_start = curr_end + 1;
      if (toplevel_scc_extents.length () <= extent)
	curr_end = n - 1;
      else
	curr_end = toplevel_scc_extents[extent++].second;

      /* Seed the pending queue with every block of the range.  */
      for (int i = curr_start; i <= curr_end; ++i)
	{
	  pending->insert (i, BASIC_BLOCK_FOR_FN (cfun, rc_order[i]));
	  bitmap_set_bit (in_pending, rc_order[i]);
	}

      while (success && !pending->empty ())
	{
	  /* Pending becomes the current round's worklist; blocks
	     discovered to need re-processing go to the new pending.  */
	  std::swap (worklist, pending);
	  std::swap (in_worklist, in_pending);

	  while (!worklist->empty ())
	    {
	      bool changed;
	      edge_iterator ei;
	      int oldinsz, oldoutsz;

	      bb = worklist->extract_min ();
	      bitmap_clear_bit (in_worklist, bb->index);

	      /* Subtract this block's old contribution from the size
		 budget before recomputing; it is re-added below.  */
	      if (VTI (bb)->in.vars)
		{
		  htabsz -= (shared_hash_htab (VTI (bb)->in.vars)->size ()
			     + shared_hash_htab (VTI (bb)->out.vars)->size ());
		  oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
		  oldoutsz = shared_hash_htab (VTI (bb)->out.vars)->elements ();
		}
	      else
		oldinsz = oldoutsz = 0;

	      if (MAY_HAVE_DEBUG_BIND_INSNS)
		{
		  dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
		  bool first = true, adjust = false;

		  /* Calculate the IN set as the intersection of
		     predecessor OUT sets.  */

		  dataflow_set_clear (in);
		  dst_can_be_shared = true;

		  FOR_EACH_EDGE (e, ei, bb->preds)
		    if (!VTI (e->src)->flooded)
		      /* A not-yet-flooded predecessor must be a back
			 edge source; its OUT set is ignored here.  */
		      gcc_assert (bb_order[bb->index]
				  <= bb_order[e->src->index]);
		    else if (first)
		      {
			dataflow_set_copy (in, &VTI (e->src)->out);
			first_out = &VTI (e->src)->out;
			first = false;
		      }
		    else
		      {
			dataflow_set_merge (in, &VTI (e->src)->out);
			adjust = true;
		      }

		  if (adjust)
		    {
		      dataflow_post_merge_adjust (in, &VTI (bb)->permp);

		      if (flag_checking)
			/* Merge and merge_adjust should keep entries in
			   canonical order.  */
			shared_hash_htab (in->vars)
			  ->traverse <dataflow_set *,
				      canonicalize_loc_order_check> (in);

		      if (dst_can_be_shared)
			{
			  /* The merge did not change anything relative to
			     the first predecessor, so share its table
			     instead of keeping the merged copy.  */
			  shared_hash_destroy (in->vars);
			  in->vars = shared_hash_copy (first_out->vars);
			}
		    }

		  VTI (bb)->flooded = true;
		}
	      else
		{
		  /* Calculate the IN set as union of predecessor OUT sets.  */
		  dataflow_set_clear (&VTI (bb)->in);
		  FOR_EACH_EDGE (e, ei, bb->preds)
		    dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
		}

	      changed = compute_bb_dataflow (bb);
	      n_blocks_processed++;
	      htabsz += (shared_hash_htab (VTI (bb)->in.vars)->size ()
			 + shared_hash_htab (VTI (bb)->out.vars)->size ());

	      /* Give up when the accumulated table sizes blow the
		 budget; the pass will be redone less precisely.  */
	      if (htabmax && htabsz > htabmax)
		{
		  if (MAY_HAVE_DEBUG_BIND_INSNS)
		    inform (DECL_SOURCE_LOCATION (cfun->decl),
			    "variable tracking size limit exceeded with "
			    "%<-fvar-tracking-assignments%>, retrying without");
		  else
		    inform (DECL_SOURCE_LOCATION (cfun->decl),
			    "variable tracking size limit exceeded");
		  success = false;
		  break;
		}

	      if (changed)
		{
		  FOR_EACH_EDGE (e, ei, bb->succs)
		    {
		      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
			continue;

		      /* Iterate to an earlier block in RPO in the next
			 round, iterate to the same block immediately.  */
		      if (bb_order[e->dest->index] < bb_order[bb->index])
			{
			  gcc_assert (bb_order[e->dest->index] >= curr_start);
			  if (!bitmap_bit_p (in_pending, e->dest->index))
			    {
			      /* Send E->DEST to next round.  */
			      bitmap_set_bit (in_pending, e->dest->index);
			      pending->insert (bb_order[e->dest->index],
					       e->dest);
			    }
			}
		      else if (bb_order[e->dest->index] <= curr_end
			       && !bitmap_bit_p (in_worklist, e->dest->index))
			{
			  /* Add E->DEST to current round or delay
			     processing if it is in the next SCC.  */
			  bitmap_set_bit (in_worklist, e->dest->index);
			  worklist->insert (bb_order[e->dest->index],
					    e->dest);
			}
		    }
		}

	      if (dump_file)
		fprintf (dump_file,
			 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, "
			 "tsz %i\n", bb->index,
			 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
			 oldinsz,
			 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
			 oldoutsz,
			 (int)worklist->nodes (), (int)pending->nodes (),
			 htabsz);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "BB %i IN:\n", bb->index);
		  dump_dataflow_set (&VTI (bb)->in);
		  fprintf (dump_file, "BB %i OUT:\n", bb->index);
		  dump_dataflow_set (&VTI (bb)->out);
		}
	    }
	}
    }
  while (curr_end != n - 1);

  statistics_counter_event (cfun, "compute_bb_dataflow times",
			    n_blocks_processed);

  /* With debug bind insns every reachable block must have been
     visited (flooded) at least once.  */
  if (success && MAY_HAVE_DEBUG_BIND_INSNS)
    FOR_EACH_BB_FN (bb, cfun)
      gcc_assert (VTI (bb)->flooded);

  free (rc_order);
  free (bb_order);
  delete worklist;
  delete pending;
  sbitmap_free (in_worklist);
  sbitmap_free (in_pending);

  timevar_pop (TV_VAR_TRACKING_DATAFLOW);
  return success;
}

/* Print the content of the LIST to dump file.
*/

static void
dump_attrs_list (attrs *list)
{
  /* Each node is a (decl-or-value, offset) pair; print them on one
     line separated only by the "+offset" suffix.  */
  for (; list; list = list->next)
    {
      if (dv_is_decl_p (list->dv))
	print_mem_expr (dump_file, dv_as_decl (list->dv));
      else
	print_rtl_single (dump_file, dv_as_value (list->dv));
      fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
    }
  fprintf (dump_file, "\n");
}

/* Print the information about variable *SLOT to dump file.  Traversal
   callback for variable_table_type::traverse; DATA is unused.  */

int
dump_var_tracking_slot (variable **slot, void *data ATTRIBUTE_UNUSED)
{
  variable *var = *slot;

  dump_var (var);

  /* Continue traversing the hash table.  */
  return 1;
}

/* Print the information about variable VAR to dump file.  */

static void
dump_var (variable *var)
{
  int i;
  location_chain *node;

  if (dv_is_decl_p (var->dv))
    {
      const_tree decl = dv_as_decl (var->dv);

      if (DECL_NAME (decl))
	{
	  fprintf (dump_file, "  name: %s",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));
	  if (dump_flags & TDF_UID)
	    fprintf (dump_file, "D.%u", DECL_UID (decl));
	}
      else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
	fprintf (dump_file, "  name: D#%u", DEBUG_TEMP_UID (decl));
      else
	fprintf (dump_file, "  name: D.%u", DECL_UID (decl));
      fprintf (dump_file, "\n");
    }
  else
    {
      /* A VALUE rather than a decl.  */
      fputc (' ', dump_file);
      print_rtl_single (dump_file, dv_as_value (var->dv));
    }

  /* Dump every part and its chain of known locations.  One-part
     variables have a single part at offset 0.  */
  for (i = 0; i < var->n_var_parts; i++)
    {
      fprintf (dump_file, "    offset %ld\n",
	       (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  fprintf (dump_file, "      ");
	  if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
	    fprintf (dump_file, "[uninit]");
	  print_rtl_single (dump_file, node->loc);
	}
    }
}

/* Print the information about variables from hash table VARS to dump file.  */

static void
dump_vars (variable_table_type *vars)
{
  if (!vars->is_empty ())
    {
      fprintf (dump_file, "Variables:\n");
      vars->traverse <void *, dump_var_tracking_slot> (NULL);
    }
}

/* Print the dataflow set SET to dump file.  */

static void
dump_dataflow_set (dataflow_set *set)
{
  int i;

  fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
	   set->stack_adjust);
  /* Only hard registers carry attribute lists in a dataflow set.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (set->regs[i])
	{
	  fprintf (dump_file, "Reg %d:", i);
	  dump_attrs_list (set->regs[i]);
	}
    }
  dump_vars (shared_hash_htab (set->vars));
  fprintf (dump_file, "\n");
}

/* Print the IN and OUT sets for each basic block to dump file.  */

static void
dump_dataflow_sets (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      fprintf (dump_file, "\nBasic block %d:\n", bb->index);
      fprintf (dump_file, "IN:\n");
      dump_dataflow_set (&VTI (bb)->in);
      fprintf (dump_file, "OUT:\n");
      dump_dataflow_set (&VTI (bb)->out);
    }
}

/* Return the variable for DV in dropped_values, inserting one if
   requested with INSERT.  Returns NULL when INSERT is NO_INSERT and
   DV is not present.  A freshly inserted entry is an empty variable
   (no parts) marked changed.  */

static inline variable *
variable_from_dropped (decl_or_value dv, enum insert_option insert)
{
  variable **slot;
  variable *empty_var;
  onepart_enum onepart;

  slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);

  if (!slot)
    return NULL;

  if (*slot)
    return *slot;

  /* An empty slot can only be reached when insertion was requested.  */
  gcc_checking_assert (insert == INSERT);

  onepart = dv_onepart_p (dv);

  /* dropped_values only tracks VALUEs and debug expressions.  */
  gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);

  empty_var = onepart_pool_allocate (onepart);
  empty_var->dv = dv;
  empty_var->refcount = 1;
  empty_var->n_var_parts = 0;
  empty_var->onepart = onepart;
  empty_var->in_changed_variables = false;
  empty_var->var_part[0].loc_chain = NULL;
  empty_var->var_part[0].cur_loc = NULL;
  VAR_LOC_1PAUX (empty_var) = NULL;
  set_dv_changed (dv, true);

  *slot = empty_var;

  return empty_var;
}

/* Recover the one-part aux from dropped_values.
*/

static struct onepart_aux *
recover_dropped_1paux (variable *var)
{
  variable *dvar;

  gcc_checking_assert (var->onepart);

  /* Nothing to recover if VAR already carries its aux info.  */
  if (VAR_LOC_1PAUX (var))
    return VAR_LOC_1PAUX (var);

  /* Variable decls are never stored in dropped_values.  */
  if (var->onepart == ONEPART_VDECL)
    return NULL;

  dvar = variable_from_dropped (var->dv, NO_INSERT);

  if (!dvar)
    return NULL;

  /* Transfer ownership of the aux from the dropped entry to VAR.  */
  VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
  VAR_LOC_1PAUX (dvar) = NULL;

  return VAR_LOC_1PAUX (var);
}

/* Add variable VAR to the hash table of changed variables and
   if it has no locations delete it from SET's hash table.  */

static void
variable_was_changed (variable *var, dataflow_set *set)
{
  hashval_t hash = dv_htab_hash (var->dv);

  if (emit_notes)
    {
      variable **slot;

      /* Remember this decl or VALUE has been added to changed_variables.  */
      set_dv_changed (var->dv, true);

      slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);

      if (*slot)
	{
	  /* Replace an older changed-variables entry for the same dv.  */
	  variable *old_var = *slot;
	  gcc_assert (old_var->in_changed_variables);
	  old_var->in_changed_variables = false;
	  if (var != old_var && var->onepart)
	    {
	      /* Restore the auxiliary info from an empty variable
		 previously created for changed_variables, so it is
		 not lost.  */
	      gcc_checking_assert (!VAR_LOC_1PAUX (var));
	      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
	      VAR_LOC_1PAUX (old_var) = NULL;
	    }
	  variable_htab_free (*slot);
	}

      if (set && var->n_var_parts == 0)
	{
	  /* VAR lost all locations: record an empty variable in
	     changed_variables (and, for VALUEs/DEBUG_EXPRs, in
	     dropped_values) and drop VAR from SET below.  */
	  onepart_enum onepart = var->onepart;
	  variable *empty_var = NULL;
	  variable **dslot = NULL;

	  if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
	    {
	      dslot = dropped_values->find_slot_with_hash (var->dv,
							   dv_htab_hash (var->dv),
							   INSERT);
	      empty_var = *dslot;

	      if (empty_var)
		{
		  gcc_checking_assert (!empty_var->in_changed_variables);
		  if (!VAR_LOC_1PAUX (var))
		    {
		      /* Move the aux into VAR so it is carried over to
			 the changed_variables entry below.  */
		      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
		      VAR_LOC_1PAUX (empty_var) = NULL;
		    }
		  else
		    gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
		}
	    }

	  if (!empty_var)
	    {
	      empty_var = onepart_pool_allocate (onepart);
	      empty_var->dv = var->dv;
	      empty_var->refcount = 1;
	      empty_var->n_var_parts = 0;
	      empty_var->onepart = onepart;
	      if (dslot)
		{
		  /* One reference for dropped_values, one for
		     changed_variables.  */
		  empty_var->refcount++;
		  *dslot = empty_var;
		}
	    }
	  else
	    empty_var->refcount++;
	  empty_var->in_changed_variables = true;
	  *slot = empty_var;
	  if (onepart)
	    {
	      empty_var->var_part[0].loc_chain = NULL;
	      empty_var->var_part[0].cur_loc = NULL;
	      VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
	      VAR_LOC_1PAUX (var) = NULL;
	    }
	  goto drop_var;
	}
      else
	{
	  if (var->onepart && !VAR_LOC_1PAUX (var))
	    recover_dropped_1paux (var);
	  /* changed_variables keeps its own reference to VAR.  */
	  var->refcount++;
	  var->in_changed_variables = true;
	  *slot = var;
	}
    }
  else
    {
      gcc_assert (set);
      if (var->n_var_parts == 0)
	{
	  variable **slot;

	drop_var:
	  /* Remove the now-empty variable from SET's table,
	     unsharing the table first if necessary.  */
	  slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
	  if (slot)
	    {
	      if (shared_hash_shared (set->vars))
		slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
						      NO_INSERT);
	      shared_hash_htab (set->vars)->clear_slot (slot);
	    }
	}
    }
}

/* Look for the index in VAR->var_part corresponding to OFFSET.
   Return -1 if not found.  If INSERTION_POINT is non-NULL, the
   referenced int will be set to the index that the part has or should
   have, if it should be inserted.  */

static inline int
find_variable_location_part (variable *var, HOST_WIDE_INT offset,
			     int *insertion_point)
{
  int pos, low, high;

  if (var->onepart)
    {
      /* One-part variables only ever have a part at offset 0; return
	 0 if present (n_var_parts == 1) or -1 if empty.  */
      if (offset != 0)
	return -1;

      if (insertion_point)
	*insertion_point = 0;

      return var->n_var_parts - 1;
    }

  /* Find the location part.  Binary search over the parts, which are
     kept sorted by offset.  */
  low = 0;
  high = var->n_var_parts;
  while (low != high)
    {
      pos = (low + high) / 2;
      if (VAR_PART_OFFSET (var, pos) < offset)
	low = pos + 1;
      else
	high = pos;
    }
  pos = low;

  if (insertion_point)
    *insertion_point = pos;

  if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
    return pos;

  return -1;
}

/* Set the part of variable's location in *SLOT (which belongs to
   SET->vars) at OFFSET to LOC, recording initialization status
   INITIALIZED and source of the assignment SET_SRC.  The slot's
   variable is unshared first if needed; the new location is placed at
   the canonical position in the part's location chain.  Returns the
   (possibly moved) slot.  */

static variable **
set_slot_part (dataflow_set *set, rtx loc, variable **slot,
	       decl_or_value dv, HOST_WIDE_INT offset,
	       enum var_init_status initialized, rtx set_src)
{
  int pos;
  location_chain *node, *next;
  location_chain **nextp;
  variable *var;
  onepart_enum onepart;

  var = *slot;

  if (var)
    onepart = var->onepart;
  else
    onepart = dv_onepart_p (dv);

  gcc_checking_assert (offset == 0 || !onepart);
  gcc_checking_assert (loc != dv_as_opaque (dv));

  /* Without -fvar-tracking-uninit every location counts as
     initialized.  */
  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  if (!var)
    {
      /* Create new variable information.  */
      var = onepart_pool_allocate (onepart);
      var->dv = dv;
      var->refcount = 1;
      var->n_var_parts = 1;
      var->onepart = onepart;
      var->in_changed_variables = false;
      if (var->onepart)
	VAR_LOC_1PAUX (var) = NULL;
      else
	VAR_PART_OFFSET (var, 0) = offset;
      var->var_part[0].loc_chain = NULL;
      var->var_part[0].cur_loc = NULL;
      *slot = var;
      pos = 0;
      nextp = &var->var_part[0].loc_chain;
    }
  else if (onepart)
    {
      /* One-part chains are kept in canonical order.  Walk to LOC's
	 place: R is set to 0 if LOC is already present, to 1 when the
	 insertion point is found, and C counts the nodes before it.  */
      int r = -1, c = 0;

      gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));

      pos = 0;

      if (GET_CODE (loc) == VALUE)
	{
	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
	       nextp = &node->next)
	    if (GET_CODE (node->loc) == VALUE)
	      {
		if (node->loc == loc)
		  {
		    r = 0;
		    break;
		  }
		if (canon_value_cmp (node->loc, loc))
		  c++;
		else
		  {
		    r = 1;
		    break;
		  }
	      }
	    else if (REG_P (node->loc) || MEM_P (node->loc))
	      c++;
	    else
	      {
		r = 1;
		break;
	      }
	}
      else if (REG_P (loc))
	{
	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
	       nextp = &node->next)
	    if (REG_P (node->loc))
	      {
		if (REGNO (node->loc) < REGNO (loc))
		  c++;
		else
		  {
		    if (REGNO (node->loc) == REGNO (loc))
		      r = 0;
		    else
		      r = 1;
		    break;
		  }
	      }
	    else
	      {
		r = 1;
		break;
	      }
	}
      else if (MEM_P (loc))
	{
	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
	       nextp = &node->next)
	    if (REG_P (node->loc))
	      c++;
	    else if (MEM_P (node->loc))
	      {
		if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
		  break;
		else
		  c++;
	      }
	    else
	      {
		r = 1;
		break;
	      }
	}
      else
	for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
	     nextp = &node->next)
	  if ((r = loc_cmp (node->loc, loc)) >= 0)
	    break;
	  else
	    c++;

      /* LOC is already in the chain; nothing to do.  */
      if (r == 0)
	return slot;

      if (shared_var_p (var, set->vars))
	{
	  /* Unshare, then re-find the insertion point by skipping the
	     C nodes counted above in the fresh copy.  */
	  slot = unshare_variable (set, slot, var, initialized);
	  var = *slot;
	  for (nextp = &var->var_part[0].loc_chain; c;
	       nextp = &(*nextp)->next)
	    c--;
	  gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
	}
    }
  else
    {
      int inspos = 0;

      gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));

      pos = find_variable_location_part (var, offset, &inspos);

      if (pos >= 0)
	{
	  node = var->var_part[pos].loc_chain;

	  if (node
	      && ((REG_P (node->loc) && REG_P (loc)
		   && REGNO (node->loc) == REGNO (loc))
		  || rtx_equal_p (node->loc, loc)))
	    {
	      /* LOC is in the beginning of the chain so we have nothing
		 to do.  */
	      if (node->init < initialized)
		node->init = initialized;
	      if (set_src != NULL)
		node->set_src = set_src;

	      return slot;
	    }
	  else
	    {
	      /* We have to make a copy of a shared variable.  */
	      if (shared_var_p (var, set->vars))
		{
		  slot = unshare_variable (set, slot, var, initialized);
		  var = *slot;
		}
	    }
	}
      else
	{
	  /* We have not found the location part, new one will be created.  */

	  /* We have to make a copy of the shared variable.  */
	  if (shared_var_p (var, set->vars))
	    {
	      slot = unshare_variable (set, slot, var, initialized);
	      var = *slot;
	    }

	  /* We track only variables whose size is <= MAX_VAR_PARTS bytes
	     thus there are at most MAX_VAR_PARTS different offsets.  */
	  gcc_assert (var->n_var_parts < MAX_VAR_PARTS
		      && (!var->n_var_parts || !onepart));

	  /* We have to move the elements of array starting at index
	     inspos to the next position.  */
	  for (pos = var->n_var_parts; pos > inspos; pos--)
	    var->var_part[pos] = var->var_part[pos - 1];

	  var->n_var_parts++;
	  gcc_checking_assert (!onepart);
	  VAR_PART_OFFSET (var, pos) = offset;
	  var->var_part[pos].loc_chain = NULL;
	  var->var_part[pos].cur_loc = NULL;
	}

      /* Delete the location from the list.  An equal location may
	 appear at most once; its init status and set_src are folded
	 into the node added below.  */
      nextp = &var->var_part[pos].loc_chain;
      for (node = var->var_part[pos].loc_chain; node; node = next)
	{
	  next = node->next;
	  if ((REG_P (node->loc) && REG_P (loc)
	       && REGNO (node->loc) == REGNO (loc))
	      || rtx_equal_p (node->loc, loc))
	    {
	      /* Save these values, to assign to the new node, before
		 deleting this one.  */
	      if (node->init > initialized)
		initialized = node->init;
	      if (node->set_src != NULL && set_src == NULL)
		set_src = node->set_src;
	      if (var->var_part[pos].cur_loc == node->loc)
		var->var_part[pos].cur_loc = NULL;
	      delete node;
	      *nextp = next;
	      break;
	    }
	  else
	    nextp = &node->next;
	}

      nextp = &var->var_part[pos].loc_chain;
    }

  /* Add the location to the beginning.  */
  node = new location_chain;
  node->loc = loc;
  node->init = initialized;
  node->set_src = set_src;
  node->next = *nextp;
  *nextp = node;

  /* If no location was emitted do so.  */
  if (var->var_part[pos].cur_loc == NULL)
    variable_was_changed (var, set);

  return slot;
}

/* Set the part of variable's location in the dataflow set SET.  The
   variable part is specified by variable's declaration in DV and
   offset OFFSET and the part's location by LOC.  IOPT should be
   NO_INSERT if the variable is known to be in SET already and the
   variable hash table must not be resized, and INSERT otherwise.
*/

static void
set_variable_part (dataflow_set *set, rtx loc,
		   decl_or_value dv, HOST_WIDE_INT offset,
		   enum var_init_status initialized, rtx set_src,
		   enum insert_option iopt)
{
  variable **slot;

  if (iopt == NO_INSERT)
    slot = shared_hash_find_slot_noinsert (set->vars, dv);
  else
    {
      slot = shared_hash_find_slot (set->vars, dv);
      if (!slot)
	/* DV is not in the (shared) table yet; unshare and insert.  */
	slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
    }
  set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
}

/* Remove all recorded register locations for the given variable part
   from dataflow set SET, except for those that are identical to loc.
   The variable part is specified by variable's declaration or value
   DV and offset OFFSET.  */

static variable **
clobber_slot_part (dataflow_set *set, rtx loc, variable **slot,
		   HOST_WIDE_INT offset, rtx set_src)
{
  variable *var = *slot;
  int pos = find_variable_location_part (var, offset, NULL);

  if (pos >= 0)
    {
      location_chain *node, *next;

      /* Remove the register locations from the dataflow set.  */
      next = var->var_part[pos].loc_chain;
      for (node = next; node; node = next)
	{
	  next = node->next;
	  /* Keep the location when it is LOC itself, or (with
	     -fvar-tracking-uninit) when it was set from the same
	     non-MEM source as this store.  */
	  if (node->loc != loc
	      && (!flag_var_tracking_uninit
		  || !set_src
		  || MEM_P (set_src)
		  || !rtx_equal_p (set_src, node->set_src)))
	    {
	      if (REG_P (node->loc))
		{
		  attrs *anode, *anext;
		  attrs **anextp;

		  /* Remove the variable part from the register's
		     list, but preserve any other variable parts
		     that might be regarded as live in that same
		     register.  */
		  anextp = &set->regs[REGNO (node->loc)];
		  for (anode = *anextp; anode; anode = anext)
		    {
		      anext = anode->next;
		      if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
			  && anode->offset == offset)
			{
			  delete anode;
			  *anextp = anext;
			}
		      else
			anextp = &anode->next;
		    }
		}

	      slot = delete_slot_part (set, node->loc, slot, offset);
	    }
	}
    }

  return slot;
}

/* Remove all recorded register locations for the given variable part
   from dataflow set SET, except for those that are identical to loc.
   The variable part is specified by variable's declaration or value
   DV and offset OFFSET.  */

static void
clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
		       HOST_WIDE_INT offset, rtx set_src)
{
  variable **slot;

  /* Ignore null dv and anything that is neither a VALUE nor a decl.  */
  if (!dv_as_opaque (dv)
      || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
    return;

  slot = shared_hash_find_slot_noinsert (set->vars, dv);
  if (!slot)
    return;

  clobber_slot_part (set, loc, slot, offset, set_src);
}

/* Delete the part of variable's location from dataflow set SET.  The
   variable part is specified by its SET->vars slot SLOT and offset
   OFFSET and the part's location by LOC.  */

static variable **
delete_slot_part (dataflow_set *set, rtx loc, variable **slot,
		  HOST_WIDE_INT offset)
{
  variable *var = *slot;
  int pos = find_variable_location_part (var, offset, NULL);

  if (pos >= 0)
    {
      location_chain *node, *next;
      location_chain **nextp;
      bool changed;
      rtx cur_loc;

      if (shared_var_p (var, set->vars))
	{
	  /* If the variable contains the location part we have to
	     make a copy of the variable.  */
	  for (node = var->var_part[pos].loc_chain; node;
	       node = node->next)
	    {
	      if ((REG_P (node->loc) && REG_P (loc)
		   && REGNO (node->loc) == REGNO (loc))
		  || rtx_equal_p (node->loc, loc))
		{
		  slot = unshare_variable (set, slot, var,
					   VAR_INIT_STATUS_UNKNOWN);
		  var = *slot;
		  break;
		}
	    }
	}

      /* For one-part variables the last-emitted location lives in the
	 aux structure rather than in cur_loc.  */
      if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[pos].cur_loc;

      /* Delete the location part.  */
      changed = false;
      nextp = &var->var_part[pos].loc_chain;
      for (node = *nextp; node; node = next)
	{
	  next = node->next;
	  if ((REG_P (node->loc) && REG_P (loc)
	       && REGNO (node->loc) == REGNO (loc))
	      || rtx_equal_p (node->loc, loc))
	    {
	      /* If we have deleted the location which was last emitted
		 we have to emit new location so add the variable to set
		 of changed variables.  */
	      if (cur_loc == node->loc)
		{
		  changed = true;
		  var->var_part[pos].cur_loc = NULL;
		  if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
		    VAR_LOC_FROM (var) = NULL;
		}
	      delete node;
	      *nextp = next;
	      break;
	    }
	  else
	    nextp = &node->next;
	}

      if (var->var_part[pos].loc_chain == NULL)
	{
	  /* The part lost its last location; compact the parts array
	     and make sure a new (empty) note gets emitted.  */
	  changed = true;
	  var->n_var_parts--;
	  while (pos < var->n_var_parts)
	    {
	      var->var_part[pos] = var->var_part[pos + 1];
	      pos++;
	    }
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return slot;
}

/* Delete the part of variable's location from dataflow set SET.  The
   variable part is specified by variable's declaration or value DV
   and offset OFFSET and the part's location by LOC.
*/

static void
delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
		      HOST_WIDE_INT offset)
{
  /* NO_INSERT: deleting from a variable that isn't in the table is a
     no-op, so never create a slot here.  */
  variable **slot = shared_hash_find_slot_noinsert (set->vars, dv);
  if (!slot)
    return;

  delete_slot_part (set, loc, slot, offset);
}


/* Structure for passing some other parameters to function
   vt_expand_loc_callback.  */
class expand_loc_callback_data
{
public:
  /* The variables and values active at this point.  */
  variable_table_type *vars;

  /* Stack of values and debug_exprs under expansion, and their
     children.  */
  auto_vec<rtx, 4> expanding;

  /* Stack of values and debug_exprs whose expansion hit recursion
     cycles.  They will have VALUE_RECURSED_INTO marked when added to
     this list.  This flag will be cleared if any of its dependencies
     resolves to a valid location.  So, if the flag remains set at the
     end of the search, we know no valid location for this one can
     possibly exist.  */
  auto_vec<rtx, 4> pending;

  /* The maximum depth among the sub-expressions under expansion.
     Zero indicates no expansion so far.  */
  expand_depth depth;
};

/* Allocate the one-part auxiliary data structure for VAR, with enough
   room for COUNT dependencies.
*/

static void
loc_exp_dep_alloc (variable *var, int count)
{
  size_t allocsize;

  gcc_checking_assert (var->onepart);

  /* We can be called with COUNT == 0 to allocate the data structure
     without any dependencies, e.g. for the backlinks only.  However,
     if we are specifying a COUNT, then the dependency list must have
     been emptied before.  It would be possible to adjust pointers or
     force it empty here, but this is better done at an earlier point
     in the algorithm, so we instead leave an assertion to catch
     errors.  */
  gcc_checking_assert (!count
		       || VAR_LOC_DEP_VEC (var) == NULL
		       || VAR_LOC_DEP_VEC (var)->is_empty ());

  /* Nothing to do if the existing embedded vec already has room.  */
  if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
    return;

  /* The deps vec is embedded at the tail of the onepart_aux block, so
     size the whole allocation for COUNT entries.  */
  allocsize = offsetof (struct onepart_aux, deps)
	      + deps_vec::embedded_size (count);

  if (VAR_LOC_1PAUX (var))
    {
      VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
					VAR_LOC_1PAUX (var), allocsize);
      /* If the reallocation moves the onepaux structure, the
	 back-pointer to BACKLINKS in the first list member will still
	 point to its old location.  Adjust it.  */
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
    }
  else
    {
      VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
      *VAR_LOC_DEP_LSTP (var) = NULL;
      VAR_LOC_FROM (var) = NULL;
      VAR_LOC_DEPTH (var).complexity = 0;
      VAR_LOC_DEPTH (var).entryvals = 0;
    }
  VAR_LOC_DEP_VEC (var)->embedded_init (count);
}

/* Remove all entries from the vector of active dependencies of VAR,
   removing them from the back-links lists too.  */

static void
loc_exp_dep_clear (variable *var)
{
  while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
    {
      loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
      /* Unlink LED from the doubly-linked back-links list before
	 popping it off the dependency vector.  */
      if (led->next)
	led->next->pprev = led->pprev;
      if (led->pprev)
	*led->pprev = led->next;
      VAR_LOC_DEP_VEC (var)->pop ();
    }
}

/* Insert an active dependency from VAR on X to the vector of
   dependencies, and add the corresponding back-link to X's list of
   back-links in VARS.  */

static void
loc_exp_insert_dep (variable *var, rtx x, variable_table_type *vars)
{
  decl_or_value dv;
  variable *xvar;
  loc_exp_dep *led;

  dv = dv_from_rtx (x);

  /* ??? Build a vector of variables parallel to EXPANDING, to avoid
     an additional look up?  */
  xvar = vars->find_with_hash (dv, dv_htab_hash (dv));

  if (!xvar)
    {
      xvar = variable_from_dropped (dv, NO_INSERT);
      gcc_checking_assert (xvar);
    }

  /* No point in adding the same backlink more than once.  This may
     arise if say the same value appears in two complex expressions in
     the same loc_list, or even more than once in a single
     expression.  */
  if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
    return;

  /* NOT_ONEPART variables have no embedded deps vec, so their
     dependency records are heap-allocated individually.  */
  if (var->onepart == NOT_ONEPART)
    led = new loc_exp_dep;
  else
    {
      loc_exp_dep empty;
      memset (&empty, 0, sizeof (empty));
      VAR_LOC_DEP_VEC (var)->quick_push (empty);
      led = &VAR_LOC_DEP_VEC (var)->last ();
    }
  led->dv = var->dv;
  led->value = x;

  /* Link LED at the head of XVAR's back-links list; the COUNT == 0
     call only ensures XVAR's auxiliary structure exists.  */
  loc_exp_dep_alloc (xvar, 0);
  led->pprev = VAR_LOC_DEP_LSTP (xvar);
  led->next = *led->pprev;
  if (led->next)
    led->next->pprev = &led->next;
  *led->pprev = led;
}

/* Create active dependencies of VAR on COUNT values starting at
   VALUE, and corresponding back-links to the entries in VARS.  Return
   true if we found any pending-recursion results.  */

static bool
loc_exp_dep_set (variable *var, rtx result, rtx *value, int count,
		 variable_table_type *vars)
{
  bool pending_recursion = false;

  gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
		       || VAR_LOC_DEP_VEC (var)->is_empty ());

  /* Set up all dependencies from last_child (as set up at the end of
     the loop above) to the end.  */
  loc_exp_dep_alloc (var, count);

  while (count--)
    {
      rtx x = *value++;

      /* Pending recursion only matters when no RESULT was found.  */
      if (!pending_recursion)
	pending_recursion = !result && VALUE_RECURSED_INTO (x);

      loc_exp_insert_dep (var, x, vars);
    }

  return pending_recursion;
}

/* Notify the back-links of IVAR that are pending recursion that we
   have found a non-NIL value for it, so they are cleared for another
   attempt to compute a current location.  */

static void
notify_dependents_of_resolved_value (variable *ivar, variable_table_type *vars)
{
  loc_exp_dep *led, *next;

  for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
    {
      decl_or_value dv = led->dv;
      variable *var;

      next = led->next;

      if (dv_is_value_p (dv))
	{
	  rtx value = dv_as_value (dv);

	  /* If we have already resolved it, leave it alone.  */
	  if (!VALUE_RECURSED_INTO (value))
	    continue;

	  /* Check that VALUE_RECURSED_INTO, true from the test above,
	     implies NO_LOC_P.  */
	  gcc_checking_assert (NO_LOC_P (value));

	  /* We won't notify variables that are being expanded,
	     because their dependency list is cleared before
	     recursing.  */
	  NO_LOC_P (value) = false;
	  VALUE_RECURSED_INTO (value) = false;

	  gcc_checking_assert (dv_changed_p (dv));
	}
      else
	{
	  gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
	  if (!dv_changed_p (dv))
	    continue;
	}

      var = vars->find_with_hash (dv, dv_htab_hash (dv));

      if (!var)
	var = variable_from_dropped (dv, NO_INSERT);

      /* Recursively propagate the notification to the dependents of
	 this dependent.  */
      if (var)
	notify_dependents_of_resolved_value (var, vars);

      /* Detach LED from the back-links list: it has been notified.  */
      if (next)
	next->pprev = led->pprev;
      if (led->pprev)
	*led->pprev = next;
      led->next = NULL;
      led->pprev = NULL;
    }
}

static rtx vt_expand_loc_callback (rtx x, bitmap regs,
				   int max_depth, void *data);

/* Return the combined depth, when one sub-expression evaluated to
   BEST_DEPTH and the previous known depth was SAVED_DEPTH.  */

static inline expand_depth
update_depth (expand_depth saved_depth, expand_depth best_depth)
{
  /* If we didn't find anything, stick with what we had.  */
  if (!best_depth.complexity)
    return saved_depth;

  /* If we hadn't found anything before, use the depth of the current
     expression.  Do NOT add one extra level, we want to compute the
     maximum depth among sub-expressions.  We'll increment it later,
     if appropriate.  */
  if (!saved_depth.complexity)
    return best_depth;

  /* Combine the entryval count so that regardless of which one we
     return, the entryval count is accurate.  */
  best_depth.entryvals = saved_depth.entryvals
    = best_depth.entryvals + saved_depth.entryvals;

  if (saved_depth.complexity < best_depth.complexity)
    return best_depth;
  else
    return saved_depth;
}

/* Expand VAR to a location RTX, updating its cur_loc.  Use REGS and
   DATA for cselib expand callback.  If PENDRECP is given, indicate in
   it whether any sub-expression couldn't be fully evaluated because
   it is pending recursion resolution.  */

static inline rtx
vt_expand_var_loc_chain (variable *var, bitmap regs, void *data,
			 bool *pendrecp)
{
  class expand_loc_callback_data *elcd
    = (class expand_loc_callback_data *) data;
  location_chain *loc, *next;
  rtx result = NULL;
  int first_child, result_first_child, last_child;
  bool pending_recursion;
  rtx loc_from = NULL;
  struct elt_loc_list *cloc = NULL;
  expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
  int wanted_entryvals, found_entryvals = 0;

  /* Clear all backlinks pointing at this, so that we're not notified
     while we're active.  */
  loc_exp_dep_clear (var);

 retry:
  /* For ONEPART_VALUE variables, also try the cselib locations of the
     underlying value, after VAR's own location chain.  */
  if (var->onepart == ONEPART_VALUE)
    {
      cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));

      gcc_checking_assert (cselib_preserved_value_p (val));

      cloc = val->locs;
    }

  first_child = result_first_child = last_child
    = elcd->expanding.length ();

  wanted_entryvals = found_entryvals;

  /* Attempt to expand each available location in turn.  */
  for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
       loc || cloc; loc = next)
    {
      result_first_child = last_child;

      if (!loc)
	{
	  /* VAR's own chain is exhausted; take the next cselib loc.
	     NEXT stays NULL (== LOC) so we keep consuming CLOC.  */
	  loc_from = cloc->loc;
	  next = loc;
	  cloc = cloc->next;
	  if (unsuitable_loc (loc_from))
	    continue;
	}
      else
	{
	  loc_from = loc->loc;
	  next = loc->next;
	}

      gcc_checking_assert (!unsuitable_loc (loc_from));

      elcd->depth.complexity = elcd->depth.entryvals = 0;
      result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
					   vt_expand_loc_callback, data);
      last_child = elcd->expanding.length ();

      if (result)
	{
	  depth = elcd->depth;

	  gcc_checking_assert (depth.complexity
			       || result_first_child == last_child);

	  if (last_child - result_first_child != 1)
	    {
	      if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
		depth.entryvals++;
	      depth.complexity++;
	    }

	  if (depth.complexity <= EXPR_USE_DEPTH)
	    {
	      /* Accept this expansion if it has no more ENTRY_VALUEs
		 than we are willing to tolerate on this pass.  */
	      if (depth.entryvals <= wanted_entryvals)
		break;
	      else if (!found_entryvals || depth.entryvals < found_entryvals)
		found_entryvals = depth.entryvals;
	    }

	  result = NULL;
	}

      /* Set it up in case we leave the loop.  */
      depth.complexity = depth.entryvals = 0;
      loc_from = NULL;
      result_first_child = first_child;
    }

  if (!loc_from && wanted_entryvals < found_entryvals)
    {
      /* We found entries with ENTRY_VALUEs and skipped them.  Since
	 we could not find any expansions without ENTRY_VALUEs, but we
	 found at least one with them, go back and get an entry with
	 the minimum number ENTRY_VALUE count that we found.  We could
	 avoid looping, but since each sub-loc is already resolved,
	 the re-expansion should be trivial.  ??? Should we record all
	 attempted locs as dependencies, so that we retry the
	 expansion should any of them change, in the hope it can give
	 us a new entry without an ENTRY_VALUE?  */
      elcd->expanding.truncate (first_child);
      goto retry;
    }

  /* Register all encountered dependencies as active.  */
  pending_recursion = loc_exp_dep_set
    (var, result, elcd->expanding.address () + result_first_child,
     last_child - result_first_child, elcd->vars);

  elcd->expanding.truncate (first_child);

  /* Record where the expansion came from.  */
  gcc_checking_assert (!result || !pending_recursion);
  VAR_LOC_FROM (var) = loc_from;
  VAR_LOC_DEPTH (var) = depth;

  gcc_checking_assert (!depth.complexity == !result);

  elcd->depth = update_depth (saved_depth, depth);

  /* Indicate whether any of the dependencies are pending recursion
     resolution.  */
  if (pendrecp)
    *pendrecp = pending_recursion;

  if (!pendrecp || !pending_recursion)
    var->var_part[0].cur_loc = result;

  return result;
}

/* Callback for cselib_expand_value, that looks for expressions
   holding the value in the var-tracking hash tables.  Return X for
   standard processing, anything else is to be used as-is.  */

static rtx
vt_expand_loc_callback (rtx x, bitmap regs,
			int max_depth ATTRIBUTE_UNUSED,
			void *data)
{
  class expand_loc_callback_data *elcd
    = (class expand_loc_callback_data *) data;
  decl_or_value dv;
  variable *var;
  rtx result, subreg;
  bool pending_recursion = false;
  bool from_empty = false;

  switch (GET_CODE (x))
    {
    case SUBREG:
      /* Expand the inner expression first, then re-apply the subreg.  */
      subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
					   EXPR_DEPTH,
					   vt_expand_loc_callback, data);

      if (!subreg)
	return NULL;

      result = simplify_gen_subreg (GET_MODE (x), subreg,
				    GET_MODE (SUBREG_REG (x)),
				    SUBREG_BYTE (x));

      /* Invalid SUBREGs are ok in debug info.  ??? We could try
	 alternate expansions for the VALUE as well.  */
      if (!result && GET_MODE (subreg) != VOIDmode)
	result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));

      return result;

    case DEBUG_EXPR:
    case VALUE:
      dv = dv_from_rtx (x);
      break;

    default:
      return x;
    }

  elcd->expanding.safe_push (x);

  /* Check that VALUE_RECURSED_INTO implies NO_LOC_P.  */
  gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));

  if (NO_LOC_P (x))
    {
      gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
      return NULL;
    }

  var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));

  if (!var)
    {
      from_empty = true;
      var = variable_from_dropped (dv, INSERT);
    }

  gcc_checking_assert (var);

  if (!dv_changed_p (dv))
    {
      /* Already expanded on a previous visit; reuse the cached
	 cur_loc and just fold its depth into the running depth.  */
      gcc_checking_assert (!NO_LOC_P (x));
      gcc_checking_assert (var->var_part[0].cur_loc);
      gcc_checking_assert (VAR_LOC_1PAUX (var));
      gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);

      elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);

      return var->var_part[0].cur_loc;
    }

  VALUE_RECURSED_INTO (x) = true;
  /* This is tentative, but it makes some tests simpler.  */
  NO_LOC_P (x) = true;

  gcc_checking_assert (var->n_var_parts == 1 || from_empty);

  result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);

  if (pending_recursion)
    {
      gcc_checking_assert (!result);
      elcd->pending.safe_push (x);
    }
  else
    {
      NO_LOC_P (x) = !result;
      VALUE_RECURSED_INTO (x) = false;
      set_dv_changed (dv, false);

      if (result)
	notify_dependents_of_resolved_value (var, elcd->vars);
    }

  return result;
}

/* While expanding variables, we may encounter recursion cycles
   because of mutual (possibly indirect) dependencies between two
   particular variables (or values), say A and B.  If we're trying to
   expand A when we get to B, which in turn attempts to expand A, if
   we can't find any other expansion for B, we'll add B to this
   pending-recursion stack, and tentatively return NULL for its
   location.  This tentative value will be used for any other
   occurrences of B, unless A gets some other location, in which case
   it will notify B that it is worth another try at computing a
   location for it, and it will use the location computed for A then.
   At the end of the expansion, the tentative NULL locations become
   final for all members of PENDING that didn't get a notification.
   This function performs this finalization of NULL locations.
*/

static void
resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
{
  while (!pending->is_empty ())
    {
      rtx x = pending->pop ();
      decl_or_value dv;

      /* Already resolved by a notification; nothing to finalize.  */
      if (!VALUE_RECURSED_INTO (x))
	continue;

      gcc_checking_assert (NO_LOC_P (x));
      VALUE_RECURSED_INTO (x) = false;
      dv = dv_from_rtx (x);
      gcc_checking_assert (dv_changed_p (dv));
      set_dv_changed (dv, false);
    }
}

/* Initialize expand_loc_callback_data D with variable hash table V.
   It must be a macro because of alloca (vec stack).  */
#define INIT_ELCD(d, v)						\
  do								\
    {								\
      (d).vars = (v);						\
      (d).depth.complexity = (d).depth.entryvals = 0;		\
    }								\
  while (0)
/* Finalize expand_loc_callback_data D, resolved to location L.  */
#define FINI_ELCD(d, l)						\
  do								\
    {								\
      resolve_expansions_pending_recursion (&(d).pending);	\
      (d).pending.release ();					\
      (d).expanding.release ();					\
								\
      if ((l) && MEM_P (l))					\
	(l) = targetm.delegitimize_address (l);			\
    }								\
  while (0)

/* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
   equivalences in VARS, updating their CUR_LOCs in the process.
*/

static rtx
vt_expand_loc (rtx loc, variable_table_type *vars)
{
  class expand_loc_callback_data data;
  rtx result;

  /* Without debug bind insns there is nothing to expand; LOC stands
     as-is.  */
  if (!MAY_HAVE_DEBUG_BIND_INSNS)
    return loc;

  INIT_ELCD (data, vars);

  result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
				       vt_expand_loc_callback, &data);

  FINI_ELCD (data, result);

  return result;
}

/* Expand the one-part VARiable to a location, using the equivalences
   in VARS, updating their CUR_LOCs in the process.  */

static rtx
vt_expand_1pvar (variable *var, variable_table_type *vars)
{
  class expand_loc_callback_data data;
  rtx loc;

  gcc_checking_assert (var->onepart && var->n_var_parts == 1);

  /* Unchanged since the last expansion: the cached cur_loc is still
     valid.  */
  if (!dv_changed_p (var->dv))
    return var->var_part[0].cur_loc;

  INIT_ELCD (data, vars);

  loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);

  gcc_checking_assert (data.expanding.is_empty ());

  FINI_ELCD (data, loc);

  return loc;
}

/* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP.  DATA contains
   additional parameters: WHERE specifies whether the note shall be emitted
   before or after instruction INSN.
*/ 8708 1.1 mrg 8709 1.1 mrg int 8710 1.1 mrg emit_note_insn_var_location (variable **varp, emit_note_data *data) 8711 1.1 mrg { 8712 1.1 mrg variable *var = *varp; 8713 1.1 mrg rtx_insn *insn = data->insn; 8714 1.1 mrg enum emit_note_where where = data->where; 8715 1.1 mrg variable_table_type *vars = data->vars; 8716 1.1 mrg rtx_note *note; 8717 1.1 mrg rtx note_vl; 8718 1.1 mrg int i, j, n_var_parts; 8719 1.1 mrg bool complete; 8720 1.1 mrg enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED; 8721 1.1 mrg HOST_WIDE_INT last_limit; 8722 1.1 mrg HOST_WIDE_INT offsets[MAX_VAR_PARTS]; 8723 1.1 mrg rtx loc[MAX_VAR_PARTS]; 8724 1.1 mrg tree decl; 8725 1.1 mrg location_chain *lc; 8726 1.1 mrg 8727 1.1 mrg gcc_checking_assert (var->onepart == NOT_ONEPART 8728 1.1 mrg || var->onepart == ONEPART_VDECL); 8729 1.1 mrg 8730 1.1 mrg decl = dv_as_decl (var->dv); 8731 1.1 mrg 8732 1.1 mrg complete = true; 8733 1.1 mrg last_limit = 0; 8734 1.1 mrg n_var_parts = 0; 8735 1.1 mrg if (!var->onepart) 8736 1.1 mrg for (i = 0; i < var->n_var_parts; i++) 8737 1.1 mrg if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain) 8738 1.1 mrg var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc; 8739 1.1 mrg for (i = 0; i < var->n_var_parts; i++) 8740 1.1 mrg { 8741 1.1 mrg machine_mode mode, wider_mode; 8742 1.1 mrg rtx loc2; 8743 1.1 mrg HOST_WIDE_INT offset, size, wider_size; 8744 1.1 mrg 8745 1.1 mrg if (i == 0 && var->onepart) 8746 1.1 mrg { 8747 1.1 mrg gcc_checking_assert (var->n_var_parts == 1); 8748 1.1 mrg offset = 0; 8749 1.1 mrg initialized = VAR_INIT_STATUS_INITIALIZED; 8750 1.1 mrg loc2 = vt_expand_1pvar (var, vars); 8751 1.1 mrg } 8752 1.1 mrg else 8753 1.1 mrg { 8754 1.1 mrg if (last_limit < VAR_PART_OFFSET (var, i)) 8755 1.1 mrg { 8756 1.1 mrg complete = false; 8757 1.1 mrg break; 8758 1.1 mrg } 8759 1.1 mrg else if (last_limit > VAR_PART_OFFSET (var, i)) 8760 1.1 mrg continue; 8761 1.1 mrg offset = VAR_PART_OFFSET (var, i); 8762 1.1 mrg loc2 = 
var->var_part[i].cur_loc; 8763 1.1 mrg if (loc2 && GET_CODE (loc2) == MEM 8764 1.1 mrg && GET_CODE (XEXP (loc2, 0)) == VALUE) 8765 1.1 mrg { 8766 1.1 mrg rtx depval = XEXP (loc2, 0); 8767 1.1 mrg 8768 1.1 mrg loc2 = vt_expand_loc (loc2, vars); 8769 1.1 mrg 8770 1.1 mrg if (loc2) 8771 1.1 mrg loc_exp_insert_dep (var, depval, vars); 8772 1.1 mrg } 8773 1.1 mrg if (!loc2) 8774 1.1 mrg { 8775 1.1 mrg complete = false; 8776 1.1 mrg continue; 8777 1.1 mrg } 8778 1.1 mrg gcc_checking_assert (GET_CODE (loc2) != VALUE); 8779 1.1 mrg for (lc = var->var_part[i].loc_chain; lc; lc = lc->next) 8780 1.1 mrg if (var->var_part[i].cur_loc == lc->loc) 8781 1.1 mrg { 8782 1.1 mrg initialized = lc->init; 8783 1.1 mrg break; 8784 1.1 mrg } 8785 1.1 mrg gcc_assert (lc); 8786 1.1 mrg } 8787 1.1 mrg 8788 1.1 mrg offsets[n_var_parts] = offset; 8789 1.1 mrg if (!loc2) 8790 1.1 mrg { 8791 1.1 mrg complete = false; 8792 1.1 mrg continue; 8793 1.1 mrg } 8794 1.1 mrg loc[n_var_parts] = loc2; 8795 1.1 mrg mode = GET_MODE (var->var_part[i].cur_loc); 8796 1.1 mrg if (mode == VOIDmode && var->onepart) 8797 1.1 mrg mode = DECL_MODE (decl); 8798 1.1 mrg /* We ony track subparts of constant-sized objects, since at present 8799 1.1 mrg there's no representation for polynomial pieces. */ 8800 1.1 mrg if (!GET_MODE_SIZE (mode).is_constant (&size)) 8801 1.1 mrg { 8802 1.1 mrg complete = false; 8803 1.1 mrg continue; 8804 1.1 mrg } 8805 1.1 mrg last_limit = offsets[n_var_parts] + size; 8806 1.1 mrg 8807 1.1 mrg /* Attempt to merge adjacent registers or memory. 
*/ 8808 1.1 mrg for (j = i + 1; j < var->n_var_parts; j++) 8809 1.1 mrg if (last_limit <= VAR_PART_OFFSET (var, j)) 8810 1.1 mrg break; 8811 1.1 mrg if (j < var->n_var_parts 8812 1.1 mrg && GET_MODE_WIDER_MODE (mode).exists (&wider_mode) 8813 1.1 mrg && GET_MODE_SIZE (wider_mode).is_constant (&wider_size) 8814 1.1 mrg && var->var_part[j].cur_loc 8815 1.1 mrg && mode == GET_MODE (var->var_part[j].cur_loc) 8816 1.1 mrg && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts])) 8817 1.1 mrg && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j)) 8818 1.1 mrg && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars)) 8819 1.1 mrg && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2)) 8820 1.1 mrg { 8821 1.1 mrg rtx new_loc = NULL; 8822 1.1 mrg poly_int64 offset2; 8823 1.1 mrg 8824 1.1 mrg if (REG_P (loc[n_var_parts]) 8825 1.1 mrg && hard_regno_nregs (REGNO (loc[n_var_parts]), mode) * 2 8826 1.1 mrg == hard_regno_nregs (REGNO (loc[n_var_parts]), wider_mode) 8827 1.1 mrg && end_hard_regno (mode, REGNO (loc[n_var_parts])) 8828 1.1 mrg == REGNO (loc2)) 8829 1.1 mrg { 8830 1.1 mrg if (! WORDS_BIG_ENDIAN && ! 
BYTES_BIG_ENDIAN) 8831 1.1 mrg new_loc = simplify_subreg (wider_mode, loc[n_var_parts], 8832 1.1 mrg mode, 0); 8833 1.1 mrg else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN) 8834 1.1 mrg new_loc = simplify_subreg (wider_mode, loc2, mode, 0); 8835 1.1 mrg if (new_loc) 8836 1.1 mrg { 8837 1.1 mrg if (!REG_P (new_loc) 8838 1.1 mrg || REGNO (new_loc) != REGNO (loc[n_var_parts])) 8839 1.1 mrg new_loc = NULL; 8840 1.1 mrg else 8841 1.1 mrg REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]); 8842 1.1 mrg } 8843 1.1 mrg } 8844 1.1 mrg else if (MEM_P (loc[n_var_parts]) 8845 1.1 mrg && GET_CODE (XEXP (loc2, 0)) == PLUS 8846 1.1 mrg && REG_P (XEXP (XEXP (loc2, 0), 0)) 8847 1.1 mrg && poly_int_rtx_p (XEXP (XEXP (loc2, 0), 1), &offset2)) 8848 1.1 mrg { 8849 1.1 mrg poly_int64 end1 = size; 8850 1.1 mrg rtx base1 = strip_offset_and_add (XEXP (loc[n_var_parts], 0), 8851 1.1 mrg &end1); 8852 1.1 mrg if (rtx_equal_p (base1, XEXP (XEXP (loc2, 0), 0)) 8853 1.1 mrg && known_eq (end1, offset2)) 8854 1.1 mrg new_loc = adjust_address_nv (loc[n_var_parts], 8855 1.1 mrg wider_mode, 0); 8856 1.1 mrg } 8857 1.1 mrg 8858 1.1 mrg if (new_loc) 8859 1.1 mrg { 8860 1.1 mrg loc[n_var_parts] = new_loc; 8861 1.1 mrg mode = wider_mode; 8862 1.1 mrg last_limit = offsets[n_var_parts] + wider_size; 8863 1.1 mrg i = j; 8864 1.1 mrg } 8865 1.1 mrg } 8866 1.1 mrg ++n_var_parts; 8867 1.1 mrg } 8868 1.1 mrg poly_uint64 type_size_unit 8869 1.1 mrg = tree_to_poly_uint64 (TYPE_SIZE_UNIT (TREE_TYPE (decl))); 8870 1.1 mrg if (maybe_lt (poly_uint64 (last_limit), type_size_unit)) 8871 1.1 mrg complete = false; 8872 1.1 mrg 8873 1.1 mrg if (! 
flag_var_tracking_uninit) 8874 1.1 mrg initialized = VAR_INIT_STATUS_INITIALIZED; 8875 1.1 mrg 8876 1.1 mrg note_vl = NULL_RTX; 8877 1.1 mrg if (!complete) 8878 1.1 mrg note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized); 8879 1.1 mrg else if (n_var_parts == 1) 8880 1.1 mrg { 8881 1.1 mrg rtx expr_list; 8882 1.1 mrg 8883 1.1 mrg if (offsets[0] || GET_CODE (loc[0]) == PARALLEL) 8884 1.1 mrg expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0])); 8885 1.1 mrg else 8886 1.1 mrg expr_list = loc[0]; 8887 1.1 mrg 8888 1.1 mrg note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized); 8889 1.1 mrg } 8890 1.1 mrg else if (n_var_parts) 8891 1.1 mrg { 8892 1.1 mrg rtx parallel; 8893 1.1 mrg 8894 1.1 mrg for (i = 0; i < n_var_parts; i++) 8895 1.1 mrg loc[i] 8896 1.1 mrg = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i])); 8897 1.1 mrg 8898 1.1 mrg parallel = gen_rtx_PARALLEL (VOIDmode, 8899 1.1 mrg gen_rtvec_v (n_var_parts, loc)); 8900 1.1 mrg note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, 8901 1.1 mrg parallel, initialized); 8902 1.1 mrg } 8903 1.1 mrg 8904 1.1 mrg if (where != EMIT_NOTE_BEFORE_INSN) 8905 1.1 mrg { 8906 1.1 mrg note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn); 8907 1.1 mrg if (where == EMIT_NOTE_AFTER_CALL_INSN) 8908 1.1 mrg NOTE_DURING_CALL_P (note) = true; 8909 1.1 mrg } 8910 1.1 mrg else 8911 1.1 mrg { 8912 1.1 mrg /* Make sure that the call related notes come first. 
*/ 8913 1.1 mrg while (NEXT_INSN (insn) 8914 1.1 mrg && NOTE_P (insn) 8915 1.1 mrg && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION 8916 1.1 mrg && NOTE_DURING_CALL_P (insn)) 8917 1.1 mrg insn = NEXT_INSN (insn); 8918 1.1 mrg if (NOTE_P (insn) 8919 1.1 mrg && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION 8920 1.1 mrg && NOTE_DURING_CALL_P (insn)) 8921 1.1 mrg note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn); 8922 1.1 mrg else 8923 1.1 mrg note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn); 8924 1.1 mrg } 8925 1.1 mrg NOTE_VAR_LOCATION (note) = note_vl; 8926 1.1 mrg 8927 1.1 mrg set_dv_changed (var->dv, false); 8928 1.1 mrg gcc_assert (var->in_changed_variables); 8929 1.1 mrg var->in_changed_variables = false; 8930 1.1 mrg changed_variables->clear_slot (varp); 8931 1.1 mrg 8932 1.1 mrg /* Continue traversing the hash table. */ 8933 1.1 mrg return 1; 8934 1.1 mrg } 8935 1.1 mrg 8936 1.1 mrg /* While traversing changed_variables, push onto DATA (a stack of RTX 8937 1.1 mrg values) entries that aren't user variables. */ 8938 1.1 mrg 8939 1.1 mrg int 8940 1.1 mrg var_track_values_to_stack (variable **slot, 8941 1.1 mrg vec<rtx, va_heap> *changed_values_stack) 8942 1.1 mrg { 8943 1.1 mrg variable *var = *slot; 8944 1.1 mrg 8945 1.1 mrg if (var->onepart == ONEPART_VALUE) 8946 1.1 mrg changed_values_stack->safe_push (dv_as_value (var->dv)); 8947 1.1 mrg else if (var->onepart == ONEPART_DEXPR) 8948 1.1 mrg changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv))); 8949 1.1 mrg 8950 1.1 mrg return 1; 8951 1.1 mrg } 8952 1.1 mrg 8953 1.1 mrg /* Remove from changed_variables the entry whose DV corresponds to 8954 1.1 mrg value or debug_expr VAL. 
*/ 8955 1.1 mrg static void 8956 1.1 mrg remove_value_from_changed_variables (rtx val) 8957 1.1 mrg { 8958 1.1 mrg decl_or_value dv = dv_from_rtx (val); 8959 1.1 mrg variable **slot; 8960 1.1 mrg variable *var; 8961 1.1 mrg 8962 1.1 mrg slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv), 8963 1.1 mrg NO_INSERT); 8964 1.1 mrg var = *slot; 8965 1.1 mrg var->in_changed_variables = false; 8966 1.1 mrg changed_variables->clear_slot (slot); 8967 1.1 mrg } 8968 1.1 mrg 8969 1.1 mrg /* If VAL (a value or debug_expr) has backlinks to variables actively 8970 1.1 mrg dependent on it in HTAB or in CHANGED_VARIABLES, mark them as 8971 1.1 mrg changed, adding to CHANGED_VALUES_STACK any dependencies that may 8972 1.1 mrg have dependencies of their own to notify. */ 8973 1.1 mrg 8974 1.1 mrg static void 8975 1.1 mrg notify_dependents_of_changed_value (rtx val, variable_table_type *htab, 8976 1.1 mrg vec<rtx, va_heap> *changed_values_stack) 8977 1.1 mrg { 8978 1.1 mrg variable **slot; 8979 1.1 mrg variable *var; 8980 1.1 mrg loc_exp_dep *led; 8981 1.1 mrg decl_or_value dv = dv_from_rtx (val); 8982 1.1 mrg 8983 1.1 mrg slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv), 8984 1.1 mrg NO_INSERT); 8985 1.1 mrg if (!slot) 8986 1.1 mrg slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT); 8987 1.1 mrg if (!slot) 8988 1.1 mrg slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), 8989 1.1 mrg NO_INSERT); 8990 1.1 mrg var = *slot; 8991 1.1 mrg 8992 1.1 mrg while ((led = VAR_LOC_DEP_LST (var))) 8993 1.1 mrg { 8994 1.1 mrg decl_or_value ldv = led->dv; 8995 1.1 mrg variable *ivar; 8996 1.1 mrg 8997 1.1 mrg /* Deactivate and remove the backlink, as it was used up. It 8998 1.1 mrg makes no sense to attempt to notify the same entity again: 8999 1.1 mrg either it will be recomputed and re-register an active 9000 1.1 mrg dependency, or it will still have the changed mark. 
*/ 9001 1.1 mrg if (led->next) 9002 1.1 mrg led->next->pprev = led->pprev; 9003 1.1 mrg if (led->pprev) 9004 1.1 mrg *led->pprev = led->next; 9005 1.1 mrg led->next = NULL; 9006 1.1 mrg led->pprev = NULL; 9007 1.1 mrg 9008 1.1 mrg if (dv_changed_p (ldv)) 9009 1.1 mrg continue; 9010 1.1 mrg 9011 1.1 mrg switch (dv_onepart_p (ldv)) 9012 1.1 mrg { 9013 1.1 mrg case ONEPART_VALUE: 9014 1.1 mrg case ONEPART_DEXPR: 9015 1.1 mrg set_dv_changed (ldv, true); 9016 1.1 mrg changed_values_stack->safe_push (dv_as_rtx (ldv)); 9017 1.1 mrg break; 9018 1.1 mrg 9019 1.1 mrg case ONEPART_VDECL: 9020 1.1 mrg ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv)); 9021 1.1 mrg gcc_checking_assert (!VAR_LOC_DEP_LST (ivar)); 9022 1.1 mrg variable_was_changed (ivar, NULL); 9023 1.1 mrg break; 9024 1.1 mrg 9025 1.1 mrg case NOT_ONEPART: 9026 1.1 mrg delete led; 9027 1.1 mrg ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv)); 9028 1.1 mrg if (ivar) 9029 1.1 mrg { 9030 1.1 mrg int i = ivar->n_var_parts; 9031 1.1 mrg while (i--) 9032 1.1 mrg { 9033 1.1 mrg rtx loc = ivar->var_part[i].cur_loc; 9034 1.1 mrg 9035 1.1 mrg if (loc && GET_CODE (loc) == MEM 9036 1.1 mrg && XEXP (loc, 0) == val) 9037 1.1 mrg { 9038 1.1 mrg variable_was_changed (ivar, NULL); 9039 1.1 mrg break; 9040 1.1 mrg } 9041 1.1 mrg } 9042 1.1 mrg } 9043 1.1 mrg break; 9044 1.1 mrg 9045 1.1 mrg default: 9046 1.1 mrg gcc_unreachable (); 9047 1.1 mrg } 9048 1.1 mrg } 9049 1.1 mrg } 9050 1.1 mrg 9051 1.1 mrg /* Take out of changed_variables any entries that don't refer to use 9052 1.1 mrg variables. Back-propagate change notifications from values and 9053 1.1 mrg debug_exprs to their active dependencies in HTAB or in 9054 1.1 mrg CHANGED_VARIABLES. 
*/ 9055 1.1 mrg 9056 1.1 mrg static void 9057 1.1 mrg process_changed_values (variable_table_type *htab) 9058 1.1 mrg { 9059 1.1 mrg int i, n; 9060 1.1 mrg rtx val; 9061 1.1 mrg auto_vec<rtx, 20> changed_values_stack; 9062 1.1 mrg 9063 1.1 mrg /* Move values from changed_variables to changed_values_stack. */ 9064 1.1 mrg changed_variables 9065 1.1 mrg ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack> 9066 1.1 mrg (&changed_values_stack); 9067 1.1 mrg 9068 1.1 mrg /* Back-propagate change notifications in values while popping 9069 1.1 mrg them from the stack. */ 9070 1.1 mrg for (n = i = changed_values_stack.length (); 9071 1.1 mrg i > 0; i = changed_values_stack.length ()) 9072 1.1 mrg { 9073 1.1 mrg val = changed_values_stack.pop (); 9074 1.1 mrg notify_dependents_of_changed_value (val, htab, &changed_values_stack); 9075 1.1 mrg 9076 1.1 mrg /* This condition will hold when visiting each of the entries 9077 1.1 mrg originally in changed_variables. We can't remove them 9078 1.1 mrg earlier because this could drop the backlinks before we got a 9079 1.1 mrg chance to use them. */ 9080 1.1 mrg if (i == n) 9081 1.1 mrg { 9082 1.1 mrg remove_value_from_changed_variables (val); 9083 1.1 mrg n--; 9084 1.1 mrg } 9085 1.1 mrg } 9086 1.1 mrg } 9087 1.1 mrg 9088 1.1 mrg /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain 9089 1.1 mrg CHANGED_VARIABLES and delete this chain. WHERE specifies whether 9090 1.1 mrg the notes shall be emitted before of after instruction INSN. 
*/ 9091 1.1 mrg 9092 1.1 mrg static void 9093 1.1 mrg emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where, 9094 1.1 mrg shared_hash *vars) 9095 1.1 mrg { 9096 1.1 mrg emit_note_data data; 9097 1.1 mrg variable_table_type *htab = shared_hash_htab (vars); 9098 1.1 mrg 9099 1.1 mrg if (changed_variables->is_empty ()) 9100 1.1 mrg return; 9101 1.1 mrg 9102 1.1 mrg if (MAY_HAVE_DEBUG_BIND_INSNS) 9103 1.1 mrg process_changed_values (htab); 9104 1.1 mrg 9105 1.1 mrg data.insn = insn; 9106 1.1 mrg data.where = where; 9107 1.1 mrg data.vars = htab; 9108 1.1 mrg 9109 1.1 mrg changed_variables 9110 1.1 mrg ->traverse <emit_note_data*, emit_note_insn_var_location> (&data); 9111 1.1 mrg } 9112 1.1 mrg 9113 1.1 mrg /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the 9114 1.1 mrg same variable in hash table DATA or is not there at all. */ 9115 1.1 mrg 9116 1.1 mrg int 9117 1.1 mrg emit_notes_for_differences_1 (variable **slot, variable_table_type *new_vars) 9118 1.1 mrg { 9119 1.1 mrg variable *old_var, *new_var; 9120 1.1 mrg 9121 1.1 mrg old_var = *slot; 9122 1.1 mrg new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv)); 9123 1.1 mrg 9124 1.1 mrg if (!new_var) 9125 1.1 mrg { 9126 1.1 mrg /* Variable has disappeared. 
*/ 9127 1.1 mrg variable *empty_var = NULL; 9128 1.1 mrg 9129 1.1 mrg if (old_var->onepart == ONEPART_VALUE 9130 1.1 mrg || old_var->onepart == ONEPART_DEXPR) 9131 1.1 mrg { 9132 1.1 mrg empty_var = variable_from_dropped (old_var->dv, NO_INSERT); 9133 1.1 mrg if (empty_var) 9134 1.1 mrg { 9135 1.1 mrg gcc_checking_assert (!empty_var->in_changed_variables); 9136 1.1 mrg if (!VAR_LOC_1PAUX (old_var)) 9137 1.1 mrg { 9138 1.1 mrg VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var); 9139 1.1 mrg VAR_LOC_1PAUX (empty_var) = NULL; 9140 1.1 mrg } 9141 1.1 mrg else 9142 1.1 mrg gcc_checking_assert (!VAR_LOC_1PAUX (empty_var)); 9143 1.1 mrg } 9144 1.1 mrg } 9145 1.1 mrg 9146 1.1 mrg if (!empty_var) 9147 1.1 mrg { 9148 1.1 mrg empty_var = onepart_pool_allocate (old_var->onepart); 9149 1.1 mrg empty_var->dv = old_var->dv; 9150 1.1 mrg empty_var->refcount = 0; 9151 1.1 mrg empty_var->n_var_parts = 0; 9152 1.1 mrg empty_var->onepart = old_var->onepart; 9153 1.1 mrg empty_var->in_changed_variables = false; 9154 1.1 mrg } 9155 1.1 mrg 9156 1.1 mrg if (empty_var->onepart) 9157 1.1 mrg { 9158 1.1 mrg /* Propagate the auxiliary data to (ultimately) 9159 1.1 mrg changed_variables. */ 9160 1.1 mrg empty_var->var_part[0].loc_chain = NULL; 9161 1.1 mrg empty_var->var_part[0].cur_loc = NULL; 9162 1.1 mrg VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var); 9163 1.1 mrg VAR_LOC_1PAUX (old_var) = NULL; 9164 1.1 mrg } 9165 1.1 mrg variable_was_changed (empty_var, NULL); 9166 1.1 mrg /* Continue traversing the hash table. */ 9167 1.1 mrg return 1; 9168 1.1 mrg } 9169 1.1 mrg /* Update cur_loc and one-part auxiliary data, before new_var goes 9170 1.1 mrg through variable_was_changed. 
*/ 9171 1.1 mrg if (old_var != new_var && new_var->onepart) 9172 1.1 mrg { 9173 1.1 mrg gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL); 9174 1.1 mrg VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var); 9175 1.1 mrg VAR_LOC_1PAUX (old_var) = NULL; 9176 1.1 mrg new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc; 9177 1.1 mrg } 9178 1.1 mrg if (variable_different_p (old_var, new_var)) 9179 1.1 mrg variable_was_changed (new_var, NULL); 9180 1.1 mrg 9181 1.1 mrg /* Continue traversing the hash table. */ 9182 1.1 mrg return 1; 9183 1.1 mrg } 9184 1.1 mrg 9185 1.1 mrg /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash 9186 1.1 mrg table DATA. */ 9187 1.1 mrg 9188 1.1 mrg int 9189 1.1 mrg emit_notes_for_differences_2 (variable **slot, variable_table_type *old_vars) 9190 1.1 mrg { 9191 1.1 mrg variable *old_var, *new_var; 9192 1.1 mrg 9193 1.1 mrg new_var = *slot; 9194 1.1 mrg old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv)); 9195 1.1 mrg if (!old_var) 9196 1.1 mrg { 9197 1.1 mrg int i; 9198 1.1 mrg for (i = 0; i < new_var->n_var_parts; i++) 9199 1.1 mrg new_var->var_part[i].cur_loc = NULL; 9200 1.1 mrg variable_was_changed (new_var, NULL); 9201 1.1 mrg } 9202 1.1 mrg 9203 1.1 mrg /* Continue traversing the hash table. */ 9204 1.1 mrg return 1; 9205 1.1 mrg } 9206 1.1 mrg 9207 1.1 mrg /* Emit notes before INSN for differences between dataflow sets OLD_SET and 9208 1.1 mrg NEW_SET. 
*/ 9209 1.1 mrg 9210 1.1 mrg static void 9211 1.1 mrg emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set, 9212 1.1 mrg dataflow_set *new_set) 9213 1.1 mrg { 9214 1.1 mrg shared_hash_htab (old_set->vars) 9215 1.1 mrg ->traverse <variable_table_type *, emit_notes_for_differences_1> 9216 1.1 mrg (shared_hash_htab (new_set->vars)); 9217 1.1 mrg shared_hash_htab (new_set->vars) 9218 1.1 mrg ->traverse <variable_table_type *, emit_notes_for_differences_2> 9219 1.1 mrg (shared_hash_htab (old_set->vars)); 9220 1.1 mrg emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars); 9221 1.1 mrg } 9222 1.1 mrg 9223 1.1 mrg /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */ 9224 1.1 mrg 9225 1.1 mrg static rtx_insn * 9226 1.1 mrg next_non_note_insn_var_location (rtx_insn *insn) 9227 1.1 mrg { 9228 1.1 mrg while (insn) 9229 1.1 mrg { 9230 1.1 mrg insn = NEXT_INSN (insn); 9231 1.1 mrg if (insn == 0 9232 1.1 mrg || !NOTE_P (insn) 9233 1.1 mrg || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION) 9234 1.1 mrg break; 9235 1.1 mrg } 9236 1.1 mrg 9237 1.1 mrg return insn; 9238 1.1 mrg } 9239 1.1 mrg 9240 1.1 mrg /* Emit the notes for changes of location parts in the basic block BB. 
*/ 9241 1.1 mrg 9242 1.1 mrg static void 9243 1.1 mrg emit_notes_in_bb (basic_block bb, dataflow_set *set) 9244 1.1 mrg { 9245 1.1 mrg unsigned int i; 9246 1.1 mrg micro_operation *mo; 9247 1.1 mrg 9248 1.1 mrg dataflow_set_clear (set); 9249 1.1 mrg dataflow_set_copy (set, &VTI (bb)->in); 9250 1.1 mrg 9251 1.1 mrg FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo) 9252 1.1 mrg { 9253 1.1 mrg rtx_insn *insn = mo->insn; 9254 1.1 mrg rtx_insn *next_insn = next_non_note_insn_var_location (insn); 9255 1.1 mrg 9256 1.1 mrg switch (mo->type) 9257 1.1 mrg { 9258 1.1 mrg case MO_CALL: 9259 1.1 mrg dataflow_set_clear_at_call (set, insn); 9260 1.1 mrg emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars); 9261 1.1 mrg { 9262 1.1 mrg rtx arguments = mo->u.loc, *p = &arguments; 9263 1.1 mrg while (*p) 9264 1.1 mrg { 9265 1.1 mrg XEXP (XEXP (*p, 0), 1) 9266 1.1 mrg = vt_expand_loc (XEXP (XEXP (*p, 0), 1), 9267 1.1 mrg shared_hash_htab (set->vars)); 9268 1.1 mrg /* If expansion is successful, keep it in the list. */ 9269 1.1 mrg if (XEXP (XEXP (*p, 0), 1)) 9270 1.1 mrg { 9271 1.1 mrg XEXP (XEXP (*p, 0), 1) 9272 1.1 mrg = copy_rtx_if_shared (XEXP (XEXP (*p, 0), 1)); 9273 1.1 mrg p = &XEXP (*p, 1); 9274 1.1 mrg } 9275 1.1 mrg /* Otherwise, if the following item is data_value for it, 9276 1.1 mrg drop it too too. */ 9277 1.1 mrg else if (XEXP (*p, 1) 9278 1.1 mrg && REG_P (XEXP (XEXP (*p, 0), 0)) 9279 1.1 mrg && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0)) 9280 1.1 mrg && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0), 9281 1.1 mrg 0)) 9282 1.1 mrg && REGNO (XEXP (XEXP (*p, 0), 0)) 9283 1.1 mrg == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 9284 1.1 mrg 0), 0))) 9285 1.1 mrg *p = XEXP (XEXP (*p, 1), 1); 9286 1.1 mrg /* Just drop this item. 
*/ 9287 1.1 mrg else 9288 1.1 mrg *p = XEXP (*p, 1); 9289 1.1 mrg } 9290 1.1 mrg add_reg_note (insn, REG_CALL_ARG_LOCATION, arguments); 9291 1.1 mrg } 9292 1.1 mrg break; 9293 1.1 mrg 9294 1.1 mrg case MO_USE: 9295 1.1 mrg { 9296 1.1 mrg rtx loc = mo->u.loc; 9297 1.1 mrg 9298 1.1 mrg if (REG_P (loc)) 9299 1.1 mrg var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL); 9300 1.1 mrg else 9301 1.1 mrg var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL); 9302 1.1 mrg 9303 1.1 mrg emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars); 9304 1.1 mrg } 9305 1.1 mrg break; 9306 1.1 mrg 9307 1.1 mrg case MO_VAL_LOC: 9308 1.1 mrg { 9309 1.1 mrg rtx loc = mo->u.loc; 9310 1.1 mrg rtx val, vloc; 9311 1.1 mrg tree var; 9312 1.1 mrg 9313 1.1 mrg if (GET_CODE (loc) == CONCAT) 9314 1.1 mrg { 9315 1.1 mrg val = XEXP (loc, 0); 9316 1.1 mrg vloc = XEXP (loc, 1); 9317 1.1 mrg } 9318 1.1 mrg else 9319 1.1 mrg { 9320 1.1 mrg val = NULL_RTX; 9321 1.1 mrg vloc = loc; 9322 1.1 mrg } 9323 1.1 mrg 9324 1.1 mrg var = PAT_VAR_LOCATION_DECL (vloc); 9325 1.1 mrg 9326 1.1 mrg clobber_variable_part (set, NULL_RTX, 9327 1.1 mrg dv_from_decl (var), 0, NULL_RTX); 9328 1.1 mrg if (val) 9329 1.1 mrg { 9330 1.1 mrg if (VAL_NEEDS_RESOLUTION (loc)) 9331 1.1 mrg val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn); 9332 1.1 mrg set_variable_part (set, val, dv_from_decl (var), 0, 9333 1.1 mrg VAR_INIT_STATUS_INITIALIZED, NULL_RTX, 9334 1.1 mrg INSERT); 9335 1.1 mrg } 9336 1.1 mrg else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc))) 9337 1.1 mrg set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc), 9338 1.1 mrg dv_from_decl (var), 0, 9339 1.1 mrg VAR_INIT_STATUS_INITIALIZED, NULL_RTX, 9340 1.1 mrg INSERT); 9341 1.1 mrg 9342 1.1 mrg emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars); 9343 1.1 mrg } 9344 1.1 mrg break; 9345 1.1 mrg 9346 1.1 mrg case MO_VAL_USE: 9347 1.1 mrg { 9348 1.1 mrg rtx loc = mo->u.loc; 9349 1.1 mrg rtx val, vloc, uloc; 9350 1.1 mrg 9351 1.1 mrg 
vloc = uloc = XEXP (loc, 1); 9352 1.1 mrg val = XEXP (loc, 0); 9353 1.1 mrg 9354 1.1 mrg if (GET_CODE (val) == CONCAT) 9355 1.1 mrg { 9356 1.1 mrg uloc = XEXP (val, 1); 9357 1.1 mrg val = XEXP (val, 0); 9358 1.1 mrg } 9359 1.1 mrg 9360 1.1 mrg if (VAL_NEEDS_RESOLUTION (loc)) 9361 1.1 mrg val_resolve (set, val, vloc, insn); 9362 1.1 mrg else 9363 1.1 mrg val_store (set, val, uloc, insn, false); 9364 1.1 mrg 9365 1.1 mrg if (VAL_HOLDS_TRACK_EXPR (loc)) 9366 1.1 mrg { 9367 1.1 mrg if (GET_CODE (uloc) == REG) 9368 1.1 mrg var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED, 9369 1.1 mrg NULL); 9370 1.1 mrg else if (GET_CODE (uloc) == MEM) 9371 1.1 mrg var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED, 9372 1.1 mrg NULL); 9373 1.1 mrg } 9374 1.1 mrg 9375 1.1 mrg emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars); 9376 1.1 mrg } 9377 1.1 mrg break; 9378 1.1 mrg 9379 1.1 mrg case MO_VAL_SET: 9380 1.1 mrg { 9381 1.1 mrg rtx loc = mo->u.loc; 9382 1.1 mrg rtx val, vloc, uloc; 9383 1.1 mrg rtx dstv, srcv; 9384 1.1 mrg 9385 1.1 mrg vloc = loc; 9386 1.1 mrg uloc = XEXP (vloc, 1); 9387 1.1 mrg val = XEXP (vloc, 0); 9388 1.1 mrg vloc = uloc; 9389 1.1 mrg 9390 1.1 mrg if (GET_CODE (uloc) == SET) 9391 1.1 mrg { 9392 1.1 mrg dstv = SET_DEST (uloc); 9393 1.1 mrg srcv = SET_SRC (uloc); 9394 1.1 mrg } 9395 1.1 mrg else 9396 1.1 mrg { 9397 1.1 mrg dstv = uloc; 9398 1.1 mrg srcv = NULL; 9399 1.1 mrg } 9400 1.1 mrg 9401 1.1 mrg if (GET_CODE (val) == CONCAT) 9402 1.1 mrg { 9403 1.1 mrg dstv = vloc = XEXP (val, 1); 9404 1.1 mrg val = XEXP (val, 0); 9405 1.1 mrg } 9406 1.1 mrg 9407 1.1 mrg if (GET_CODE (vloc) == SET) 9408 1.1 mrg { 9409 1.1 mrg srcv = SET_SRC (vloc); 9410 1.1 mrg 9411 1.1 mrg gcc_assert (val != srcv); 9412 1.1 mrg gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc)); 9413 1.1 mrg 9414 1.1 mrg dstv = vloc = SET_DEST (vloc); 9415 1.1 mrg 9416 1.1 mrg if (VAL_NEEDS_RESOLUTION (loc)) 9417 1.1 mrg val_resolve (set, val, srcv, insn); 9418 1.1 mrg } 9419 1.1 
mrg else if (VAL_NEEDS_RESOLUTION (loc)) 9420 1.1 mrg { 9421 1.1 mrg gcc_assert (GET_CODE (uloc) == SET 9422 1.1 mrg && GET_CODE (SET_SRC (uloc)) == REG); 9423 1.1 mrg val_resolve (set, val, SET_SRC (uloc), insn); 9424 1.1 mrg } 9425 1.1 mrg 9426 1.1 mrg if (VAL_HOLDS_TRACK_EXPR (loc)) 9427 1.1 mrg { 9428 1.1 mrg if (VAL_EXPR_IS_CLOBBERED (loc)) 9429 1.1 mrg { 9430 1.1 mrg if (REG_P (uloc)) 9431 1.1 mrg var_reg_delete (set, uloc, true); 9432 1.1 mrg else if (MEM_P (uloc)) 9433 1.1 mrg { 9434 1.1 mrg gcc_assert (MEM_P (dstv)); 9435 1.1 mrg gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc)); 9436 1.1 mrg var_mem_delete (set, dstv, true); 9437 1.1 mrg } 9438 1.1 mrg } 9439 1.1 mrg else 9440 1.1 mrg { 9441 1.1 mrg bool copied_p = VAL_EXPR_IS_COPIED (loc); 9442 1.1 mrg rtx src = NULL, dst = uloc; 9443 1.1 mrg enum var_init_status status = VAR_INIT_STATUS_INITIALIZED; 9444 1.1 mrg 9445 1.1 mrg if (GET_CODE (uloc) == SET) 9446 1.1 mrg { 9447 1.1 mrg src = SET_SRC (uloc); 9448 1.1 mrg dst = SET_DEST (uloc); 9449 1.1 mrg } 9450 1.1 mrg 9451 1.1 mrg if (copied_p) 9452 1.1 mrg { 9453 1.1 mrg status = find_src_status (set, src); 9454 1.1 mrg 9455 1.1 mrg src = find_src_set_src (set, src); 9456 1.1 mrg } 9457 1.1 mrg 9458 1.1 mrg if (REG_P (dst)) 9459 1.1 mrg var_reg_delete_and_set (set, dst, !copied_p, 9460 1.1 mrg status, srcv); 9461 1.1 mrg else if (MEM_P (dst)) 9462 1.1 mrg { 9463 1.1 mrg gcc_assert (MEM_P (dstv)); 9464 1.1 mrg gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst)); 9465 1.1 mrg var_mem_delete_and_set (set, dstv, !copied_p, 9466 1.1 mrg status, srcv); 9467 1.1 mrg } 9468 1.1 mrg } 9469 1.1 mrg } 9470 1.1 mrg else if (REG_P (uloc)) 9471 1.1 mrg var_regno_delete (set, REGNO (uloc)); 9472 1.1 mrg else if (MEM_P (uloc)) 9473 1.1 mrg { 9474 1.1 mrg gcc_checking_assert (GET_CODE (vloc) == MEM); 9475 1.1 mrg gcc_checking_assert (vloc == dstv); 9476 1.1 mrg if (vloc != dstv) 9477 1.1 mrg clobber_overlapping_mems (set, vloc); 9478 1.1 mrg } 9479 1.1 mrg 9480 1.1 mrg 
val_store (set, val, dstv, insn, true); 9481 1.1 mrg 9482 1.1 mrg emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN, 9483 1.1 mrg set->vars); 9484 1.1 mrg } 9485 1.1 mrg break; 9486 1.1 mrg 9487 1.1 mrg case MO_SET: 9488 1.1 mrg { 9489 1.1 mrg rtx loc = mo->u.loc; 9490 1.1 mrg rtx set_src = NULL; 9491 1.1 mrg 9492 1.1 mrg if (GET_CODE (loc) == SET) 9493 1.1 mrg { 9494 1.1 mrg set_src = SET_SRC (loc); 9495 1.1 mrg loc = SET_DEST (loc); 9496 1.1 mrg } 9497 1.1 mrg 9498 1.1 mrg if (REG_P (loc)) 9499 1.1 mrg var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED, 9500 1.1 mrg set_src); 9501 1.1 mrg else 9502 1.1 mrg var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED, 9503 1.1 mrg set_src); 9504 1.1 mrg 9505 1.1 mrg emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN, 9506 1.1 mrg set->vars); 9507 1.1 mrg } 9508 1.1 mrg break; 9509 1.1 mrg 9510 1.1 mrg case MO_COPY: 9511 1.1 mrg { 9512 1.1 mrg rtx loc = mo->u.loc; 9513 1.1 mrg enum var_init_status src_status; 9514 1.1 mrg rtx set_src = NULL; 9515 1.1 mrg 9516 1.1 mrg if (GET_CODE (loc) == SET) 9517 1.1 mrg { 9518 1.1 mrg set_src = SET_SRC (loc); 9519 1.1 mrg loc = SET_DEST (loc); 9520 1.1 mrg } 9521 1.1 mrg 9522 1.1 mrg src_status = find_src_status (set, set_src); 9523 1.1 mrg set_src = find_src_set_src (set, set_src); 9524 1.1 mrg 9525 1.1 mrg if (REG_P (loc)) 9526 1.1 mrg var_reg_delete_and_set (set, loc, false, src_status, set_src); 9527 1.1 mrg else 9528 1.1 mrg var_mem_delete_and_set (set, loc, false, src_status, set_src); 9529 1.1 mrg 9530 1.1 mrg emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN, 9531 1.1 mrg set->vars); 9532 1.1 mrg } 9533 1.1 mrg break; 9534 1.1 mrg 9535 1.1 mrg case MO_USE_NO_VAR: 9536 1.1 mrg { 9537 1.1 mrg rtx loc = mo->u.loc; 9538 1.1 mrg 9539 1.1 mrg if (REG_P (loc)) 9540 1.1 mrg var_reg_delete (set, loc, false); 9541 1.1 mrg else 9542 1.1 mrg var_mem_delete (set, loc, false); 9543 1.1 mrg 9544 1.1 mrg emit_notes_for_changes (insn, 
EMIT_NOTE_AFTER_INSN, set->vars); 9545 1.1 mrg } 9546 1.1 mrg break; 9547 1.1 mrg 9548 1.1 mrg case MO_CLOBBER: 9549 1.1 mrg { 9550 1.1 mrg rtx loc = mo->u.loc; 9551 1.1 mrg 9552 1.1 mrg if (REG_P (loc)) 9553 1.1 mrg var_reg_delete (set, loc, true); 9554 1.1 mrg else 9555 1.1 mrg var_mem_delete (set, loc, true); 9556 1.1 mrg 9557 1.1 mrg emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN, 9558 1.1 mrg set->vars); 9559 1.1 mrg } 9560 1.1 mrg break; 9561 1.1 mrg 9562 1.1 mrg case MO_ADJUST: 9563 1.1 mrg set->stack_adjust += mo->u.adjust; 9564 1.1 mrg break; 9565 1.1 mrg } 9566 1.1 mrg } 9567 1.1 mrg } 9568 1.1 mrg 9569 1.1 mrg /* Emit notes for the whole function. */ 9570 1.1 mrg 9571 1.1 mrg static void 9572 1.1 mrg vt_emit_notes (void) 9573 1.1 mrg { 9574 1.1 mrg basic_block bb; 9575 1.1 mrg dataflow_set cur; 9576 1.1 mrg 9577 1.1 mrg gcc_assert (changed_variables->is_empty ()); 9578 1.1 mrg 9579 1.1 mrg /* Free memory occupied by the out hash tables, as they aren't used 9580 1.1 mrg anymore. */ 9581 1.1 mrg FOR_EACH_BB_FN (bb, cfun) 9582 1.1 mrg dataflow_set_clear (&VTI (bb)->out); 9583 1.1 mrg 9584 1.1 mrg /* Enable emitting notes by functions (mainly by set_variable_part and 9585 1.1 mrg delete_variable_part). */ 9586 1.1 mrg emit_notes = true; 9587 1.1 mrg 9588 1.1 mrg if (MAY_HAVE_DEBUG_BIND_INSNS) 9589 1.1 mrg dropped_values = new variable_table_type (cselib_get_next_uid () * 2); 9590 1.1 mrg 9591 1.1 mrg dataflow_set_init (&cur); 9592 1.1 mrg 9593 1.1 mrg FOR_EACH_BB_FN (bb, cfun) 9594 1.1 mrg { 9595 1.1 mrg /* Emit the notes for changes of variable locations between two 9596 1.1 mrg subsequent basic blocks. */ 9597 1.1 mrg emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in); 9598 1.1 mrg 9599 1.1 mrg if (MAY_HAVE_DEBUG_BIND_INSNS) 9600 1.1 mrg local_get_addr_cache = new hash_map<rtx, rtx>; 9601 1.1 mrg 9602 1.1 mrg /* Emit the notes for the changes in the basic block itself. 
*/ 9603 1.1 mrg emit_notes_in_bb (bb, &cur); 9604 1.1 mrg 9605 1.1 mrg if (MAY_HAVE_DEBUG_BIND_INSNS) 9606 1.1 mrg delete local_get_addr_cache; 9607 1.1 mrg local_get_addr_cache = NULL; 9608 1.1 mrg 9609 1.1 mrg /* Free memory occupied by the in hash table, we won't need it 9610 1.1 mrg again. */ 9611 1.1 mrg dataflow_set_clear (&VTI (bb)->in); 9612 1.1 mrg } 9613 1.1 mrg 9614 1.1 mrg if (flag_checking) 9615 1.1 mrg shared_hash_htab (cur.vars) 9616 1.1 mrg ->traverse <variable_table_type *, emit_notes_for_differences_1> 9617 1.1 mrg (shared_hash_htab (empty_shared_hash)); 9618 1.1 mrg 9619 1.1 mrg dataflow_set_destroy (&cur); 9620 1.1 mrg 9621 1.1 mrg if (MAY_HAVE_DEBUG_BIND_INSNS) 9622 1.1 mrg delete dropped_values; 9623 1.1 mrg dropped_values = NULL; 9624 1.1 mrg 9625 1.1 mrg emit_notes = false; 9626 1.1 mrg } 9627 1.1 mrg 9628 1.1 mrg /* If there is a declaration and offset associated with register/memory RTL 9629 1.1 mrg assign declaration to *DECLP and offset to *OFFSETP, and return true. 
*/ 9630 1.1 mrg 9631 1.1 mrg static bool 9632 1.1 mrg vt_get_decl_and_offset (rtx rtl, tree *declp, poly_int64 *offsetp) 9633 1.1 mrg { 9634 1.1 mrg if (REG_P (rtl)) 9635 1.1 mrg { 9636 1.1 mrg if (REG_ATTRS (rtl)) 9637 1.1 mrg { 9638 1.1 mrg *declp = REG_EXPR (rtl); 9639 1.1 mrg *offsetp = REG_OFFSET (rtl); 9640 1.1 mrg return true; 9641 1.1 mrg } 9642 1.1 mrg } 9643 1.1 mrg else if (GET_CODE (rtl) == PARALLEL) 9644 1.1 mrg { 9645 1.1 mrg tree decl = NULL_TREE; 9646 1.1 mrg HOST_WIDE_INT offset = MAX_VAR_PARTS; 9647 1.1 mrg int len = XVECLEN (rtl, 0), i; 9648 1.1 mrg 9649 1.1 mrg for (i = 0; i < len; i++) 9650 1.1 mrg { 9651 1.1 mrg rtx reg = XEXP (XVECEXP (rtl, 0, i), 0); 9652 1.1 mrg if (!REG_P (reg) || !REG_ATTRS (reg)) 9653 1.1 mrg break; 9654 1.1 mrg if (!decl) 9655 1.1 mrg decl = REG_EXPR (reg); 9656 1.1 mrg if (REG_EXPR (reg) != decl) 9657 1.1 mrg break; 9658 1.1 mrg HOST_WIDE_INT this_offset; 9659 1.1 mrg if (!track_offset_p (REG_OFFSET (reg), &this_offset)) 9660 1.1 mrg break; 9661 1.1 mrg offset = MIN (offset, this_offset); 9662 1.1 mrg } 9663 1.1 mrg 9664 1.1 mrg if (i == len) 9665 1.1 mrg { 9666 1.1 mrg *declp = decl; 9667 1.1 mrg *offsetp = offset; 9668 1.1 mrg return true; 9669 1.1 mrg } 9670 1.1 mrg } 9671 1.1 mrg else if (MEM_P (rtl)) 9672 1.1 mrg { 9673 1.1 mrg if (MEM_ATTRS (rtl)) 9674 1.1 mrg { 9675 1.1 mrg *declp = MEM_EXPR (rtl); 9676 1.1 mrg *offsetp = int_mem_offset (rtl); 9677 1.1 mrg return true; 9678 1.1 mrg } 9679 1.1 mrg } 9680 1.1 mrg return false; 9681 1.1 mrg } 9682 1.1 mrg 9683 1.1 mrg /* Record the value for the ENTRY_VALUE of RTL as a global equivalence 9684 1.1 mrg of VAL. 
*/

/* Attach RTL as a permanent DW_OP_entry_value equivalence of VAL, so
   that the debugger can recover the value the location RTL had on
   entry to the function.  */

static void
record_entry_value (cselib_val *val, rtx rtl)
{
  rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));

  ENTRY_VALUE_EXP (ev) = rtl;

  /* Permanent equivalences survive cselib_reset_table, so this holds
     for the whole function.  */
  cselib_add_permanent_equiv (val, ev, get_insns ());
}

/* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK.  */

static void
vt_add_function_parameter (tree parm)
{
  rtx decl_rtl = DECL_RTL_IF_SET (parm);
  rtx incoming = DECL_INCOMING_RTL (parm);
  tree decl;
  machine_mode mode;
  poly_int64 offset;
  dataflow_set *out;
  decl_or_value dv;
  /* Whether vt_get_decl_and_offset succeeded on INCOMING; the
     PARALLEL handling below relies on it.  */
  bool incoming_ok = true;

  if (TREE_CODE (parm) != PARM_DECL)
    return;

  if (!decl_rtl || !incoming)
    return;

  /* BLKmode locations have no single tracked mode.  */
  if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
    return;

  /* If there is a DRAP register or a pseudo in internal_arg_pointer,
     rewrite the incoming location of parameters passed on the stack
     into MEMs based on the argument pointer, so that incoming doesn't
     depend on a pseudo.  */
  poly_int64 incoming_offset = 0;
  if (MEM_P (incoming)
      && (strip_offset (XEXP (incoming, 0), &incoming_offset)
	  == crtl->args.internal_arg_pointer))
    {
      HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
      incoming
	= replace_equiv_address_nv (incoming,
				    plus_constant (Pmode,
						   arg_pointer_rtx,
						   off + incoming_offset));
    }

#ifdef HAVE_window_save
  /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
     If the target machine has an explicit window save instruction, the
     actual entry value is the corresponding OUTGOING_REGNO instead.  */
  if (HAVE_window_save && !crtl->uses_only_leaf_regs)
    {
      /* Case 1: parameter passed directly in a hard register.  */
      if (REG_P (incoming)
	  && HARD_REGISTER_P (incoming)
	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
	{
	  parm_reg p;
	  p.incoming = incoming;
	  incoming
	    = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
				  OUTGOING_REGNO (REGNO (incoming)), 0);
	  p.outgoing = incoming;
	  /* Remember the pair so notes can be rewritten later.  */
	  vec_safe_push (windowed_parm_regs, p);
	}
      /* Case 2: parameter split across several registers.  */
      else if (GET_CODE (incoming) == PARALLEL)
	{
	  rtx outgoing
	    = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
	  int i;

	  for (i = 0; i < XVECLEN (incoming, 0); i++)
	    {
	      rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
	      parm_reg p;
	      p.incoming = reg;
	      reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
					OUTGOING_REGNO (REGNO (reg)), 0);
	      p.outgoing = reg;
	      XVECEXP (outgoing, 0, i)
		= gen_rtx_EXPR_LIST (VOIDmode, reg,
				     XEXP (XVECEXP (incoming, 0, i), 1));
	      vec_safe_push (windowed_parm_regs, p);
	    }

	  incoming = outgoing;
	}
      /* Case 3: parameter in memory addressed via a hard register.  */
      else if (MEM_P (incoming)
	       && REG_P (XEXP (incoming, 0))
	       && HARD_REGISTER_P (XEXP (incoming, 0)))
	{
	  rtx reg = XEXP (incoming, 0);
	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
	    {
	      parm_reg p;
	      p.incoming = reg;
	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
	      p.outgoing = reg;
	      vec_safe_push (windowed_parm_regs, p);
	      incoming = replace_equiv_address_nv (incoming, reg);
	    }
	}
    }
#endif

  if (!vt_get_decl_and_offset (incoming, &decl, &offset))
    {
      incoming_ok = false;
      if (MEM_P (incoming))
	{
	  /* This means argument is passed by invisible reference.  */
	  offset = 0;
	  decl = parm;
	}
      else
	{
	  /* Fall back to the DECL_RTL location.  */
	  if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
	    return;
	  offset += byte_lowpart_offset (GET_MODE (incoming),
					 GET_MODE (decl_rtl));
	}
    }

  if (!decl)
    return;

  if (parm != decl)
    {
      /* If that DECL_RTL wasn't a pseudo that got spilled to
	 memory, bail out.  Otherwise, the spill slot sharing code
	 will force the memory to reference spill_slot_decl (%sfp),
	 so we don't match above.  That's ok, the pseudo must have
	 referenced the entire parameter, so just reset OFFSET.  */
      if (decl != get_spill_slot_decl (false))
	return;
      offset = 0;
    }

  HOST_WIDE_INT const_offset;
  if (!track_loc_p (incoming, parm, offset, false, &mode, &const_offset))
    return;

  out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;

  dv = dv_from_decl (parm);

  if (target_for_debug_bind (parm)
      /* We can't deal with these right now, because this kind of
	 variable is single-part.  ??? We could handle parallels
	 that describe multiple locations for the same single
	 value, but ATM we don't.  */
      && GET_CODE (incoming) != PARALLEL)
    {
      cselib_val *val;
      rtx lowpart;

      /* ??? We shouldn't ever hit this, but it may happen because
	 arguments passed by invisible reference aren't dealt with
	 above: incoming-rtl will have Pmode rather than the
	 expected mode for the type.  */
      if (const_offset)
	return;

      lowpart = var_lowpart (mode, incoming);
      if (!lowpart)
	return;

      val = cselib_lookup_from_insn (lowpart, mode, true,
				     VOIDmode, get_insns ());

      /* ??? Float-typed values in memory are not handled by
	 cselib.  */
      if (val)
	{
	  preserve_value (val);
	  set_variable_part (out, val->val_rtx, dv, const_offset,
			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
	  /* From here on track the VALUE rather than the decl.  */
	  dv = dv_from_value (val->val_rtx);
	}

      if (MEM_P (incoming))
	{
	  val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
					 VOIDmode, get_insns ());
	  if (val)
	    {
	      preserve_value (val);
	      incoming = replace_equiv_address_nv (incoming, val->val_rtx);
	    }
	}
    }

  if (REG_P (incoming))
    {
      incoming = var_lowpart (mode, incoming);
      gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
      attrs_list_insert (&out->regs[REGNO (incoming)], dv, const_offset,
			 incoming);
      set_variable_part (out, incoming, dv, const_offset,
			 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
      if (dv_is_value_p (dv))
	{
	  record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
	  /* For a reference to an integral object, also record an
	     entry value for the pointed-to object so its value at
	     function entry can be shown.  */
	  if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
	      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
	    {
	      machine_mode indmode
		= TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
	      rtx mem = gen_rtx_MEM (indmode, incoming);
	      cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
							 VOIDmode,
							 get_insns ());
	      if (val)
		{
		  preserve_value (val);
		  record_entry_value (val, mem);
		  set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
				     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
		}
	    }
	}
    }
  else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
    {
      int i;

      /* The following code relies on vt_get_decl_and_offset returning true for
	 incoming, which might not be always the case.  */
      if (!incoming_ok)
	return;
      for (i = 0; i < XVECLEN (incoming, 0); i++)
	{
	  rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
	  /* vt_get_decl_and_offset has already checked that the offset
	     is a valid variable part.  */
	  const_offset = get_tracked_reg_offset (reg);
	  gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
	  attrs_list_insert (&out->regs[REGNO (reg)], dv, const_offset, reg);
	  set_variable_part (out, reg, dv, const_offset,
			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
	}
    }
  else if (MEM_P (incoming))
    {
      incoming = var_lowpart (mode, incoming);
      set_variable_part (out, incoming, dv, const_offset,
			 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
    }
}

/* Insert function parameters to IN and OUT sets of ENTRY_BLOCK.
*/

/* Register the entry-block location of each PARM_DECL of the current
   function, plus the artificial nameless parameter that may underlie
   DECL_RESULT's value expression (used for functions returning via an
   invisible reference).  */

static void
vt_add_function_parameters (void)
{
  for (tree p = DECL_ARGUMENTS (current_function_decl);
       p; p = DECL_CHAIN (p))
    vt_add_function_parameter (p);

  if (!DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
    return;

  tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));

  /* Look through the dereference to the underlying pointer.  */
  if (TREE_CODE (vexpr) == INDIRECT_REF)
    vexpr = TREE_OPERAND (vexpr, 0);

  /* Only track the compiler-generated, nameless result pointer.  */
  if (TREE_CODE (vexpr) == PARM_DECL
      && DECL_ARTIFICIAL (vexpr)
      && !DECL_IGNORED_P (vexpr)
      && DECL_NAMELESS (vexpr))
    vt_add_function_parameter (vexpr);
}

/* Initialize cfa_base_rtx, create a preserved VALUE for it and
   ensure it isn't flushed during cselib_reset_table.
   Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
   has been eliminated.
*/

static void
vt_init_cfa_base (void)
{
  cselib_val *val;

  /* Pick the virtual CFA pointer the target eliminates to, and the
     offset from it to the CFA.  */
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_base_rtx = frame_pointer_rtx;
  cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_base_rtx = arg_pointer_rtx;
  cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
  /* Bail out if the chosen register cannot serve as a stable base:
     it is the hard frame pointer itself, or not a fixed register.  */
  if (cfa_base_rtx == hard_frame_pointer_rtx
      || !fixed_regs[REGNO (cfa_base_rtx)])
    {
      cfa_base_rtx = NULL_RTX;
      return;
    }
  if (!MAY_HAVE_DEBUG_BIND_INSNS)
    return;

  /* Tell alias analysis that cfa_base_rtx should share
     find_base_term value with stack pointer or hard frame pointer.  */
  if (!frame_pointer_needed)
    vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
  else if (!crtl->stack_realign_tried)
    vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);

  /* Create and preserve the VALUE so it survives cselib_reset_table.  */
  val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
				 VOIDmode, get_insns ());
  preserve_value (val);
  cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
}

/* Reemit INSN, a MARKER_DEBUG_INSN, as a note.
*/

static rtx_insn *
reemit_marker_as_note (rtx_insn *insn)
{
  gcc_checking_assert (DEBUG_MARKER_INSN_P (insn));

  enum insn_note kind = INSN_DEBUG_MARKER_KIND (insn);

  switch (kind)
    {
    case NOTE_INSN_BEGIN_STMT:
    case NOTE_INSN_INLINE_ENTRY:
      {
	rtx_insn *note = NULL;
	/* Only emit the replacement note if the function still wants
	   nonbind markers; otherwise just drop the insn.  */
	if (cfun->debug_nonbind_markers)
	  {
	    note = emit_note_before (kind, insn);
	    NOTE_MARKER_LOCATION (note) = INSN_LOCATION (insn);
	  }
	delete_insn (insn);
	return note;
      }

    default:
      gcc_unreachable ();
    }
}

/* Allocate and initialize the data structures for variable tracking
   and parse the RTL to get the micro operations.  */

static bool
vt_initialize (void)
{
  basic_block bb;
  /* CFA offset of the hard frame pointer, or -1 if unknown/unused.  */
  poly_int64 fp_cfa_offset = -1;

  alloc_aux_for_blocks (sizeof (variable_tracking_info));

  empty_shared_hash = shared_hash_pool.allocate ();
  empty_shared_hash->refcount = 1;
  empty_shared_hash->htab = new variable_table_type (1);
  changed_variables = new variable_table_type (10);

  /* Init the IN and OUT sets.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      VTI (bb)->visited = false;
      VTI (bb)->flooded = false;
      dataflow_set_init (&VTI (bb)->in);
      dataflow_set_init (&VTI (bb)->out);
      VTI (bb)->permp = NULL;
    }

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
      scratch_regs = BITMAP_ALLOC (NULL);
      preserved_values.create (256);
      global_get_addr_cache = new hash_map<rtx, rtx>;
    }
  else
    {
      scratch_regs = NULL;
      global_get_addr_cache = NULL;
    }

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      rtx reg, expr;
      int ofst;
      cselib_val *val;

      /* Record the CFA base register and its relation to the incoming
	 stack pointer as permanent cselib equivalences.  */
#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

      ofst -= INCOMING_FRAME_SP_OFFSET;

      val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
				     VOIDmode, get_insns ());
      preserve_value (val);
      if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
	cselib_preserve_cfa_base_value (val, REGNO (reg));
      if (ofst)
	{
	  cselib_val *valsp
	    = cselib_lookup_from_insn (stack_pointer_rtx,
				       GET_MODE (stack_pointer_rtx), 1,
				       VOIDmode, get_insns ());
	  preserve_value (valsp);
	  expr = plus_constant (GET_MODE (reg), reg, ofst);
	  /* This cselib_add_permanent_equiv call needs to be done before
	     the other cselib_add_permanent_equiv a few lines later,
	     because after that one is done, cselib_lookup on this expr
	     will due to the cselib SP_DERIVED_VALUE_P optimizations
	     return valsp and so no permanent equivalency will be added.  */
	  cselib_add_permanent_equiv (valsp, expr, get_insns ());
	}

      expr = plus_constant (GET_MODE (stack_pointer_rtx),
			    stack_pointer_rtx, -ofst);
      cselib_add_permanent_equiv (val, expr, get_insns ());
    }

  /* In order to factor out the adjustments made to the stack pointer or to
     the hard frame pointer and thus be able to use DW_OP_fbreg operations
     instead of individual location lists, we're going to rewrite MEMs based
     on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
     or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
     resp. arg_pointer_rtx.  We can do this either when there is no frame
     pointer in the function and stack adjustments are consistent for all
     basic blocks or when there is a frame pointer and no stack realignment.
     But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
     has been eliminated.  */
  if (!frame_pointer_needed)
    {
      rtx reg, elim;

      if (!vt_stack_adjustments ())
	return false;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    elim = XEXP (elim, 0);
	  if (elim == stack_pointer_rtx)
	    vt_init_cfa_base ();
	}
    }
  else if (!crtl->stack_realign_tried)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    {
	      fp_cfa_offset -= rtx_to_poly_int64 (XEXP (elim, 1));
	      elim = XEXP (elim, 0);
	    }
	  if (elim != hard_frame_pointer_rtx)
	    fp_cfa_offset = -1;
	}
      else
	fp_cfa_offset = -1;
    }

  /* If the stack is realigned and a DRAP register is used, we're going to
     rewrite MEMs based on it representing incoming locations of parameters
     passed on the stack into MEMs based on the argument pointer.  Although
     we aren't going to rewrite other MEMs, we still need to initialize the
     virtual CFA pointer in order to ensure that the argument pointer will
     be seen as a constant throughout the function.

     ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined.  */
  else if (stack_realign_drap)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    elim = XEXP (elim, 0);
	  if (elim == hard_frame_pointer_rtx)
	    vt_init_cfa_base ();
	}
    }

  hard_frame_pointer_adjustment = -1;

  vt_add_function_parameters ();

  /* Don't record the sp/CFA-base equivalence for the very first block;
     it is established by the permanent equivalences above.  */
  bool record_sp_value = false;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      basic_block first_bb, last_bb;

      if (MAY_HAVE_DEBUG_BIND_INSNS)
	{
	  cselib_record_sets_hook = add_with_sets;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "first value: %i\n",
		     cselib_get_next_uid ());
	}

      if (MAY_HAVE_DEBUG_BIND_INSNS
	  && cfa_base_rtx
	  && !frame_pointer_needed
	  && record_sp_value)
	cselib_record_sp_cfa_base_equiv (-cfa_base_offset
					 - VTI (bb)->in.stack_adjust,
					 BB_HEAD (bb));
      record_sp_value = true;

      /* Extend the run of blocks as long as each next block is the
	 unique fallthru successor, so cselib state can be carried
	 across the whole run before being reset.  */
      first_bb = bb;
      for (;;)
	{
	  edge e;
	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	      || ! single_pred_p (bb->next_bb))
	    break;
	  e = find_edge (bb, bb->next_bb);
	  if (! e || (e->flags & EDGE_FALLTHRU) == 0)
	    break;
	  bb = bb->next_bb;
	}
      last_bb = bb;

      /* Add the micro-operations to the vector.  */
      FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
	{
	  HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
	  VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;

	  rtx_insn *next;
	  FOR_BB_INSNS_SAFE (bb, insn, next)
	    {
	      if (INSN_P (insn))
		{
		  HOST_WIDE_INT pre = 0, post = 0;

		  if (!frame_pointer_needed)
		    {
		      insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		      if (pre)
			{
			  /* Pre-modifying stack adjustment goes before
			     all other micro operations of the insn.  */
			  micro_operation mo;
			  mo.type = MO_ADJUST;
			  mo.u.adjust = pre;
			  mo.insn = insn;
			  if (dump_file && (dump_flags & TDF_DETAILS))
			    log_op_type (PATTERN (insn), bb, insn,
					 MO_ADJUST, dump_file);
			  VTI (bb)->mos.safe_push (mo);
			}
		    }

		  cselib_hook_called = false;
		  adjust_insn (bb, insn);

		  if (pre)
		    VTI (bb)->out.stack_adjust += pre;

		  if (DEBUG_MARKER_INSN_P (insn))
		    {
		      reemit_marker_as_note (insn);
		      continue;
		    }

		  if (MAY_HAVE_DEBUG_BIND_INSNS)
		    {
		      if (CALL_P (insn))
			prepare_call_arguments (bb, insn);
		      cselib_process_insn (insn);
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  if (dump_flags & TDF_SLIM)
			    dump_insn_slim (dump_file, insn);
			  else
			    print_rtl_single (dump_file, insn);
			  dump_cselib_table (dump_file);
			}
		    }
		  /* If cselib didn't call the hook (e.g. bind-insn
		     tracking is off), record the sets directly.  */
		  if (!cselib_hook_called)
		    add_with_sets (insn, 0, 0);
		  cancel_changes (0);

		  if (post)
		    {
		      /* Post-modifying stack adjustment goes after all
			 other micro operations of the insn.  */
		      micro_operation mo;
		      mo.type = MO_ADJUST;
		      mo.u.adjust = post;
		      mo.insn = insn;
		      if (dump_file && (dump_flags & TDF_DETAILS))
			log_op_type (PATTERN (insn), bb, insn,
				     MO_ADJUST, dump_file);
		      VTI (bb)->mos.safe_push (mo);
		      VTI (bb)->out.stack_adjust += post;
		    }

		  if (maybe_ne (fp_cfa_offset, -1)
		      && known_eq (hard_frame_pointer_adjustment, -1)
		      && fp_setter_insn (insn))
		    {
		      vt_init_cfa_base ();
		      hard_frame_pointer_adjustment = fp_cfa_offset;
		      /* Disassociate sp from fp now.  */
		      if (MAY_HAVE_DEBUG_BIND_INSNS)
			{
			  cselib_val *v;
			  cselib_invalidate_rtx (stack_pointer_rtx);
			  v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
					     VOIDmode);
			  if (v && !cselib_preserved_value_p (v))
			    {
			      cselib_set_value_sp_based (v);
			      preserve_value (v);
			    }
			}
		    }
		}
	    }
	  gcc_assert (offset == VTI (bb)->out.stack_adjust);
	}

      bb = last_bb;

      if (MAY_HAVE_DEBUG_BIND_INSNS)
	{
	  cselib_preserve_only_values ();
	  cselib_reset_table (cselib_get_next_uid ());
	  cselib_record_sets_hook = NULL;
	}
    }

  hard_frame_pointer_adjustment = -1;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
  cfa_base_rtx = NULL_RTX;
  return true;
}

/* This is *not* reset after each function.  It gives each
   NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
   a unique label number.  */

static int debug_label_num = 1;

/* Remove from the insn stream a single debug insn used for
   variable tracking at assignments.
*/

static inline void
delete_vta_debug_insn (rtx_insn *insn)
{
  /* Marker debug insns become notes rather than being deleted.  */
  if (DEBUG_MARKER_INSN_P (insn))
    {
      reemit_marker_as_note (insn);
      return;
    }

  tree decl = INSN_VAR_LOCATION_DECL (insn);
  /* A named label whose DECL_RTL was never set is turned in place into
     a NOTE_INSN_DELETED_DEBUG_LABEL so the label name survives into
     the debug info.  */
  if (TREE_CODE (decl) == LABEL_DECL
      && DECL_NAME (decl)
      && !DECL_RTL_SET_P (decl))
    {
      PUT_CODE (insn, NOTE);
      NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
      NOTE_DELETED_LABEL_NAME (insn)
	= IDENTIFIER_POINTER (DECL_NAME (decl));
      SET_DECL_RTL (decl, insn);
      /* debug_label_num is global across the whole compilation.  */
      CODE_LABEL_NUMBER (insn) = debug_label_num++;
    }
  else
    delete_insn (insn);
}

/* Remove from the insn stream all debug insns used for variable
   tracking at assignments.  USE_CFG should be false if the cfg is no
   longer usable.
*/ 10389 1.1 mrg 10390 1.1 mrg void 10391 1.1 mrg delete_vta_debug_insns (bool use_cfg) 10392 1.1 mrg { 10393 1.1 mrg basic_block bb; 10394 1.1 mrg rtx_insn *insn, *next; 10395 1.1 mrg 10396 1.1 mrg if (!MAY_HAVE_DEBUG_INSNS) 10397 1.1 mrg return; 10398 1.1 mrg 10399 1.1 mrg if (use_cfg) 10400 1.1 mrg FOR_EACH_BB_FN (bb, cfun) 10401 1.1 mrg { 10402 1.1 mrg FOR_BB_INSNS_SAFE (bb, insn, next) 10403 1.1 mrg if (DEBUG_INSN_P (insn)) 10404 1.1 mrg delete_vta_debug_insn (insn); 10405 1.1 mrg } 10406 1.1 mrg else 10407 1.1 mrg for (insn = get_insns (); insn; insn = next) 10408 1.1 mrg { 10409 1.1 mrg next = NEXT_INSN (insn); 10410 1.1 mrg if (DEBUG_INSN_P (insn)) 10411 1.1 mrg delete_vta_debug_insn (insn); 10412 1.1 mrg } 10413 1.1 mrg } 10414 1.1 mrg 10415 1.1 mrg /* Run a fast, BB-local only version of var tracking, to take care of 10416 1.1 mrg information that we don't do global analysis on, such that not all 10417 1.1 mrg information is lost. If SKIPPED holds, we're skipping the global 10418 1.1 mrg pass entirely, so we should try to use information it would have 10419 1.1 mrg handled as well.. */ 10420 1.1 mrg 10421 1.1 mrg static void 10422 1.1 mrg vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED) 10423 1.1 mrg { 10424 1.1 mrg /* ??? Just skip it all for now. */ 10425 1.1 mrg delete_vta_debug_insns (true); 10426 1.1 mrg } 10427 1.1 mrg 10428 1.1 mrg /* Free the data structures needed for variable tracking. 
*/

static void
vt_finalize (void)
{
  basic_block bb;

  /* Micro-operation vectors exist only for real blocks.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      VTI (bb)->mos.release ();
    }

  /* Dataflow sets were initialized for entry/exit blocks too.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      dataflow_set_destroy (&VTI (bb)->in);
      dataflow_set_destroy (&VTI (bb)->out);
      if (VTI (bb)->permp)
	{
	  dataflow_set_destroy (VTI (bb)->permp);
	  XDELETE (VTI (bb)->permp);
	}
    }
  free_aux_for_blocks ();
  delete empty_shared_hash->htab;
  empty_shared_hash->htab = NULL;
  delete changed_variables;
  changed_variables = NULL;
  attrs_pool.release ();
  var_pool.release ();
  location_chain_pool.release ();
  shared_hash_pool.release ();

  /* These were only allocated when bind debug insns were possible;
     mirror the setup in vt_initialize.  */
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      if (global_get_addr_cache)
	delete global_get_addr_cache;
      global_get_addr_cache = NULL;
      loc_exp_dep_pool.release ();
      valvar_pool.release ();
      preserved_values.release ();
      cselib_finish ();
      BITMAP_FREE (scratch_regs);
      scratch_regs = NULL;
    }

#ifdef HAVE_window_save
  vec_free (windowed_parm_regs);
#endif

  if (vui_vec)
    XDELETEVEC (vui_vec);
  vui_vec = NULL;
  vui_allocated = 0;
}

/* The entry point to variable tracking pass.
*/

static inline unsigned int
variable_tracking_main_1 (void)
{
  bool success;

  /* We won't be called as a separate pass if flag_var_tracking is not
     set, but final may call us to turn debug markers into notes.  */
  if ((!flag_var_tracking && MAY_HAVE_DEBUG_INSNS)
      || flag_var_tracking_assignments < 0
      /* Var-tracking right now assumes the IR doesn't contain
	 any pseudos at this point.  */
      || targetm.no_register_allocation)
    {
      delete_vta_debug_insns (true);
      return 0;
    }

  if (!flag_var_tracking)
    return 0;

  /* Punt on very large, densely connected CFGs to keep the dataflow
     iteration affordable.  */
  if (n_basic_blocks_for_fn (cfun) > 500
      && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
    {
      vt_debug_insns_local (true);
      return 0;
    }

  if (!vt_initialize ())
    {
      vt_finalize ();
      vt_debug_insns_local (true);
      return 0;
    }

  success = vt_find_locations ();

  /* If the dataflow analysis failed with assignment tracking enabled,
     retry once from scratch with it disabled.  */
  if (!success && flag_var_tracking_assignments > 0)
    {
      vt_finalize ();

      delete_vta_debug_insns (true);

      /* This is later restored by our caller.  */
      flag_var_tracking_assignments = 0;

      success = vt_initialize ();
      gcc_assert (success);

      success = vt_find_locations ();
    }

  if (!success)
    {
      vt_finalize ();
      vt_debug_insns_local (false);
      return 0;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_dataflow_sets ();
      dump_reg_info (dump_file);
      dump_flow_info (dump_file, dump_flags);
    }

  timevar_push (TV_VAR_TRACKING_EMIT);
  vt_emit_notes ();
  timevar_pop (TV_VAR_TRACKING_EMIT);

  vt_finalize ();
  vt_debug_insns_local (false);
  return 0;
}

/* Wrapper that saves and restores flag_var_tracking_assignments around
   the real work, since variable_tracking_main_1 may clear it.  */

unsigned int
variable_tracking_main (void)
{
  unsigned int ret;
  int save = flag_var_tracking_assignments;

  ret = variable_tracking_main_1 ();

  flag_var_tracking_assignments = save;

  return ret;
}

namespace {

const pass_data pass_data_variable_tracking =
{
  RTL_PASS, /* type */
  "vartrack", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_VAR_TRACKING, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_variable_tracking : public rtl_opt_pass
{
public:
  pass_variable_tracking (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_variable_tracking, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Skip entirely when the target defers var-tracking (e.g. to run
	 it at LTRANS time).  */
      return (flag_var_tracking && !targetm.delay_vartrack);
    }

  virtual unsigned int execute (function *)
    {
      return variable_tracking_main ();
    }

}; // class pass_variable_tracking

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

rtl_opt_pass *
make_pass_variable_tracking (gcc::context *ctxt)
{
  return new pass_variable_tracking (ctxt);
}