1 1.1 mrg /* RTL dead code elimination. 2 1.1 mrg Copyright (C) 2005-2022 Free Software Foundation, Inc. 3 1.1 mrg 4 1.1 mrg This file is part of GCC. 5 1.1 mrg 6 1.1 mrg GCC is free software; you can redistribute it and/or modify it under 7 1.1 mrg the terms of the GNU General Public License as published by the Free 8 1.1 mrg Software Foundation; either version 3, or (at your option) any later 9 1.1 mrg version. 10 1.1 mrg 11 1.1 mrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY 12 1.1 mrg WARRANTY; without even the implied warranty of MERCHANTABILITY or 13 1.1 mrg FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 14 1.1 mrg for more details. 15 1.1 mrg 16 1.1 mrg You should have received a copy of the GNU General Public License 17 1.1 mrg along with GCC; see the file COPYING3. If not see 18 1.1 mrg <http://www.gnu.org/licenses/>. */ 19 1.1 mrg 20 1.1 mrg #include "config.h" 21 1.1 mrg #include "system.h" 22 1.1 mrg #include "coretypes.h" 23 1.1 mrg #include "backend.h" 24 1.1 mrg #include "rtl.h" 25 1.1 mrg #include "tree.h" 26 1.1 mrg #include "predict.h" 27 1.1 mrg #include "df.h" 28 1.1 mrg #include "memmodel.h" 29 1.1 mrg #include "tm_p.h" 30 1.1 mrg #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */ 31 1.1 mrg #include "cfgrtl.h" 32 1.1 mrg #include "cfgbuild.h" 33 1.1 mrg #include "cfgcleanup.h" 34 1.1 mrg #include "dce.h" 35 1.1 mrg #include "valtrack.h" 36 1.1 mrg #include "tree-pass.h" 37 1.1 mrg #include "dbgcnt.h" 38 1.1 mrg #include "rtl-iter.h" 39 1.1 mrg 40 1.1 mrg 41 1.1 mrg /* ------------------------------------------------------------------------- 42 1.1 mrg Core mark/delete routines 43 1.1 mrg ------------------------------------------------------------------------- */ 44 1.1 mrg 45 1.1 mrg /* True if we are invoked while the df engine is running; in this case, 46 1.1 mrg we don't want to reenter it. 
*/ 47 1.1 mrg static bool df_in_progress = false; 48 1.1 mrg 49 1.1 mrg /* True if we are allowed to alter the CFG in this pass. */ 50 1.1 mrg static bool can_alter_cfg = false; 51 1.1 mrg 52 1.1 mrg /* Instructions that have been marked but whose dependencies have not 53 1.1 mrg yet been processed. */ 54 1.1 mrg static vec<rtx_insn *> worklist; 55 1.1 mrg 56 1.1 mrg /* Bitmap of instructions marked as needed indexed by INSN_UID. */ 57 1.1 mrg static sbitmap marked; 58 1.1 mrg 59 1.1 mrg /* Bitmap obstacks used for block processing by the fast algorithm. */ 60 1.1 mrg static bitmap_obstack dce_blocks_bitmap_obstack; 61 1.1 mrg static bitmap_obstack dce_tmp_bitmap_obstack; 62 1.1 mrg 63 1.1 mrg static bool find_call_stack_args (rtx_call_insn *, bool, bool, bitmap); 64 1.1 mrg 65 1.1 mrg /* A subroutine for which BODY is part of the instruction being tested; 66 1.1 mrg either the top-level pattern, or an element of a PARALLEL. The 67 1.1 mrg instruction is known not to be a bare USE or CLOBBER. */ 68 1.1 mrg 69 1.1 mrg static bool 70 1.1 mrg deletable_insn_p_1 (rtx body) 71 1.1 mrg { 72 1.1 mrg switch (GET_CODE (body)) 73 1.1 mrg { 74 1.1 mrg case PREFETCH: 75 1.1 mrg case TRAP_IF: 76 1.1 mrg /* The UNSPEC case was added here because the ia-64 claims that 77 1.1 mrg USEs do not work after reload and generates UNSPECS rather 78 1.1 mrg than USEs. Since dce is run after reload we need to avoid 79 1.1 mrg deleting these even if they are dead. If it turns out that 80 1.1 mrg USEs really do work after reload, the ia-64 should be 81 1.1 mrg changed, and the UNSPEC case can be removed. */ 82 1.1 mrg case UNSPEC: 83 1.1 mrg return false; 84 1.1 mrg 85 1.1 mrg default: 86 1.1 mrg return !volatile_refs_p (body); 87 1.1 mrg } 88 1.1 mrg } 89 1.1 mrg 90 1.1 mrg /* Don't delete calls that may throw if we cannot do so. 
*/ 91 1.1 mrg 92 1.1 mrg static bool 93 1.1 mrg can_delete_call (rtx_insn *insn) 94 1.1 mrg { 95 1.1 mrg if (cfun->can_delete_dead_exceptions && can_alter_cfg) 96 1.1 mrg return true; 97 1.1 mrg if (!insn_nothrow_p (insn)) 98 1.1 mrg return false; 99 1.1 mrg if (can_alter_cfg) 100 1.1 mrg return true; 101 1.1 mrg /* If we can't alter cfg, even when the call can't throw exceptions, it 102 1.1 mrg might have EDGE_ABNORMAL_CALL edges and so we shouldn't delete such 103 1.1 mrg calls. */ 104 1.1 mrg gcc_assert (CALL_P (insn)); 105 1.1 mrg if (BLOCK_FOR_INSN (insn) && BB_END (BLOCK_FOR_INSN (insn)) == insn) 106 1.1 mrg { 107 1.1 mrg edge e; 108 1.1 mrg edge_iterator ei; 109 1.1 mrg 110 1.1 mrg FOR_EACH_EDGE (e, ei, BLOCK_FOR_INSN (insn)->succs) 111 1.1 mrg if ((e->flags & EDGE_ABNORMAL_CALL) != 0) 112 1.1 mrg return false; 113 1.1 mrg } 114 1.1 mrg return true; 115 1.1 mrg } 116 1.1 mrg 117 1.1 mrg /* Return true if INSN is a normal instruction that can be deleted by 118 1.1 mrg the DCE pass. */ 119 1.1 mrg 120 1.1 mrg static bool 121 1.1 mrg deletable_insn_p (rtx_insn *insn, bool fast, bitmap arg_stores) 122 1.1 mrg { 123 1.1 mrg rtx body, x; 124 1.1 mrg int i; 125 1.1 mrg df_ref def; 126 1.1 mrg 127 1.1 mrg if (CALL_P (insn) 128 1.1 mrg /* We cannot delete calls inside of the recursive dce because 129 1.1 mrg this may cause basic blocks to be deleted and this messes up 130 1.1 mrg the rest of the stack of optimization passes. */ 131 1.1 mrg && (!df_in_progress) 132 1.1 mrg /* We cannot delete pure or const sibling calls because it is 133 1.1 mrg hard to see the result. */ 134 1.1 mrg && (!SIBLING_CALL_P (insn)) 135 1.1 mrg /* We can delete dead const or pure calls as long as they do not 136 1.1 mrg infinite loop. */ 137 1.1 mrg && (RTL_CONST_OR_PURE_CALL_P (insn) 138 1.1 mrg && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)) 139 1.1 mrg /* Don't delete calls that may throw if we cannot do so. 
*/ 140 1.1 mrg && can_delete_call (insn)) 141 1.1 mrg return find_call_stack_args (as_a <rtx_call_insn *> (insn), false, 142 1.1 mrg fast, arg_stores); 143 1.1 mrg 144 1.1 mrg /* Don't delete jumps, notes and the like. */ 145 1.1 mrg if (!NONJUMP_INSN_P (insn)) 146 1.1 mrg return false; 147 1.1 mrg 148 1.1 mrg /* Don't delete insns that may throw if we cannot do so. */ 149 1.1 mrg if (!(cfun->can_delete_dead_exceptions && can_alter_cfg) 150 1.1 mrg && !insn_nothrow_p (insn)) 151 1.1 mrg return false; 152 1.1 mrg 153 1.1 mrg /* If INSN sets a global_reg, leave it untouched. */ 154 1.1 mrg FOR_EACH_INSN_DEF (def, insn) 155 1.1 mrg if (HARD_REGISTER_NUM_P (DF_REF_REGNO (def)) 156 1.1 mrg && global_regs[DF_REF_REGNO (def)]) 157 1.1 mrg return false; 158 1.1 mrg /* Initialization of pseudo PIC register should never be removed. */ 159 1.1 mrg else if (DF_REF_REG (def) == pic_offset_table_rtx 160 1.1 mrg && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER) 161 1.1 mrg return false; 162 1.1 mrg 163 1.1 mrg /* Callee-save restores are needed. */ 164 1.1 mrg if (RTX_FRAME_RELATED_P (insn) 165 1.1 mrg && crtl->shrink_wrapped_separate 166 1.1 mrg && find_reg_note (insn, REG_CFA_RESTORE, NULL)) 167 1.1 mrg return false; 168 1.1 mrg 169 1.1 mrg body = PATTERN (insn); 170 1.1 mrg switch (GET_CODE (body)) 171 1.1 mrg { 172 1.1 mrg case USE: 173 1.1 mrg case VAR_LOCATION: 174 1.1 mrg return false; 175 1.1 mrg 176 1.1 mrg case CLOBBER: 177 1.1 mrg if (fast) 178 1.1 mrg { 179 1.1 mrg /* A CLOBBER of a dead pseudo register serves no purpose. 180 1.1 mrg That is not necessarily true for hard registers until 181 1.1 mrg after reload. */ 182 1.1 mrg x = XEXP (body, 0); 183 1.1 mrg return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed); 184 1.1 mrg } 185 1.1 mrg else 186 1.1 mrg /* Because of the way that use-def chains are built, it is not 187 1.1 mrg possible to tell if the clobber is dead because it can 188 1.1 mrg never be the target of a use-def chain. 
*/ 189 1.1 mrg return false; 190 1.1 mrg 191 1.1 mrg case PARALLEL: 192 1.1 mrg for (i = XVECLEN (body, 0) - 1; i >= 0; i--) 193 1.1 mrg if (!deletable_insn_p_1 (XVECEXP (body, 0, i))) 194 1.1 mrg return false; 195 1.1 mrg return true; 196 1.1 mrg 197 1.1 mrg default: 198 1.1 mrg return deletable_insn_p_1 (body); 199 1.1 mrg } 200 1.1 mrg } 201 1.1 mrg 202 1.1 mrg 203 1.1 mrg /* Return true if INSN has been marked as needed. */ 204 1.1 mrg 205 1.1 mrg static inline int 206 1.1 mrg marked_insn_p (rtx_insn *insn) 207 1.1 mrg { 208 1.1 mrg /* Artificial defs are always needed and they do not have an insn. 209 1.1 mrg We should never see them here. */ 210 1.1 mrg gcc_assert (insn); 211 1.1 mrg return bitmap_bit_p (marked, INSN_UID (insn)); 212 1.1 mrg } 213 1.1 mrg 214 1.1 mrg 215 1.1 mrg /* If INSN has not yet been marked as needed, mark it now, and add it to 216 1.1 mrg the worklist. */ 217 1.1 mrg 218 1.1 mrg static void 219 1.1 mrg mark_insn (rtx_insn *insn, bool fast) 220 1.1 mrg { 221 1.1 mrg if (!marked_insn_p (insn)) 222 1.1 mrg { 223 1.1 mrg if (!fast) 224 1.1 mrg worklist.safe_push (insn); 225 1.1 mrg bitmap_set_bit (marked, INSN_UID (insn)); 226 1.1 mrg if (dump_file) 227 1.1 mrg fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn)); 228 1.1 mrg if (CALL_P (insn) 229 1.1 mrg && !df_in_progress 230 1.1 mrg && !SIBLING_CALL_P (insn) 231 1.1 mrg && (RTL_CONST_OR_PURE_CALL_P (insn) 232 1.1 mrg && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)) 233 1.1 mrg && can_delete_call (insn)) 234 1.1 mrg find_call_stack_args (as_a <rtx_call_insn *> (insn), true, fast, NULL); 235 1.1 mrg } 236 1.1 mrg } 237 1.1 mrg 238 1.1 mrg 239 1.1 mrg /* A note_stores callback used by mark_nonreg_stores. DATA is the 240 1.1 mrg instruction containing DEST. 
*/ 241 1.1 mrg 242 1.1 mrg static void 243 1.1 mrg mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data) 244 1.1 mrg { 245 1.1 mrg if (GET_CODE (pattern) != CLOBBER && !REG_P (dest)) 246 1.1 mrg mark_insn ((rtx_insn *) data, true); 247 1.1 mrg } 248 1.1 mrg 249 1.1 mrg 250 1.1 mrg /* A note_stores callback used by mark_nonreg_stores. DATA is the 251 1.1 mrg instruction containing DEST. */ 252 1.1 mrg 253 1.1 mrg static void 254 1.1 mrg mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data) 255 1.1 mrg { 256 1.1 mrg if (GET_CODE (pattern) != CLOBBER && !REG_P (dest)) 257 1.1 mrg mark_insn ((rtx_insn *) data, false); 258 1.1 mrg } 259 1.1 mrg 260 1.1 mrg 261 1.1 mrg /* Mark INSN if it stores to a non-register destination. */ 262 1.1 mrg 263 1.1 mrg static void 264 1.1 mrg mark_nonreg_stores (rtx_insn *insn, bool fast) 265 1.1 mrg { 266 1.1 mrg if (fast) 267 1.1 mrg note_stores (insn, mark_nonreg_stores_1, insn); 268 1.1 mrg else 269 1.1 mrg note_stores (insn, mark_nonreg_stores_2, insn); 270 1.1 mrg } 271 1.1 mrg 272 1.1 mrg 273 1.1 mrg /* Return true if a store to SIZE bytes, starting OFF bytes from stack pointer, 274 1.1 mrg is a call argument store, and clear corresponding bits from SP_BYTES 275 1.1 mrg bitmap if it is. 
*/ 276 1.1 mrg 277 1.1 mrg static bool 278 1.1 mrg check_argument_store (HOST_WIDE_INT size, HOST_WIDE_INT off, 279 1.1 mrg HOST_WIDE_INT min_sp_off, HOST_WIDE_INT max_sp_off, 280 1.1 mrg bitmap sp_bytes) 281 1.1 mrg { 282 1.1 mrg HOST_WIDE_INT byte; 283 1.1 mrg for (byte = off; byte < off + size; byte++) 284 1.1 mrg { 285 1.1 mrg if (byte < min_sp_off 286 1.1 mrg || byte >= max_sp_off 287 1.1 mrg || !bitmap_clear_bit (sp_bytes, byte - min_sp_off)) 288 1.1 mrg return false; 289 1.1 mrg } 290 1.1 mrg return true; 291 1.1 mrg } 292 1.1 mrg 293 1.1 mrg /* If MEM has sp address, return 0, if it has sp + const address, 294 1.1 mrg return that const, if it has reg address where reg is set to sp + const 295 1.1 mrg and FAST is false, return const, otherwise return 296 1.1 mrg INTTYPE_MINUMUM (HOST_WIDE_INT). */ 297 1.1 mrg 298 1.1 mrg static HOST_WIDE_INT 299 1.1 mrg sp_based_mem_offset (rtx_call_insn *call_insn, const_rtx mem, bool fast) 300 1.1 mrg { 301 1.1 mrg HOST_WIDE_INT off = 0; 302 1.1 mrg rtx addr = XEXP (mem, 0); 303 1.1 mrg if (GET_CODE (addr) == PLUS 304 1.1 mrg && REG_P (XEXP (addr, 0)) 305 1.1 mrg && CONST_INT_P (XEXP (addr, 1))) 306 1.1 mrg { 307 1.1 mrg off = INTVAL (XEXP (addr, 1)); 308 1.1 mrg addr = XEXP (addr, 0); 309 1.1 mrg } 310 1.1 mrg if (addr == stack_pointer_rtx) 311 1.1 mrg return off; 312 1.1 mrg 313 1.1 mrg if (!REG_P (addr) || fast) 314 1.1 mrg return INTTYPE_MINIMUM (HOST_WIDE_INT); 315 1.1 mrg 316 1.1 mrg /* If not fast, use chains to see if addr wasn't set to sp + offset. */ 317 1.1 mrg df_ref use; 318 1.1 mrg FOR_EACH_INSN_USE (use, call_insn) 319 1.1 mrg if (rtx_equal_p (addr, DF_REF_REG (use))) 320 1.1 mrg break; 321 1.1 mrg 322 1.1 mrg if (use == NULL) 323 1.1 mrg return INTTYPE_MINIMUM (HOST_WIDE_INT); 324 1.1 mrg 325 1.1 mrg struct df_link *defs; 326 1.1 mrg for (defs = DF_REF_CHAIN (use); defs; defs = defs->next) 327 1.1 mrg if (! 
DF_REF_IS_ARTIFICIAL (defs->ref)) 328 1.1 mrg break; 329 1.1 mrg 330 1.1 mrg if (defs == NULL) 331 1.1 mrg return INTTYPE_MINIMUM (HOST_WIDE_INT); 332 1.1 mrg 333 1.1 mrg rtx set = single_set (DF_REF_INSN (defs->ref)); 334 1.1 mrg if (!set) 335 1.1 mrg return INTTYPE_MINIMUM (HOST_WIDE_INT); 336 1.1 mrg 337 1.1 mrg if (GET_CODE (SET_SRC (set)) != PLUS 338 1.1 mrg || XEXP (SET_SRC (set), 0) != stack_pointer_rtx 339 1.1 mrg || !CONST_INT_P (XEXP (SET_SRC (set), 1))) 340 1.1 mrg return INTTYPE_MINIMUM (HOST_WIDE_INT); 341 1.1 mrg 342 1.1 mrg off += INTVAL (XEXP (SET_SRC (set), 1)); 343 1.1 mrg return off; 344 1.1 mrg } 345 1.1 mrg 346 1.1 mrg /* Data for check_argument_load called via note_uses. */ 347 1.1 mrg struct check_argument_load_data { 348 1.1 mrg bitmap sp_bytes; 349 1.1 mrg HOST_WIDE_INT min_sp_off, max_sp_off; 350 1.1 mrg rtx_call_insn *call_insn; 351 1.1 mrg bool fast; 352 1.1 mrg bool load_found; 353 1.1 mrg }; 354 1.1 mrg 355 1.1 mrg /* Helper function for find_call_stack_args. Check if there are 356 1.1 mrg any loads from the argument slots in between the const/pure call 357 1.1 mrg and store to the argument slot, set LOAD_FOUND if any is found. 
*/ 358 1.1 mrg 359 1.1 mrg static void 360 1.1 mrg check_argument_load (rtx *loc, void *data) 361 1.1 mrg { 362 1.1 mrg struct check_argument_load_data *d 363 1.1 mrg = (struct check_argument_load_data *) data; 364 1.1 mrg subrtx_iterator::array_type array; 365 1.1 mrg FOR_EACH_SUBRTX (iter, array, *loc, NONCONST) 366 1.1 mrg { 367 1.1 mrg const_rtx mem = *iter; 368 1.1 mrg HOST_WIDE_INT size; 369 1.1 mrg if (MEM_P (mem) 370 1.1 mrg && MEM_SIZE_KNOWN_P (mem) 371 1.1 mrg && MEM_SIZE (mem).is_constant (&size)) 372 1.1 mrg { 373 1.1 mrg HOST_WIDE_INT off = sp_based_mem_offset (d->call_insn, mem, d->fast); 374 1.1 mrg if (off != INTTYPE_MINIMUM (HOST_WIDE_INT) 375 1.1 mrg && off < d->max_sp_off 376 1.1 mrg && off + size > d->min_sp_off) 377 1.1 mrg for (HOST_WIDE_INT byte = MAX (off, d->min_sp_off); 378 1.1 mrg byte < MIN (off + size, d->max_sp_off); byte++) 379 1.1 mrg if (bitmap_bit_p (d->sp_bytes, byte - d->min_sp_off)) 380 1.1 mrg { 381 1.1 mrg d->load_found = true; 382 1.1 mrg return; 383 1.1 mrg } 384 1.1 mrg } 385 1.1 mrg } 386 1.1 mrg } 387 1.1 mrg 388 1.1 mrg /* Try to find all stack stores of CALL_INSN arguments if 389 1.1 mrg ACCUMULATE_OUTGOING_ARGS. If all stack stores have been found 390 1.1 mrg and it is therefore safe to eliminate the call, return true, 391 1.1 mrg otherwise return false. This function should be first called 392 1.1 mrg with DO_MARK false, and only when the CALL_INSN is actually 393 1.1 mrg going to be marked called again with DO_MARK true. 
*/ 394 1.1 mrg 395 1.1 mrg static bool 396 1.1 mrg find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast, 397 1.1 mrg bitmap arg_stores) 398 1.1 mrg { 399 1.1 mrg rtx p; 400 1.1 mrg rtx_insn *insn, *prev_insn; 401 1.1 mrg bool ret; 402 1.1 mrg HOST_WIDE_INT min_sp_off, max_sp_off; 403 1.1 mrg bitmap sp_bytes; 404 1.1 mrg 405 1.1 mrg gcc_assert (CALL_P (call_insn)); 406 1.1 mrg if (!ACCUMULATE_OUTGOING_ARGS) 407 1.1 mrg return true; 408 1.1 mrg 409 1.1 mrg if (!do_mark) 410 1.1 mrg { 411 1.1 mrg gcc_assert (arg_stores); 412 1.1 mrg bitmap_clear (arg_stores); 413 1.1 mrg } 414 1.1 mrg 415 1.1 mrg min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT); 416 1.1 mrg max_sp_off = 0; 417 1.1 mrg 418 1.1 mrg /* First determine the minimum and maximum offset from sp for 419 1.1 mrg stored arguments. */ 420 1.1 mrg for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1)) 421 1.1 mrg if (GET_CODE (XEXP (p, 0)) == USE 422 1.1 mrg && MEM_P (XEXP (XEXP (p, 0), 0))) 423 1.1 mrg { 424 1.1 mrg rtx mem = XEXP (XEXP (p, 0), 0); 425 1.1 mrg HOST_WIDE_INT size; 426 1.1 mrg if (!MEM_SIZE_KNOWN_P (mem) || !MEM_SIZE (mem).is_constant (&size)) 427 1.1 mrg return false; 428 1.1 mrg HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast); 429 1.1 mrg if (off == INTTYPE_MINIMUM (HOST_WIDE_INT)) 430 1.1 mrg return false; 431 1.1 mrg min_sp_off = MIN (min_sp_off, off); 432 1.1 mrg max_sp_off = MAX (max_sp_off, off + size); 433 1.1 mrg } 434 1.1 mrg 435 1.1 mrg if (min_sp_off >= max_sp_off) 436 1.1 mrg return true; 437 1.1 mrg sp_bytes = BITMAP_ALLOC (NULL); 438 1.1 mrg 439 1.1 mrg /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off 440 1.1 mrg which contain arguments. Checking has been done in the previous 441 1.1 mrg loop. 
*/ 442 1.1 mrg for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1)) 443 1.1 mrg if (GET_CODE (XEXP (p, 0)) == USE 444 1.1 mrg && MEM_P (XEXP (XEXP (p, 0), 0))) 445 1.1 mrg { 446 1.1 mrg rtx mem = XEXP (XEXP (p, 0), 0); 447 1.1 mrg /* Checked in the previous iteration. */ 448 1.1 mrg HOST_WIDE_INT size = MEM_SIZE (mem).to_constant (); 449 1.1 mrg HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast); 450 1.1 mrg gcc_checking_assert (off != INTTYPE_MINIMUM (HOST_WIDE_INT)); 451 1.1 mrg for (HOST_WIDE_INT byte = off; byte < off + size; byte++) 452 1.1 mrg if (!bitmap_set_bit (sp_bytes, byte - min_sp_off)) 453 1.1 mrg gcc_unreachable (); 454 1.1 mrg } 455 1.1 mrg 456 1.1 mrg /* Walk backwards, looking for argument stores. The search stops 457 1.1 mrg when seeing another call, sp adjustment, memory store other than 458 1.1 mrg argument store or a read from an argument stack slot. */ 459 1.1 mrg struct check_argument_load_data data 460 1.1 mrg = { sp_bytes, min_sp_off, max_sp_off, call_insn, fast, false }; 461 1.1 mrg ret = false; 462 1.1 mrg for (insn = PREV_INSN (call_insn); insn; insn = prev_insn) 463 1.1 mrg { 464 1.1 mrg if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn))) 465 1.1 mrg prev_insn = NULL; 466 1.1 mrg else 467 1.1 mrg prev_insn = PREV_INSN (insn); 468 1.1 mrg 469 1.1 mrg if (CALL_P (insn)) 470 1.1 mrg break; 471 1.1 mrg 472 1.1 mrg if (!NONDEBUG_INSN_P (insn)) 473 1.1 mrg continue; 474 1.1 mrg 475 1.1 mrg rtx set = single_set (insn); 476 1.1 mrg if (!set || SET_DEST (set) == stack_pointer_rtx) 477 1.1 mrg break; 478 1.1 mrg 479 1.1 mrg note_uses (&PATTERN (insn), check_argument_load, &data); 480 1.1 mrg if (data.load_found) 481 1.1 mrg break; 482 1.1 mrg 483 1.1 mrg if (!MEM_P (SET_DEST (set))) 484 1.1 mrg continue; 485 1.1 mrg 486 1.1 mrg rtx mem = SET_DEST (set); 487 1.1 mrg HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast); 488 1.1 mrg if (off == INTTYPE_MINIMUM (HOST_WIDE_INT)) 489 1.1 mrg break; 490 1.1 mrg 491 1.1 
mrg HOST_WIDE_INT size; 492 1.1 mrg if (!MEM_SIZE_KNOWN_P (mem) 493 1.1 mrg || !MEM_SIZE (mem).is_constant (&size) 494 1.1 mrg || !check_argument_store (size, off, min_sp_off, 495 1.1 mrg max_sp_off, sp_bytes)) 496 1.1 mrg break; 497 1.1 mrg 498 1.1 mrg if (!deletable_insn_p (insn, fast, NULL)) 499 1.1 mrg break; 500 1.1 mrg 501 1.1 mrg if (do_mark) 502 1.1 mrg mark_insn (insn, fast); 503 1.1 mrg else 504 1.1 mrg bitmap_set_bit (arg_stores, INSN_UID (insn)); 505 1.1 mrg 506 1.1 mrg if (bitmap_empty_p (sp_bytes)) 507 1.1 mrg { 508 1.1 mrg ret = true; 509 1.1 mrg break; 510 1.1 mrg } 511 1.1 mrg } 512 1.1 mrg 513 1.1 mrg BITMAP_FREE (sp_bytes); 514 1.1 mrg if (!ret && arg_stores) 515 1.1 mrg bitmap_clear (arg_stores); 516 1.1 mrg 517 1.1 mrg return ret; 518 1.1 mrg } 519 1.1 mrg 520 1.1 mrg 521 1.1 mrg /* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN 522 1.1 mrg writes to. */ 523 1.1 mrg 524 1.1 mrg static void 525 1.1 mrg remove_reg_equal_equiv_notes_for_defs (rtx_insn *insn) 526 1.1 mrg { 527 1.1 mrg df_ref def; 528 1.1 mrg 529 1.1 mrg FOR_EACH_INSN_DEF (def, insn) 530 1.1 mrg remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (def)); 531 1.1 mrg } 532 1.1 mrg 533 1.1 mrg /* Scan all BBs for debug insns and reset those that reference values 534 1.1 mrg defined in unmarked insns. 
*/ 535 1.1 mrg 536 1.1 mrg static void 537 1.1 mrg reset_unmarked_insns_debug_uses (void) 538 1.1 mrg { 539 1.1 mrg basic_block bb; 540 1.1 mrg rtx_insn *insn, *next; 541 1.1 mrg 542 1.1 mrg FOR_EACH_BB_REVERSE_FN (bb, cfun) 543 1.1 mrg FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next) 544 1.1 mrg if (DEBUG_INSN_P (insn)) 545 1.1 mrg { 546 1.1 mrg df_ref use; 547 1.1 mrg 548 1.1 mrg FOR_EACH_INSN_USE (use, insn) 549 1.1 mrg { 550 1.1 mrg struct df_link *defs; 551 1.1 mrg for (defs = DF_REF_CHAIN (use); defs; defs = defs->next) 552 1.1 mrg { 553 1.1 mrg rtx_insn *ref_insn; 554 1.1 mrg if (DF_REF_IS_ARTIFICIAL (defs->ref)) 555 1.1 mrg continue; 556 1.1 mrg ref_insn = DF_REF_INSN (defs->ref); 557 1.1 mrg if (!marked_insn_p (ref_insn)) 558 1.1 mrg break; 559 1.1 mrg } 560 1.1 mrg if (!defs) 561 1.1 mrg continue; 562 1.1 mrg /* ??? FIXME could we propagate the values assigned to 563 1.1 mrg each of the DEFs? */ 564 1.1 mrg INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC (); 565 1.1 mrg df_insn_rescan_debug_internal (insn); 566 1.1 mrg break; 567 1.1 mrg } 568 1.1 mrg } 569 1.1 mrg } 570 1.1 mrg 571 1.1 mrg /* Delete every instruction that hasn't been marked. */ 572 1.1 mrg 573 1.1 mrg static void 574 1.1 mrg delete_unmarked_insns (void) 575 1.1 mrg { 576 1.1 mrg basic_block bb; 577 1.1 mrg rtx_insn *insn, *next; 578 1.1 mrg bool must_clean = false; 579 1.1 mrg 580 1.1 mrg FOR_EACH_BB_REVERSE_FN (bb, cfun) 581 1.1 mrg FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next) 582 1.1 mrg if (NONDEBUG_INSN_P (insn)) 583 1.1 mrg { 584 1.1 mrg rtx turn_into_use = NULL_RTX; 585 1.1 mrg 586 1.1 mrg /* Always delete no-op moves. */ 587 1.1 mrg if (noop_move_p (insn) 588 1.1 mrg /* Unless the no-op move can throw and we are not allowed 589 1.1 mrg to alter cfg. 
*/ 590 1.1 mrg && (!cfun->can_throw_non_call_exceptions 591 1.1 mrg || (cfun->can_delete_dead_exceptions && can_alter_cfg) 592 1.1 mrg || insn_nothrow_p (insn))) 593 1.1 mrg { 594 1.1 mrg if (RTX_FRAME_RELATED_P (insn)) 595 1.1 mrg turn_into_use 596 1.1 mrg = find_reg_note (insn, REG_CFA_RESTORE, NULL); 597 1.1 mrg if (turn_into_use && REG_P (XEXP (turn_into_use, 0))) 598 1.1 mrg turn_into_use = XEXP (turn_into_use, 0); 599 1.1 mrg else 600 1.1 mrg turn_into_use = NULL_RTX; 601 1.1 mrg } 602 1.1 mrg 603 1.1 mrg /* Otherwise rely only on the DCE algorithm. */ 604 1.1 mrg else if (marked_insn_p (insn)) 605 1.1 mrg continue; 606 1.1 mrg 607 1.1 mrg /* Beware that reaching a dbg counter limit here can result 608 1.1 mrg in miscompiled file. This occurs when a group of insns 609 1.1 mrg must be deleted together, typically because the kept insn 610 1.1 mrg depends on the output from the deleted insn. Deleting 611 1.1 mrg this insns in reverse order (both at the bb level and 612 1.1 mrg when looking at the blocks) minimizes this, but does not 613 1.1 mrg eliminate it, since it is possible for the using insn to 614 1.1 mrg be top of a block and the producer to be at the bottom of 615 1.1 mrg the block. However, in most cases this will only result 616 1.1 mrg in an uninitialized use of an insn that is dead anyway. 617 1.1 mrg 618 1.1 mrg However, there is one rare case that will cause a 619 1.1 mrg miscompile: deletion of non-looping pure and constant 620 1.1 mrg calls on a machine where ACCUMULATE_OUTGOING_ARGS is true. 621 1.1 mrg In this case it is possible to remove the call, but leave 622 1.1 mrg the argument pushes to the stack. Because of the changes 623 1.1 mrg to the stack pointer, this will almost always lead to a 624 1.1 mrg miscompile. 
*/ 625 1.1 mrg if (!dbg_cnt (dce)) 626 1.1 mrg continue; 627 1.1 mrg 628 1.1 mrg if (dump_file) 629 1.1 mrg fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn)); 630 1.1 mrg 631 1.1 mrg /* Before we delete the insn we have to remove the REG_EQUAL notes 632 1.1 mrg for the destination regs in order to avoid dangling notes. */ 633 1.1 mrg remove_reg_equal_equiv_notes_for_defs (insn); 634 1.1 mrg 635 1.1 mrg if (turn_into_use) 636 1.1 mrg { 637 1.1 mrg /* Don't remove frame related noop moves if they cary 638 1.1 mrg REG_CFA_RESTORE note, while we don't need to emit any code, 639 1.1 mrg we need it to emit the CFI restore note. */ 640 1.1 mrg PATTERN (insn) 641 1.1 mrg = gen_rtx_USE (GET_MODE (turn_into_use), turn_into_use); 642 1.1 mrg INSN_CODE (insn) = -1; 643 1.1 mrg df_insn_rescan (insn); 644 1.1 mrg } 645 1.1 mrg else 646 1.1 mrg /* Now delete the insn. */ 647 1.1 mrg must_clean |= delete_insn_and_edges (insn); 648 1.1 mrg } 649 1.1 mrg 650 1.1 mrg /* Deleted a pure or const call. */ 651 1.1 mrg if (must_clean) 652 1.1 mrg { 653 1.1 mrg gcc_assert (can_alter_cfg); 654 1.1 mrg delete_unreachable_blocks (); 655 1.1 mrg free_dominance_info (CDI_DOMINATORS); 656 1.1 mrg } 657 1.1 mrg } 658 1.1 mrg 659 1.1 mrg 660 1.1 mrg /* Go through the instructions and mark those whose necessity is not 661 1.1 mrg dependent on inter-instruction information. Make sure all other 662 1.1 mrg instructions are not marked. 
*/ 663 1.1 mrg 664 1.1 mrg static void 665 1.1 mrg prescan_insns_for_dce (bool fast) 666 1.1 mrg { 667 1.1 mrg basic_block bb; 668 1.1 mrg rtx_insn *insn, *prev; 669 1.1 mrg bitmap arg_stores = NULL; 670 1.1 mrg 671 1.1 mrg if (dump_file) 672 1.1 mrg fprintf (dump_file, "Finding needed instructions:\n"); 673 1.1 mrg 674 1.1 mrg if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS) 675 1.1 mrg arg_stores = BITMAP_ALLOC (NULL); 676 1.1 mrg 677 1.1 mrg FOR_EACH_BB_FN (bb, cfun) 678 1.1 mrg { 679 1.1 mrg FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev) 680 1.1 mrg if (NONDEBUG_INSN_P (insn)) 681 1.1 mrg { 682 1.1 mrg /* Don't mark argument stores now. They will be marked 683 1.1 mrg if needed when the associated CALL is marked. */ 684 1.1 mrg if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn))) 685 1.1 mrg continue; 686 1.1 mrg if (deletable_insn_p (insn, fast, arg_stores)) 687 1.1 mrg mark_nonreg_stores (insn, fast); 688 1.1 mrg else 689 1.1 mrg mark_insn (insn, fast); 690 1.1 mrg } 691 1.1 mrg /* find_call_stack_args only looks at argument stores in the 692 1.1 mrg same bb. */ 693 1.1 mrg if (arg_stores) 694 1.1 mrg bitmap_clear (arg_stores); 695 1.1 mrg } 696 1.1 mrg 697 1.1 mrg if (arg_stores) 698 1.1 mrg BITMAP_FREE (arg_stores); 699 1.1 mrg 700 1.1 mrg if (dump_file) 701 1.1 mrg fprintf (dump_file, "Finished finding needed instructions:\n"); 702 1.1 mrg } 703 1.1 mrg 704 1.1 mrg 705 1.1 mrg /* UD-based DSE routines. */ 706 1.1 mrg 707 1.1 mrg /* Mark instructions that define artificially-used registers, such as 708 1.1 mrg the frame pointer and the stack pointer. 
*/ 709 1.1 mrg 710 1.1 mrg static void 711 1.1 mrg mark_artificial_uses (void) 712 1.1 mrg { 713 1.1 mrg basic_block bb; 714 1.1 mrg struct df_link *defs; 715 1.1 mrg df_ref use; 716 1.1 mrg 717 1.1 mrg FOR_ALL_BB_FN (bb, cfun) 718 1.1 mrg FOR_EACH_ARTIFICIAL_USE (use, bb->index) 719 1.1 mrg for (defs = DF_REF_CHAIN (use); defs; defs = defs->next) 720 1.1 mrg if (!DF_REF_IS_ARTIFICIAL (defs->ref)) 721 1.1 mrg mark_insn (DF_REF_INSN (defs->ref), false); 722 1.1 mrg } 723 1.1 mrg 724 1.1 mrg 725 1.1 mrg /* Mark every instruction that defines a register value that INSN uses. */ 726 1.1 mrg 727 1.1 mrg static void 728 1.1 mrg mark_reg_dependencies (rtx_insn *insn) 729 1.1 mrg { 730 1.1 mrg struct df_link *defs; 731 1.1 mrg df_ref use; 732 1.1 mrg 733 1.1 mrg if (DEBUG_INSN_P (insn)) 734 1.1 mrg return; 735 1.1 mrg 736 1.1 mrg FOR_EACH_INSN_USE (use, insn) 737 1.1 mrg { 738 1.1 mrg if (dump_file) 739 1.1 mrg { 740 1.1 mrg fprintf (dump_file, "Processing use of "); 741 1.1 mrg print_simple_rtl (dump_file, DF_REF_REG (use)); 742 1.1 mrg fprintf (dump_file, " in insn %d:\n", INSN_UID (insn)); 743 1.1 mrg } 744 1.1 mrg for (defs = DF_REF_CHAIN (use); defs; defs = defs->next) 745 1.1 mrg if (! DF_REF_IS_ARTIFICIAL (defs->ref)) 746 1.1 mrg mark_insn (DF_REF_INSN (defs->ref), false); 747 1.1 mrg } 748 1.1 mrg } 749 1.1 mrg 750 1.1 mrg 751 1.1 mrg /* Initialize global variables for a new DCE pass. 
*/ 752 1.1 mrg 753 1.1 mrg static void 754 1.1 mrg init_dce (bool fast) 755 1.1 mrg { 756 1.1 mrg if (!df_in_progress) 757 1.1 mrg { 758 1.1 mrg if (!fast) 759 1.1 mrg { 760 1.1 mrg df_set_flags (DF_RD_PRUNE_DEAD_DEFS); 761 1.1 mrg df_chain_add_problem (DF_UD_CHAIN); 762 1.1 mrg } 763 1.1 mrg df_analyze (); 764 1.1 mrg } 765 1.1 mrg 766 1.1 mrg if (dump_file) 767 1.1 mrg df_dump (dump_file); 768 1.1 mrg 769 1.1 mrg if (fast) 770 1.1 mrg { 771 1.1 mrg bitmap_obstack_initialize (&dce_blocks_bitmap_obstack); 772 1.1 mrg bitmap_obstack_initialize (&dce_tmp_bitmap_obstack); 773 1.1 mrg can_alter_cfg = false; 774 1.1 mrg } 775 1.1 mrg else 776 1.1 mrg can_alter_cfg = true; 777 1.1 mrg 778 1.1 mrg marked = sbitmap_alloc (get_max_uid () + 1); 779 1.1 mrg bitmap_clear (marked); 780 1.1 mrg } 781 1.1 mrg 782 1.1 mrg 783 1.1 mrg /* Free the data allocated by init_dce. */ 784 1.1 mrg 785 1.1 mrg static void 786 1.1 mrg fini_dce (bool fast) 787 1.1 mrg { 788 1.1 mrg sbitmap_free (marked); 789 1.1 mrg 790 1.1 mrg if (fast) 791 1.1 mrg { 792 1.1 mrg bitmap_obstack_release (&dce_blocks_bitmap_obstack); 793 1.1 mrg bitmap_obstack_release (&dce_tmp_bitmap_obstack); 794 1.1 mrg } 795 1.1 mrg } 796 1.1 mrg 797 1.1 mrg 798 1.1 mrg /* UD-chain based DCE. */ 799 1.1 mrg 800 1.1 mrg static unsigned int 801 1.1 mrg rest_of_handle_ud_dce (void) 802 1.1 mrg { 803 1.1 mrg rtx_insn *insn; 804 1.1 mrg 805 1.1 mrg init_dce (false); 806 1.1 mrg 807 1.1 mrg prescan_insns_for_dce (false); 808 1.1 mrg mark_artificial_uses (); 809 1.1 mrg while (worklist.length () > 0) 810 1.1 mrg { 811 1.1 mrg insn = worklist.pop (); 812 1.1 mrg mark_reg_dependencies (insn); 813 1.1 mrg } 814 1.1 mrg worklist.release (); 815 1.1 mrg 816 1.1 mrg if (MAY_HAVE_DEBUG_BIND_INSNS) 817 1.1 mrg reset_unmarked_insns_debug_uses (); 818 1.1 mrg 819 1.1 mrg /* Before any insns are deleted, we must remove the chains since 820 1.1 mrg they are not bidirectional. 
*/ 821 1.1 mrg df_remove_problem (df_chain); 822 1.1 mrg delete_unmarked_insns (); 823 1.1 mrg 824 1.1 mrg fini_dce (false); 825 1.1 mrg return 0; 826 1.1 mrg } 827 1.1 mrg 828 1.1 mrg 829 1.1 mrg namespace { 830 1.1 mrg 831 1.1 mrg const pass_data pass_data_ud_rtl_dce = 832 1.1 mrg { 833 1.1 mrg RTL_PASS, /* type */ 834 1.1 mrg "ud_dce", /* name */ 835 1.1 mrg OPTGROUP_NONE, /* optinfo_flags */ 836 1.1 mrg TV_DCE, /* tv_id */ 837 1.1 mrg 0, /* properties_required */ 838 1.1 mrg 0, /* properties_provided */ 839 1.1 mrg 0, /* properties_destroyed */ 840 1.1 mrg 0, /* todo_flags_start */ 841 1.1 mrg TODO_df_finish, /* todo_flags_finish */ 842 1.1 mrg }; 843 1.1 mrg 844 1.1 mrg class pass_ud_rtl_dce : public rtl_opt_pass 845 1.1 mrg { 846 1.1 mrg public: 847 1.1 mrg pass_ud_rtl_dce (gcc::context *ctxt) 848 1.1 mrg : rtl_opt_pass (pass_data_ud_rtl_dce, ctxt) 849 1.1 mrg {} 850 1.1 mrg 851 1.1 mrg /* opt_pass methods: */ 852 1.1 mrg virtual bool gate (function *) 853 1.1 mrg { 854 1.1 mrg return optimize > 1 && flag_dce && dbg_cnt (dce_ud); 855 1.1 mrg } 856 1.1 mrg 857 1.1 mrg virtual unsigned int execute (function *) 858 1.1 mrg { 859 1.1 mrg return rest_of_handle_ud_dce (); 860 1.1 mrg } 861 1.1 mrg 862 1.1 mrg }; // class pass_ud_rtl_dce 863 1.1 mrg 864 1.1 mrg } // anon namespace 865 1.1 mrg 866 1.1 mrg rtl_opt_pass * 867 1.1 mrg make_pass_ud_rtl_dce (gcc::context *ctxt) 868 1.1 mrg { 869 1.1 mrg return new pass_ud_rtl_dce (ctxt); 870 1.1 mrg } 871 1.1 mrg 872 1.1 mrg 873 1.1 mrg /* ------------------------------------------------------------------------- 874 1.1 mrg Fast DCE functions 875 1.1 mrg ------------------------------------------------------------------------- */ 876 1.1 mrg 877 1.1 mrg /* Process basic block BB. Return true if the live_in set has 878 1.1 mrg changed. REDO_OUT is true if the info at the bottom of the block 879 1.1 mrg needs to be recalculated before starting. AU is the proper set of 880 1.1 mrg artificial uses. 
   Track global substitution of uses of dead pseudos
   in debug insns using GLOBAL_DEBUG.  */

static bool
word_dce_process_block (basic_block bb, bool redo_out,
			struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
	 the succs of this block has had a change in it live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  /* Simulate liveness backwards through the block, starting from the
     word-level live-out set.  */
  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
	/* Queue debug-insn uses of pseudos both of whose word bits
	   are currently dead, so the value can be rescued via a debug
	   temp when the defining insn is visited below.  Only pseudos
	   whose mode is exactly two words wide are tracked here (the
	   word-LR bitmaps use two bits per regno).  */
	df_ref use;
	FOR_EACH_INSN_USE (use, insn)
	  if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER
	      && known_eq (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (use))),
			   2 * UNITS_PER_WORD)
	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use))
	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use) + 1))
	    dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
	bool any_changed;

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  If removing a def
	   changed the live set, some word of the def was live, so the
	   insn is needed.  */
	any_changed = df_word_lr_simulate_defs (insn, local_live);
	if (any_changed)
	  mark_insn (insn, true);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (marked_insn_p (insn))
	  df_word_lr_simulate_uses (insn, local_live);

	/* Insert debug temps for dead REGs used in subsequent debug
	   insns.  We may have to emit a debug temp even if the insn
	   was marked, in case the debug use was after the point of
	   death.  */
	if (debug.used && !bitmap_empty_p (debug.used))
	  {
	    df_ref def;

	    FOR_EACH_INSN_DEF (def, insn)
	      dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
				      marked_insn_p (insn)
				      && !control_flow_insn_p (insn)
				      ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
				      : DEBUG_TEMP_BEFORE_WITH_VALUE);
	  }

	if (dump_file)
	  {
	    fprintf (dump_file, "finished processing insn %d live out = ",
		     INSN_UID (insn));
	    df_print_word_regset (dump_file, local_live);
	  }
      }

  /* Publish the recomputed live-in set; the caller iterates until no
     block's live-in changes.  */
  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  dead_debug_local_finish (&debug, NULL);
  BITMAP_FREE (local_live);
  return block_changed;
}


/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.
   Track global substitution of uses of dead pseudos
   in debug insns using GLOBAL_DEBUG.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au,
		   struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  df_ref def;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
	 the succs of this block has had a change in it live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  /* Simulate liveness backwards through the block, starting from the
     LR live-out set.  */
  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
	/* Queue debug-insn uses of regs that are live neither locally
	   nor as artificial uses; they may be rescued via debug temps
	   when the defining insn is visited below.  */
	df_ref use;
	FOR_EACH_INSN_USE (use, insn)
	  if (!bitmap_bit_p (local_live, DF_REF_REGNO (use))
	      && !bitmap_bit_p (au, DF_REF_REGNO (use)))
	    dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
	bool needed = marked_insn_p (insn);

	/* The insn is needed if there is someone who uses the output.  */
	if (!needed)
	  FOR_EACH_INSN_DEF (def, insn)
	    if (bitmap_bit_p (local_live, DF_REF_REGNO (def))
		|| bitmap_bit_p (au, DF_REF_REGNO (def)))
	      {
		needed = true;
		mark_insn (insn, true);
		break;
	      }

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	df_simulate_defs (insn, local_live);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (needed)
	  df_simulate_uses (insn, local_live);

	/* Insert debug temps for dead REGs used in subsequent debug
	   insns.  We may have to emit a debug temp even if the insn
	   was marked, in case the debug use was after the point of
	   death.  */
	if (debug.used && !bitmap_empty_p (debug.used))
	  FOR_EACH_INSN_DEF (def, insn)
	    dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
				    needed && !control_flow_insn_p (insn)
				    ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
				    : DEBUG_TEMP_BEFORE_WITH_VALUE);
      }

  dead_debug_local_finish (&debug, NULL);
  df_simulate_finalize_backwards (bb, local_live);

  /* Report whether the block's live-in changed so the caller knows
     whether it must iterate.  */
  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}


/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.
 */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring the back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;
  struct dead_debug_global global_debug;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  dead_debug_global_init (&global_debug, NULL);

  /* Iterate to a fixed point: process every block in backward
     postorder and restart whenever a change propagates to a block
     that was already processed in this round (a loop back edge).  */
  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
	{
	  int index = postorder[i];
	  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, index);
	  bool local_changed;

	  /* The fixed blocks carry no insns to process; just note
	     them as handled.  */
	  if (index < NUM_FIXED_BLOCKS)
	    {
	      bitmap_set_bit (processed, index);
	      continue;
	    }

	  if (word_level)
	    local_changed
	      = word_dce_process_block (bb, bitmap_bit_p (redo_out, index),
					&global_debug);
	  else
	    local_changed
	      = dce_process_block (bb, bitmap_bit_p (redo_out, index),
				   bb_has_eh_pred (bb) ? au_eh : au,
				   &global_debug);
	  bitmap_set_bit (processed, index);

	  if (local_changed)
	    {
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, bb->preds)
		if (bitmap_bit_p (processed, e->src->index))
		  /* Be tricky about when we need to iterate the
		     analysis.  We only have redo the analysis if the
		     bitmaps change at the top of a block that is the
		     entry to a loop.  */
		  global_changed = true;
		else
		  bitmap_set_bit (redo_out, e->src->index);
	    }
	}

      if (global_changed)
	{
	  /* Turn off the RUN_DCE flag to prevent recursive calls to
	     dce.  */
	  int old_flag = df_clear_flags (DF_LR_RUN_DCE);

	  /* So something was deleted that requires a redo.  Do it on
	     the cheap.  */
	  delete_unmarked_insns ();
	  bitmap_clear (marked);
	  bitmap_clear (processed);
	  bitmap_clear (redo_out);

	  /* We do not need to rescan any instructions.  We only need
	     to redo the dataflow equations for the blocks that had a
	     change at the top of the block.  Then we need to redo the
	     iteration.  */
	  if (word_level)
	    df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
	  else
	    df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

	  /* Restore the RUN_DCE flag if it was set on entry.  */
	  if (old_flag & DF_LR_RUN_DCE)
	    df_set_flags (DF_LR_RUN_DCE);

	  prescan_insns_for_dce (true);
	}
    }

  dead_debug_global_finish (&global_debug, NULL);

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}


/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}


/* Fast word level DCE.
*/ 1201 1.1 mrg 1202 1.1 mrg void 1203 1.1 mrg run_word_dce (void) 1204 1.1 mrg { 1205 1.1 mrg int old_flags; 1206 1.1 mrg 1207 1.1 mrg if (!flag_dce) 1208 1.1 mrg return; 1209 1.1 mrg 1210 1.1 mrg timevar_push (TV_DCE); 1211 1.1 mrg old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN); 1212 1.1 mrg df_word_lr_add_problem (); 1213 1.1 mrg init_dce (true); 1214 1.1 mrg fast_dce (true); 1215 1.1 mrg fini_dce (true); 1216 1.1 mrg df_set_flags (old_flags); 1217 1.1 mrg timevar_pop (TV_DCE); 1218 1.1 mrg } 1219 1.1 mrg 1220 1.1 mrg 1221 1.1 mrg /* This is an internal call that is used by the df live register 1222 1.1 mrg problem to run fast dce as a side effect of creating the live 1223 1.1 mrg information. The stack is organized so that the lr problem is run, 1224 1.1 mrg this pass is run, which updates the live info and the df scanning 1225 1.1 mrg info, and then returns to allow the rest of the problems to be run. 1226 1.1 mrg 1227 1.1 mrg This can be called by elsewhere but it will not update the bit 1228 1.1 mrg vectors for any other problems than LR. */ 1229 1.1 mrg 1230 1.1 mrg void 1231 1.1 mrg run_fast_df_dce (void) 1232 1.1 mrg { 1233 1.1 mrg if (flag_dce) 1234 1.1 mrg { 1235 1.1 mrg /* If dce is able to delete something, it has to happen 1236 1.1 mrg immediately. Otherwise there will be problems handling the 1237 1.1 mrg eq_notes. */ 1238 1.1 mrg int old_flags = 1239 1.1 mrg df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN); 1240 1.1 mrg 1241 1.1 mrg df_in_progress = true; 1242 1.1 mrg rest_of_handle_fast_dce (); 1243 1.1 mrg df_in_progress = false; 1244 1.1 mrg 1245 1.1 mrg df_set_flags (old_flags); 1246 1.1 mrg } 1247 1.1 mrg } 1248 1.1 mrg 1249 1.1 mrg 1250 1.1 mrg /* Run a fast DCE pass. 
*/ 1251 1.1 mrg 1252 1.1 mrg void 1253 1.1 mrg run_fast_dce (void) 1254 1.1 mrg { 1255 1.1 mrg if (flag_dce) 1256 1.1 mrg rest_of_handle_fast_dce (); 1257 1.1 mrg } 1258 1.1 mrg 1259 1.1 mrg 1260 1.1 mrg namespace { 1261 1.1 mrg 1262 1.1 mrg const pass_data pass_data_fast_rtl_dce = 1263 1.1 mrg { 1264 1.1 mrg RTL_PASS, /* type */ 1265 1.1 mrg "rtl_dce", /* name */ 1266 1.1 mrg OPTGROUP_NONE, /* optinfo_flags */ 1267 1.1 mrg TV_DCE, /* tv_id */ 1268 1.1 mrg 0, /* properties_required */ 1269 1.1 mrg 0, /* properties_provided */ 1270 1.1 mrg 0, /* properties_destroyed */ 1271 1.1 mrg 0, /* todo_flags_start */ 1272 1.1 mrg TODO_df_finish, /* todo_flags_finish */ 1273 1.1 mrg }; 1274 1.1 mrg 1275 1.1 mrg class pass_fast_rtl_dce : public rtl_opt_pass 1276 1.1 mrg { 1277 1.1 mrg public: 1278 1.1 mrg pass_fast_rtl_dce (gcc::context *ctxt) 1279 1.1 mrg : rtl_opt_pass (pass_data_fast_rtl_dce, ctxt) 1280 1.1 mrg {} 1281 1.1 mrg 1282 1.1 mrg /* opt_pass methods: */ 1283 1.1 mrg virtual bool gate (function *) 1284 1.1 mrg { 1285 1.1 mrg return optimize > 0 && flag_dce && dbg_cnt (dce_fast); 1286 1.1 mrg } 1287 1.1 mrg 1288 1.1 mrg virtual unsigned int execute (function *) 1289 1.1 mrg { 1290 1.1 mrg return rest_of_handle_fast_dce (); 1291 1.1 mrg } 1292 1.1 mrg 1293 1.1 mrg }; // class pass_fast_rtl_dce 1294 1.1 mrg 1295 1.1 mrg } // anon namespace 1296 1.1 mrg 1297 1.1 mrg rtl_opt_pass * 1298 1.1 mrg make_pass_fast_rtl_dce (gcc::context *ctxt) 1299 1.1 mrg { 1300 1.1 mrg return new pass_fast_rtl_dce (ctxt); 1301 1.1 mrg } 1302