/* Copy propagation on hard registers for the GNU compiler.
   Copyright (C) 2000-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
19 1.1 mrg
20 1.1 mrg #include "config.h"
21 1.1 mrg #include "system.h"
22 1.1 mrg #include "coretypes.h"
23 1.1 mrg #include "backend.h"
24 1.1 mrg #include "rtl.h"
25 1.1 mrg #include "df.h"
26 1.1 mrg #include "memmodel.h"
27 1.1 mrg #include "tm_p.h"
28 1.1 mrg #include "insn-config.h"
29 1.1 mrg #include "regs.h"
30 1.1 mrg #include "emit-rtl.h"
31 1.1 mrg #include "recog.h"
32 1.1 mrg #include "diagnostic-core.h"
33 1.1 mrg #include "addresses.h"
34 1.1 mrg #include "tree-pass.h"
35 1.1 mrg #include "rtl-iter.h"
36 1.1 mrg #include "cfgrtl.h"
37 1.1 mrg #include "target.h"
38 1.1 mrg #include "function-abi.h"
39 1.1 mrg
/* The following code does forward propagation of hard register copies.
   The object is to eliminate as many dependencies as possible, so that
   we have the most scheduling freedom.  As a side effect, we also clean
   up some silly register allocation decisions made by reload.  This
   code may be obsoleted by a new register allocator.  */
45 1.1 mrg
/* DEBUG_INSNs aren't changed right away, as doing so might extend the
   lifetime of a register and get the DEBUG_INSN subsequently reset.
   So they are queued instead, and updated only when the register is
   used in some subsequent real insn before it is set.  */
struct queued_debug_insn_change
{
  /* Next queued change for the same destination register, or NULL.  */
  struct queued_debug_insn_change *next;
  /* The DEBUG_INSN to be modified.  */
  rtx_insn *insn;
  /* Location within INSN's pattern to be replaced.  */
  rtx *loc;
  /* Replacement register rtx to store at *LOC.  */
  rtx new_rtx;
};
57 1.1 mrg
/* For each register, we have a list of registers that contain the same
   value.  The OLDEST_REGNO field points to the head of the list, and
   the NEXT_REGNO field runs through the list.  The MODE field indicates
   what mode the data is known to be in; this field is VOIDmode when the
   register is not known to contain valid data.  */

struct value_data_entry
{
  /* Mode in which this register is known to hold the value, or VOIDmode.  */
  machine_mode mode;
  /* Head of the list of registers sharing this value (oldest copy).  */
  unsigned int oldest_regno;
  /* Next register on the value list, or INVALID_REGNUM at the tail.  */
  unsigned int next_regno;
  /* Queued DEBUG_INSN replacements targeting this register, or NULL.  */
  struct queued_debug_insn_change *debug_insn_changes;
};
71 1.1 mrg
/* Per-basic-block tracking state: one entry per hard register, plus
   bookkeeping used to bound overlap scans and debug-insn queues.  */
struct value_data
{
  /* One entry per hard register.  */
  struct value_data_entry e[FIRST_PSEUDO_REGISTER];
  /* Largest number of hard regs any recorded value spans; bounds the
     backward scan in kill_value_regno.  */
  unsigned int max_value_regs;
  /* Total count of queued DEBUG_INSN changes across all registers.  */
  unsigned int n_debug_insn_changes;
};
78 1.1 mrg
/* Pool allocator for queued_debug_insn_change nodes.  */
static object_allocator<queued_debug_insn_change> queued_debug_insn_change_pool
  ("debug insn changes pool");

/* When true, DEBUG_INSNs are not rewritten (used when the pass runs in a
   mode that must not touch debug insns).  */
static bool skip_debug_insn_p;
83 1.1 mrg
84 1.1 mrg static void kill_value_one_regno (unsigned, struct value_data *);
85 1.1 mrg static void kill_value_regno (unsigned, unsigned, struct value_data *);
86 1.1 mrg static void kill_value (const_rtx, struct value_data *);
87 1.1 mrg static void set_value_regno (unsigned, machine_mode, struct value_data *);
88 1.1 mrg static void init_value_data (struct value_data *);
89 1.1 mrg static void kill_clobbered_value (rtx, const_rtx, void *);
90 1.1 mrg static void kill_set_value (rtx, const_rtx, void *);
91 1.1 mrg static void copy_value (rtx, rtx, struct value_data *);
92 1.1 mrg static bool mode_change_ok (machine_mode, machine_mode,
93 1.1 mrg unsigned int);
94 1.1 mrg static rtx maybe_mode_change (machine_mode, machine_mode,
95 1.1 mrg machine_mode, unsigned int, unsigned int);
96 1.1 mrg static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
97 1.1 mrg static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
98 1.1 mrg struct value_data *);
99 1.1 mrg static bool replace_oldest_value_addr (rtx *, enum reg_class,
100 1.1 mrg machine_mode, addr_space_t,
101 1.1 mrg rtx_insn *, struct value_data *);
102 1.1 mrg static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
103 1.1 mrg static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
104 1.1 mrg extern void debug_value_data (struct value_data *);
105 1.1 mrg static void validate_value_data (struct value_data *);
106 1.1 mrg
107 1.1 mrg /* Free all queued updates for DEBUG_INSNs that change some reg to
108 1.1 mrg register REGNO. */
109 1.1 mrg
110 1.1 mrg static void
111 1.1 mrg free_debug_insn_changes (struct value_data *vd, unsigned int regno)
112 1.1 mrg {
113 1.1 mrg struct queued_debug_insn_change *cur, *next;
114 1.1 mrg for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
115 1.1 mrg {
116 1.1 mrg next = cur->next;
117 1.1 mrg --vd->n_debug_insn_changes;
118 1.1 mrg queued_debug_insn_change_pool.remove (cur);
119 1.1 mrg }
120 1.1 mrg vd->e[regno].debug_insn_changes = NULL;
121 1.1 mrg }
122 1.1 mrg
/* Kill register REGNO.  This involves removing it from any value
   lists, and resetting the value mode to VOIDmode.  This is only a
   helper function; it does not handle any hard registers overlapping
   with REGNO.  */

static void
kill_value_one_regno (unsigned int regno, struct value_data *vd)
{
  unsigned int i, next;

  if (vd->e[regno].oldest_regno != regno)
    {
      /* REGNO is in the middle or at the tail of a value chain: find its
	 predecessor and unlink it.  */
      for (i = vd->e[regno].oldest_regno;
	   vd->e[i].next_regno != regno;
	   i = vd->e[i].next_regno)
	continue;
      vd->e[i].next_regno = vd->e[regno].next_regno;
    }
  else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
    {
      /* REGNO is the head of a non-singleton chain: promote its successor
	 to be the new oldest register for every remaining member.  */
      for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
	vd->e[i].oldest_regno = next;
    }

  /* Reset REGNO itself to a singleton with no known value.  */
  vd->e[regno].mode = VOIDmode;
  vd->e[regno].oldest_regno = regno;
  vd->e[regno].next_regno = INVALID_REGNUM;
  if (vd->e[regno].debug_insn_changes)
    free_debug_insn_changes (vd, regno);

  if (flag_checking)
    validate_value_data (vd);
}
156 1.1 mrg
157 1.1 mrg /* Kill the value in register REGNO for NREGS, and any other registers
158 1.1 mrg whose values overlap. */
159 1.1 mrg
160 1.1 mrg static void
161 1.1 mrg kill_value_regno (unsigned int regno, unsigned int nregs,
162 1.1 mrg struct value_data *vd)
163 1.1 mrg {
164 1.1 mrg unsigned int j;
165 1.1 mrg
166 1.1 mrg /* Kill the value we're told to kill. */
167 1.1 mrg for (j = 0; j < nregs; ++j)
168 1.1 mrg kill_value_one_regno (regno + j, vd);
169 1.1 mrg
170 1.1 mrg /* Kill everything that overlapped what we're told to kill. */
171 1.1 mrg if (regno < vd->max_value_regs)
172 1.1 mrg j = 0;
173 1.1 mrg else
174 1.1 mrg j = regno - vd->max_value_regs;
175 1.1 mrg for (; j < regno; ++j)
176 1.1 mrg {
177 1.1 mrg unsigned int i, n;
178 1.1 mrg if (vd->e[j].mode == VOIDmode)
179 1.1 mrg continue;
180 1.1 mrg n = hard_regno_nregs (j, vd->e[j].mode);
181 1.1 mrg if (j + n > regno)
182 1.1 mrg for (i = 0; i < n; ++i)
183 1.1 mrg kill_value_one_regno (j + i, vd);
184 1.1 mrg }
185 1.1 mrg }
186 1.1 mrg
187 1.1 mrg /* Kill X. This is a convenience function wrapping kill_value_regno
188 1.1 mrg so that we mind the mode the register is in. */
189 1.1 mrg
190 1.1 mrg static void
191 1.1 mrg kill_value (const_rtx x, struct value_data *vd)
192 1.1 mrg {
193 1.1 mrg if (GET_CODE (x) == SUBREG)
194 1.1 mrg {
195 1.1 mrg rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
196 1.1 mrg GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
197 1.1 mrg x = tmp ? tmp : SUBREG_REG (x);
198 1.1 mrg }
199 1.1 mrg if (REG_P (x))
200 1.1 mrg kill_value_regno (REGNO (x), REG_NREGS (x), vd);
201 1.1 mrg }
202 1.1 mrg
203 1.1 mrg /* Remember that REGNO is valid in MODE. */
204 1.1 mrg
205 1.1 mrg static void
206 1.1 mrg set_value_regno (unsigned int regno, machine_mode mode,
207 1.1 mrg struct value_data *vd)
208 1.1 mrg {
209 1.1 mrg unsigned int nregs;
210 1.1 mrg
211 1.1 mrg vd->e[regno].mode = mode;
212 1.1 mrg
213 1.1 mrg nregs = hard_regno_nregs (regno, mode);
214 1.1 mrg if (nregs > vd->max_value_regs)
215 1.1 mrg vd->max_value_regs = nregs;
216 1.1 mrg }
217 1.1 mrg
218 1.1 mrg /* Initialize VD such that there are no known relationships between regs. */
219 1.1 mrg
220 1.1 mrg static void
221 1.1 mrg init_value_data (struct value_data *vd)
222 1.1 mrg {
223 1.1 mrg int i;
224 1.1 mrg for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
225 1.1 mrg {
226 1.1 mrg vd->e[i].mode = VOIDmode;
227 1.1 mrg vd->e[i].oldest_regno = i;
228 1.1 mrg vd->e[i].next_regno = INVALID_REGNUM;
229 1.1 mrg vd->e[i].debug_insn_changes = NULL;
230 1.1 mrg }
231 1.1 mrg vd->max_value_regs = 0;
232 1.1 mrg vd->n_debug_insn_changes = 0;
233 1.1 mrg }
234 1.1 mrg
235 1.1 mrg /* Called through note_stores. If X is clobbered, kill its value. */
236 1.1 mrg
237 1.1 mrg static void
238 1.1 mrg kill_clobbered_value (rtx x, const_rtx set, void *data)
239 1.1 mrg {
240 1.1 mrg struct value_data *const vd = (struct value_data *) data;
241 1.1 mrg
242 1.1 mrg if (GET_CODE (set) == CLOBBER)
243 1.1 mrg kill_value (x, vd);
244 1.1 mrg }
245 1.1 mrg
/* A structure passed as data to kill_set_value through note_stores.  */
struct kill_set_value_data
{
  /* Tracking state to update.  */
  struct value_data *vd;
  /* A register whose set should be ignored, or NULL_RTX.  */
  rtx ignore_set_reg;
};
252 1.1 mrg
253 1.1 mrg /* Called through note_stores. If X is set, not clobbered, kill its
254 1.1 mrg current value and install it as the root of its own value list. */
255 1.1 mrg
256 1.1 mrg static void
257 1.1 mrg kill_set_value (rtx x, const_rtx set, void *data)
258 1.1 mrg {
259 1.1 mrg struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
260 1.1 mrg if (rtx_equal_p (x, ksvd->ignore_set_reg))
261 1.1 mrg return;
262 1.1 mrg
263 1.1 mrg if (GET_CODE (set) != CLOBBER)
264 1.1 mrg {
265 1.1 mrg kill_value (x, ksvd->vd);
266 1.1 mrg if (REG_P (x))
267 1.1 mrg set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
268 1.1 mrg }
269 1.1 mrg }
270 1.1 mrg
/* Kill any register used in X as the base of an auto-increment expression,
   and install that register as the root of its own value list.  */

static void
kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
	{
	  /* Operand 0 of every autoinc rtx is the register being
	     modified; its old value is dead, its new value is live.  */
	  x = XEXP (x, 0);
	  kill_value (x, vd);
	  set_value_regno (REGNO (x), GET_MODE (x), vd);
	  /* The autoinc subtree has been handled; don't descend into it.  */
	  iter.skip_subrtxes ();
	}
    }
}
290 1.1 mrg
/* Assert that SRC has been copied to DEST.  Adjust the data structures
   to reflect that SRC contains an older copy of the shared value.  */

static void
copy_value (rtx dest, rtx src, struct value_data *vd)
{
  unsigned int dr = REGNO (dest);
  unsigned int sr = REGNO (src);
  unsigned int dn, sn;
  unsigned int i;

  /* ??? At present, it's possible to see noop sets.  It'd be nice if
     this were cleaned up beforehand...  */
  if (sr == dr)
    return;

  /* Do not propagate copies to the stack pointer, as that can leave
     memory accesses with no scheduling dependency on the stack update.  */
  if (dr == STACK_POINTER_REGNUM)
    return;

  /* Likewise with the frame pointer, if we're using one.  */
  if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
    return;

  /* Do not propagate copies to fixed or global registers, patterns
     can be relying to see particular fixed register or users can
     expect the chosen global register in asm.  */
  if (fixed_regs[dr] || global_regs[dr])
    return;

  /* If SRC and DEST overlap, don't record anything.  */
  dn = REG_NREGS (dest);
  sn = REG_NREGS (src);
  if ((dr > sr && dr < sr + sn)
      || (sr > dr && sr < dr + dn))
    return;

  /* If SRC had no assigned mode (i.e. we didn't know it was live)
     assign it now and assume the value came from an input argument
     or somesuch.  */
  if (vd->e[sr].mode == VOIDmode)
    set_value_regno (sr, vd->e[dr].mode, vd);

  /* If we are narrowing the input to a smaller number of hard regs,
     and it is in big endian, we are really extracting a high part.
     Since we generally associate a low part of a value with the value itself,
     we must not do the same for the high part.
     Note we can still get low parts for the same mode combination through
     a two-step copy involving differently sized hard regs.
     Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
     (set (reg:DI r0) (reg:DI fr0))
     (set (reg:SI fr2) (reg:SI r0))
     loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
     (set (reg:SI fr2) (reg:SI fr0))
     loads the high part of (reg:DI fr0) into fr2.

     We can't properly represent the latter case in our tables, so don't
     record anything then.  */
  else if (sn < hard_regno_nregs (sr, vd->e[sr].mode)
	   && maybe_ne (subreg_lowpart_offset (GET_MODE (dest),
					       vd->e[sr].mode), 0U))
    return;

  /* If SRC had been assigned a mode narrower than the copy, we can't
     link DEST into the chain, because not all of the pieces of the
     copy came from oldest_regno.  */
  else if (sn > hard_regno_nregs (sr, vd->e[sr].mode))
    return;

  /* If a narrower value is copied using wider mode, the upper bits
     are undefined (could be e.g. a former paradoxical subreg).  Signal
     in that case we've only copied value using the narrower mode.
     Consider:
     (set (reg:DI r14) (mem:DI ...))
     (set (reg:QI si) (reg:QI r14))
     (set (reg:DI bp) (reg:DI r14))
     (set (reg:DI r14) (const_int ...))
     (set (reg:DI dx) (reg:DI si))
     (set (reg:DI si) (const_int ...))
     (set (reg:DI dx) (reg:DI bp))
     The last set is not redundant, while the low 8 bits of dx are already
     equal to low 8 bits of bp, the other bits are undefined.  */
  else if (partial_subreg_p (vd->e[sr].mode, GET_MODE (src)))
    {
      /* Both registers must tolerate being accessed in the narrower
	 recorded mode, otherwise we can't record the relationship.  */
      if (!REG_CAN_CHANGE_MODE_P (sr, GET_MODE (src), vd->e[sr].mode)
	  || !REG_CAN_CHANGE_MODE_P (dr, vd->e[sr].mode, GET_MODE (dest)))
	return;
      set_value_regno (dr, vd->e[sr].mode, vd);
    }

  /* Link DR at the end of the value chain used by SR.  */

  vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;

  /* Walk to the tail of SR's chain and append DR.  */
  for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
    continue;
  vd->e[i].next_regno = dr;

  if (flag_checking)
    validate_value_data (vd);
}
393 1.1 mrg
394 1.1 mrg /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
395 1.1 mrg
396 1.1 mrg static bool
397 1.1 mrg mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
398 1.1 mrg unsigned int regno ATTRIBUTE_UNUSED)
399 1.1 mrg {
400 1.1 mrg if (partial_subreg_p (orig_mode, new_mode))
401 1.1 mrg return false;
402 1.1 mrg
403 1.1 mrg return REG_CAN_CHANGE_MODE_P (regno, orig_mode, new_mode);
404 1.1 mrg }
405 1.1 mrg
/* Register REGNO was originally set in ORIG_MODE.  It - or a copy of it -
   was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
   in NEW_MODE.
   Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX.  */

static rtx
maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
		   machine_mode new_mode, unsigned int regno,
		   unsigned int copy_regno ATTRIBUTE_UNUSED)
{
  /* If the copy was narrower than both the original and the use, the
     middle step lost bits that the use would need.  */
  if (partial_subreg_p (copy_mode, orig_mode)
      && partial_subreg_p (copy_mode, new_mode))
    return NULL_RTX;

  /* Avoid creating multiple copies of the stack pointer.  Some ports
     assume there is one and only one stack pointer.

     It's unclear if we need to do the same for other special registers.  */
  if (regno == STACK_POINTER_REGNUM)
    return NULL_RTX;

  if (orig_mode == new_mode)
    return gen_raw_REG (new_mode, regno);
  else if (mode_change_ok (orig_mode, new_mode, regno)
	   && mode_change_ok (copy_mode, new_mode, copy_regno))
    {
      int copy_nregs = hard_regno_nregs (copy_regno, copy_mode);
      int use_nregs = hard_regno_nregs (copy_regno, new_mode);
      poly_uint64 bytes_per_reg;
      /* Give up if the copy's size doesn't divide evenly over its hard
	 regs (possible with variable-sized modes).  */
      if (!can_div_trunc_p (GET_MODE_SIZE (copy_mode),
			    copy_nregs, &bytes_per_reg))
	return NULL_RTX;
      /* Locate the lowpart of the use within the original value and
	 translate that byte offset into a hard-register offset.  */
      poly_uint64 copy_offset = bytes_per_reg * (copy_nregs - use_nregs);
      poly_uint64 offset
	= subreg_size_lowpart_offset (GET_MODE_SIZE (new_mode) + copy_offset,
				      GET_MODE_SIZE (orig_mode));
      regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
      if (targetm.hard_regno_mode_ok (regno, new_mode))
	return gen_raw_REG (new_mode, regno);
    }
  return NULL_RTX;
}
448 1.1 mrg
/* Find the oldest copy of the value contained in REG that is in
   register class CL and has mode GET_MODE (REG).  If found, return an rtx
   of that oldest register, otherwise return NULL.  */

static rtx
find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
{
  unsigned int regno = REGNO (reg);
  machine_mode mode = GET_MODE (reg);
  unsigned int i;

  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  /* If we are accessing REG in some mode other that what we set it in,
     make sure that the replacement is valid.  In particular, consider
	(set (reg:DI r11) (...))
	(set (reg:SI r9) (reg:SI r11))
	(set (reg:SI r10) (...))
	(set (...) (reg:DI r9))
     Replacing r9 with r11 is invalid.  */
  if (mode != vd->e[regno].mode
      && (REG_NREGS (reg) > hard_regno_nregs (regno, vd->e[regno].mode)
	  || !REG_CAN_CHANGE_MODE_P (regno, mode, vd->e[regno].mode)))
    return NULL_RTX;

  /* Walk the value chain from the oldest copy toward REGNO; the first
     member that is in class CL and survives the mode checks wins.  */
  for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
    {
      machine_mode oldmode = vd->e[i].mode;
      rtx new_rtx;

      if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
	continue;

      new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
      if (new_rtx)
	{
	  /* Preserve REG's identity and attributes on the replacement.  */
	  ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
	  REG_ATTRS (new_rtx) = REG_ATTRS (reg);
	  REG_POINTER (new_rtx) = REG_POINTER (reg);
	  return new_rtx;
	}
    }

  return NULL_RTX;
}
494 1.1 mrg
/* If possible, replace the register at *LOC with the oldest register
   in register class CL.  Return true if successfully replaced.  */

static bool
replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
			  struct value_data *vd)
{
  rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
  if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
    {
      if (DEBUG_INSN_P (insn))
	{
	  /* Don't change DEBUG_INSNs immediately; queue the change on the
	     replacement register so it is applied only if the register is
	     really used later (see cprop_find_used_regs).  */
	  struct queued_debug_insn_change *change;

	  if (dump_file)
	    fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
		     INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));

	  change = queued_debug_insn_change_pool.allocate ();
	  change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
	  change->insn = insn;
	  change->loc = loc;
	  change->new_rtx = new_rtx;
	  vd->e[REGNO (new_rtx)].debug_insn_changes = change;
	  ++vd->n_debug_insn_changes;
	  return true;
	}
      if (dump_file)
	fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
		 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));

      /* Group the change (in_group = 1); the caller decides whether the
	 whole set of changes is valid.  */
      validate_change (insn, loc, new_rtx, 1);
      return true;
    }
  return false;
}
531 1.1 mrg
/* Similar to replace_oldest_value_reg, but *LOC contains an address.
   Adapted from find_reloads_address_1.  CL is INDEX_REG_CLASS or
   BASE_REG_CLASS depending on how the register is being considered.  */

static bool
replace_oldest_value_addr (rtx *loc, enum reg_class cl,
			   machine_mode mode, addr_space_t as,
			   rtx_insn *insn, struct value_data *vd)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  int i, j;
  bool changed = false;

  switch (code)
    {
    case PLUS:
      /* For DEBUG_INSNs the base/index register classes don't matter;
	 fall through to the generic sub-rtx walk below.  */
      if (DEBUG_INSN_P (insn))
	break;

      {
	rtx orig_op0 = XEXP (x, 0);
	rtx orig_op1 = XEXP (x, 1);
	RTX_CODE code0 = GET_CODE (orig_op0);
	RTX_CODE code1 = GET_CODE (orig_op1);
	rtx op0 = orig_op0;
	rtx op1 = orig_op1;
	rtx *locI = NULL;	/* Location of the index operand, if any.  */
	rtx *locB = NULL;	/* Location of the base operand, if any.  */
	enum rtx_code index_code = SCRATCH;

	/* Look through SUBREGs when classifying the operands.  */
	if (GET_CODE (op0) == SUBREG)
	  {
	    op0 = SUBREG_REG (op0);
	    code0 = GET_CODE (op0);
	  }

	if (GET_CODE (op1) == SUBREG)
	  {
	    op1 = SUBREG_REG (op1);
	    code1 = GET_CODE (op1);
	  }

	/* Decide which operand plays the index role and which the base
	   role, mirroring the classification in find_reloads_address_1.  */
	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
	    || code0 == ZERO_EXTEND || code1 == MEM)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
		 || code1 == ZERO_EXTEND || code0 == MEM)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }
	else if (code0 == CONST_INT || code0 == CONST
		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
	  {
	    /* Constant + reg: only the other operand can be a base.  */
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (XEXP (x, 0));
	  }
	else if (code1 == CONST_INT || code1 == CONST
		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
	  {
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (XEXP (x, 1));
	  }
	else if (code0 == REG && code1 == REG)
	  {
	    /* reg + reg: pick the index/base assignment the target
	       prefers, defaulting to operand 1 as the index.  */
	    int index_op;
	    unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);

	    if (REGNO_OK_FOR_INDEX_P (regno1)
		&& regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
	      index_op = 1;
	    else if (REGNO_OK_FOR_INDEX_P (regno0)
		     && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
		     || REGNO_OK_FOR_INDEX_P (regno1))
	      index_op = 1;
	    else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else
	      index_op = 1;

	    locI = &XEXP (x, index_op);
	    locB = &XEXP (x, !index_op);
	    index_code = GET_CODE (*locI);
	  }
	else if (code0 == REG)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == REG)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }

	/* Recurse into the chosen operands with the appropriate class.  */
	if (locI)
	  changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
						mode, as, insn, vd);
	if (locB)
	  changed |= replace_oldest_value_addr (locB,
						base_reg_class (mode, as, PLUS,
								index_code),
						mode, as, insn, vd);
	return changed;
      }

    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      /* The autoinc register must not be replaced: the insn both reads
	 and writes it.  */
      return false;

    case MEM:
      return replace_oldest_value_mem (x, insn, vd);

    case REG:
      return replace_oldest_value_reg (loc, cl, insn, vd);

    default:
      break;
    }

  /* Generic walk over the remaining sub-rtxes.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
					      insn, vd);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
						mode, as, insn, vd);
    }

  return changed;
}
681 1.1 mrg
682 1.1 mrg /* Similar to replace_oldest_value_reg, but X contains a memory. */
683 1.1 mrg
684 1.1 mrg static bool
685 1.1 mrg replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
686 1.1 mrg {
687 1.1 mrg enum reg_class cl;
688 1.1 mrg
689 1.1 mrg if (DEBUG_INSN_P (insn))
690 1.1 mrg cl = ALL_REGS;
691 1.1 mrg else
692 1.1 mrg cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
693 1.1 mrg
694 1.1 mrg return replace_oldest_value_addr (&XEXP (x, 0), cl,
695 1.1 mrg GET_MODE (x), MEM_ADDR_SPACE (x),
696 1.1 mrg insn, vd);
697 1.1 mrg }
698 1.1 mrg
/* Apply all queued updates for DEBUG_INSNs that change some reg to
   register REGNO.  */

static void
apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
{
  struct queued_debug_insn_change *change;
  rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;

  /* Changes are grouped with validate_change (..., 1); commit each
     group when the insn being modified switches, so all edits to one
     DEBUG_INSN are applied together.  */
  for (change = vd->e[regno].debug_insn_changes;
       change;
       change = change->next)
    {
      if (last_insn != change->insn)
	{
	  apply_change_group ();
	  last_insn = change->insn;
	}
      validate_change (change->insn, change->loc, change->new_rtx, 1);
    }
  /* Flush the final pending group.  */
  apply_change_group ();
}
721 1.1 mrg
722 1.1 mrg /* Called via note_uses, for all used registers in a real insn
723 1.1 mrg apply DEBUG_INSN changes that change registers to the used
724 1.1 mrg registers. */
725 1.1 mrg
726 1.1 mrg static void
727 1.1 mrg cprop_find_used_regs (rtx *loc, void *data)
728 1.1 mrg {
729 1.1 mrg struct value_data *const vd = (struct value_data *) data;
730 1.1 mrg subrtx_iterator::array_type array;
731 1.1 mrg FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
732 1.1 mrg {
733 1.1 mrg const_rtx x = *iter;
734 1.1 mrg if (REG_P (x))
735 1.1 mrg {
736 1.1 mrg unsigned int regno = REGNO (x);
737 1.1 mrg if (vd->e[regno].debug_insn_changes)
738 1.1 mrg {
739 1.1 mrg apply_debug_insn_changes (vd, regno);
740 1.1 mrg free_debug_insn_changes (vd, regno);
741 1.1 mrg }
742 1.1 mrg }
743 1.1 mrg }
744 1.1 mrg }
745 1.1 mrg
/* Apply clobbers of INSN in PATTERN and C_I_F_U to value_data VD.  */

static void
kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
{
  /* note_stores taken an insn walks both the pattern and, for calls,
     CALL_INSN_FUNCTION_USAGE; kill_clobbered_value filters to CLOBBERs.  */
  note_stores (insn, kill_clobbered_value, vd);
}
753 1.1 mrg
754 1.1 mrg /* Perform the forward copy propagation on basic block BB. */
755 1.1 mrg
756 1.1 mrg static bool
757 1.1 mrg copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
758 1.1 mrg {
759 1.1 mrg bool anything_changed = false;
760 1.1 mrg rtx_insn *insn, *next;
761 1.1 mrg
762 1.1 mrg for (insn = BB_HEAD (bb); ; insn = next)
763 1.1 mrg {
764 1.1 mrg int n_ops, i, predicated;
765 1.1 mrg bool is_asm, any_replacements;
766 1.1 mrg rtx set;
767 1.1 mrg rtx link;
768 1.1 mrg bool changed = false;
769 1.1 mrg struct kill_set_value_data ksvd;
770 1.1 mrg
771 1.1 mrg next = NEXT_INSN (insn);
772 1.1 mrg if (!NONDEBUG_INSN_P (insn))
773 1.1 mrg {
774 1.1 mrg if (DEBUG_BIND_INSN_P (insn))
775 1.1 mrg {
776 1.1 mrg rtx loc = INSN_VAR_LOCATION_LOC (insn);
777 1.1 mrg if (!VAR_LOC_UNKNOWN_P (loc))
778 1.1 mrg replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
779 1.1 mrg ALL_REGS, GET_MODE (loc),
780 1.1 mrg ADDR_SPACE_GENERIC, insn, vd);
781 1.1 mrg }
782 1.1 mrg
783 1.1 mrg if (insn == BB_END (bb))
784 1.1 mrg break;
785 1.1 mrg else
786 1.1 mrg continue;
787 1.1 mrg }
788 1.1 mrg
789 1.1 mrg set = single_set (insn);
790 1.1 mrg
791 1.1 mrg /* Detect noop sets and remove them before processing side effects. */
792 1.1 mrg if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
793 1.1 mrg {
794 1.1 mrg unsigned int regno = REGNO (SET_SRC (set));
795 1.1 mrg rtx r1 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
796 1.1 mrg SET_DEST (set), vd);
797 1.1 mrg rtx r2 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
798 1.1 mrg SET_SRC (set), vd);
799 1.1 mrg if (rtx_equal_p (r1 ? r1 : SET_DEST (set), r2 ? r2 : SET_SRC (set)))
800 1.1 mrg {
801 1.1 mrg bool last = insn == BB_END (bb);
802 1.1 mrg delete_insn (insn);
803 1.1 mrg if (last)
804 1.1 mrg break;
805 1.1 mrg continue;
806 1.1 mrg }
807 1.1 mrg }
808 1.1 mrg
809 1.1 mrg /* Detect obviously dead sets (via REG_UNUSED notes) and remove them. */
810 1.1 mrg if (set
811 1.1 mrg && !RTX_FRAME_RELATED_P (insn)
812 1.1 mrg && NONJUMP_INSN_P (insn)
813 1.1 mrg && !may_trap_p (set)
814 1.1 mrg && find_reg_note (insn, REG_UNUSED, SET_DEST (set))
815 1.1 mrg && !side_effects_p (SET_SRC (set))
816 1.1 mrg && !side_effects_p (SET_DEST (set)))
817 1.1 mrg {
818 1.1 mrg bool last = insn == BB_END (bb);
819 1.1 mrg delete_insn (insn);
820 1.1 mrg if (last)
821 1.1 mrg break;
822 1.1 mrg continue;
823 1.1 mrg }
824 1.1 mrg
825 1.1 mrg
826 1.1 mrg extract_constrain_insn (insn);
827 1.1 mrg preprocess_constraints (insn);
828 1.1 mrg const operand_alternative *op_alt = which_op_alt ();
829 1.1 mrg n_ops = recog_data.n_operands;
830 1.1 mrg is_asm = asm_noperands (PATTERN (insn)) >= 0;
831 1.1 mrg
832 1.1 mrg /* Simplify the code below by promoting OP_OUT to OP_INOUT
833 1.1 mrg in predicated instructions. */
834 1.1 mrg
835 1.1 mrg predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
836 1.1 mrg for (i = 0; i < n_ops; ++i)
837 1.1 mrg {
838 1.1 mrg int matches = op_alt[i].matches;
839 1.1 mrg if (matches >= 0 || op_alt[i].matched >= 0
840 1.1 mrg || (predicated && recog_data.operand_type[i] == OP_OUT))
841 1.1 mrg recog_data.operand_type[i] = OP_INOUT;
842 1.1 mrg }
843 1.1 mrg
844 1.1 mrg /* Apply changes to earlier DEBUG_INSNs if possible. */
845 1.1 mrg if (vd->n_debug_insn_changes)
846 1.1 mrg note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
847 1.1 mrg
848 1.1 mrg /* For each earlyclobber operand, zap the value data. */
849 1.1 mrg for (i = 0; i < n_ops; i++)
850 1.1 mrg if (op_alt[i].earlyclobber)
851 1.1 mrg kill_value (recog_data.operand[i], vd);
852 1.1 mrg
853 1.1 mrg /* Within asms, a clobber cannot overlap inputs or outputs.
854 1.1 mrg I wouldn't think this were true for regular insns, but
855 1.1 mrg scan_rtx treats them like that... */
856 1.1 mrg kill_clobbered_values (insn, vd);
857 1.1 mrg
858 1.1 mrg /* Kill all auto-incremented values. */
859 1.1 mrg /* ??? REG_INC is useless, since stack pushes aren't done that way. */
860 1.1 mrg kill_autoinc_value (insn, vd);
861 1.1 mrg
862 1.1 mrg /* Kill all early-clobbered operands. */
863 1.1 mrg for (i = 0; i < n_ops; i++)
864 1.1 mrg if (op_alt[i].earlyclobber)
865 1.1 mrg kill_value (recog_data.operand[i], vd);
866 1.1 mrg
867 1.1 mrg /* If we have dead sets in the insn, then we need to note these as we
868 1.1 mrg would clobbers. */
869 1.1 mrg for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
870 1.1 mrg {
871 1.1 mrg if (REG_NOTE_KIND (link) == REG_UNUSED)
872 1.1 mrg {
873 1.1 mrg kill_value (XEXP (link, 0), vd);
874 1.1 mrg /* Furthermore, if the insn looked like a single-set,
875 1.1 mrg but the dead store kills the source value of that
876 1.1 mrg set, then we can no-longer use the plain move
877 1.1 mrg special case below. */
878 1.1 mrg if (set
879 1.1 mrg && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
880 1.1 mrg set = NULL;
881 1.1 mrg }
882 1.1 mrg
883 1.1 mrg /* We need to keep CFI info correct, and the same on all paths,
884 1.1 mrg so we cannot normally replace the registers REG_CFA_REGISTER
885 1.1 mrg refers to. Bail. */
886 1.1 mrg if (REG_NOTE_KIND (link) == REG_CFA_REGISTER)
887 1.1 mrg goto did_replacement;
888 1.1 mrg }
889 1.1 mrg
890 1.1 mrg /* Special-case plain move instructions, since we may well
891 1.1 mrg be able to do the move from a different register class. */
892 1.1 mrg if (set && REG_P (SET_SRC (set)))
893 1.1 mrg {
894 1.1 mrg rtx src = SET_SRC (set);
895 1.1 mrg rtx dest = SET_DEST (set);
896 1.1 mrg unsigned int regno = REGNO (src);
897 1.1 mrg machine_mode mode = GET_MODE (src);
898 1.1 mrg unsigned int i;
899 1.1 mrg rtx new_rtx;
900 1.1 mrg
901 1.1 mrg /* If we are accessing SRC in some mode other that what we
902 1.1 mrg set it in, make sure that the replacement is valid. */
903 1.1 mrg if (mode != vd->e[regno].mode)
904 1.1 mrg {
905 1.1 mrg if (REG_NREGS (src)
906 1.1 mrg > hard_regno_nregs (regno, vd->e[regno].mode))
907 1.1 mrg goto no_move_special_case;
908 1.1 mrg
909 1.1 mrg /* And likewise, if we are narrowing on big endian the transformation
910 1.1 mrg is also invalid. */
911 1.1 mrg if (REG_NREGS (src) < hard_regno_nregs (regno, vd->e[regno].mode)
912 1.1 mrg && maybe_ne (subreg_lowpart_offset (mode,
913 1.1 mrg vd->e[regno].mode), 0U))
914 1.1 mrg goto no_move_special_case;
915 1.1 mrg }
916 1.1 mrg
917 1.1 mrg /* If the destination is also a register, try to find a source
918 1.1 mrg register in the same class. */
919 1.1 mrg if (REG_P (dest))
920 1.1 mrg {
921 1.1 mrg new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno),
922 1.1 mrg src, vd);
923 1.1 mrg
924 1.1 mrg if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
925 1.1 mrg {
926 1.1 mrg if (dump_file)
927 1.1 mrg fprintf (dump_file,
928 1.1 mrg "insn %u: replaced reg %u with %u\n",
929 1.1 mrg INSN_UID (insn), regno, REGNO (new_rtx));
930 1.1 mrg changed = true;
931 1.1 mrg goto did_replacement;
932 1.1 mrg }
933 1.1 mrg /* We need to re-extract as validate_change clobbers
934 1.1 mrg recog_data. */
935 1.1 mrg extract_constrain_insn (insn);
936 1.1 mrg preprocess_constraints (insn);
937 1.1 mrg }
938 1.1 mrg
939 1.1 mrg /* Otherwise, try all valid registers and see if its valid. */
940 1.1 mrg for (i = vd->e[regno].oldest_regno; i != regno;
941 1.1 mrg i = vd->e[i].next_regno)
942 1.1 mrg {
943 1.1 mrg new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
944 1.1 mrg mode, i, regno);
945 1.1 mrg if (new_rtx != NULL_RTX)
946 1.1 mrg {
947 1.1 mrg /* Don't propagate for a more expensive reg-reg move. */
948 1.1 mrg if (REG_P (dest))
949 1.1 mrg {
950 1.1 mrg enum reg_class from = REGNO_REG_CLASS (regno);
951 1.1 mrg enum reg_class to = REGNO_REG_CLASS (REGNO (dest));
952 1.1 mrg enum reg_class new_from = REGNO_REG_CLASS (i);
953 1.1 mrg unsigned int original_cost
954 1.1 mrg = targetm.register_move_cost (mode, from, to);
955 1.1 mrg unsigned int after_cost
956 1.1 mrg = targetm.register_move_cost (mode, new_from, to);
957 1.1 mrg if (after_cost > original_cost)
958 1.1 mrg continue;
959 1.1 mrg }
960 1.1 mrg
961 1.1 mrg if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
962 1.1 mrg {
963 1.1 mrg ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
964 1.1 mrg REG_ATTRS (new_rtx) = REG_ATTRS (src);
965 1.1 mrg REG_POINTER (new_rtx) = REG_POINTER (src);
966 1.1 mrg if (dump_file)
967 1.1 mrg fprintf (dump_file,
968 1.1 mrg "insn %u: replaced reg %u with %u\n",
969 1.1 mrg INSN_UID (insn), regno, REGNO (new_rtx));
970 1.1 mrg changed = true;
971 1.1 mrg goto did_replacement;
972 1.1 mrg }
973 1.1 mrg /* We need to re-extract as validate_change clobbers
974 1.1 mrg recog_data. */
975 1.1 mrg extract_constrain_insn (insn);
976 1.1 mrg preprocess_constraints (insn);
977 1.1 mrg }
978 1.1 mrg }
979 1.1 mrg }
980 1.1 mrg no_move_special_case:
981 1.1 mrg
982 1.1 mrg any_replacements = false;
983 1.1 mrg
984 1.1 mrg /* For each input operand, replace a hard register with the
985 1.1 mrg eldest live copy that's in an appropriate register class. */
986 1.1 mrg for (i = 0; i < n_ops; i++)
987 1.1 mrg {
988 1.1 mrg bool replaced = false;
989 1.1 mrg
990 1.1 mrg /* Don't scan match_operand here, since we've no reg class
991 1.1 mrg information to pass down. Any operands that we could
992 1.1 mrg substitute in will be represented elsewhere. */
993 1.1 mrg if (recog_data.constraints[i][0] == '\0')
994 1.1 mrg continue;
995 1.1 mrg
996 1.1 mrg /* Don't replace in asms intentionally referencing hard regs. */
997 1.1 mrg if (is_asm && REG_P (recog_data.operand[i])
998 1.1 mrg && (REGNO (recog_data.operand[i])
999 1.1 mrg == ORIGINAL_REGNO (recog_data.operand[i])))
1000 1.1 mrg continue;
1001 1.1 mrg
1002 1.1 mrg if (recog_data.operand_type[i] == OP_IN)
1003 1.1 mrg {
1004 1.1 mrg if (op_alt[i].is_address)
1005 1.1 mrg replaced
1006 1.1 mrg = replace_oldest_value_addr (recog_data.operand_loc[i],
1007 1.1 mrg alternative_class (op_alt, i),
1008 1.1 mrg VOIDmode, ADDR_SPACE_GENERIC,
1009 1.1 mrg insn, vd);
1010 1.1 mrg else if (REG_P (recog_data.operand[i]))
1011 1.1 mrg replaced
1012 1.1 mrg = replace_oldest_value_reg (recog_data.operand_loc[i],
1013 1.1 mrg alternative_class (op_alt, i),
1014 1.1 mrg insn, vd);
1015 1.1 mrg else if (MEM_P (recog_data.operand[i]))
1016 1.1 mrg replaced = replace_oldest_value_mem (recog_data.operand[i],
1017 1.1 mrg insn, vd);
1018 1.1 mrg }
1019 1.1 mrg else if (MEM_P (recog_data.operand[i]))
1020 1.1 mrg replaced = replace_oldest_value_mem (recog_data.operand[i],
1021 1.1 mrg insn, vd);
1022 1.1 mrg
1023 1.1 mrg /* If we performed any replacement, update match_dups. */
1024 1.1 mrg if (replaced)
1025 1.1 mrg {
1026 1.1 mrg int j;
1027 1.1 mrg rtx new_rtx;
1028 1.1 mrg
1029 1.1 mrg new_rtx = *recog_data.operand_loc[i];
1030 1.1 mrg recog_data.operand[i] = new_rtx;
1031 1.1 mrg for (j = 0; j < recog_data.n_dups; j++)
1032 1.1 mrg if (recog_data.dup_num[j] == i)
1033 1.1 mrg validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
1034 1.1 mrg
1035 1.1 mrg any_replacements = true;
1036 1.1 mrg }
1037 1.1 mrg }
1038 1.1 mrg
1039 1.1 mrg if (any_replacements)
1040 1.1 mrg {
1041 1.1 mrg if (! apply_change_group ())
1042 1.1 mrg {
1043 1.1 mrg if (dump_file)
1044 1.1 mrg fprintf (dump_file,
1045 1.1 mrg "insn %u: reg replacements not verified\n",
1046 1.1 mrg INSN_UID (insn));
1047 1.1 mrg }
1048 1.1 mrg else
1049 1.1 mrg changed = true;
1050 1.1 mrg }
1051 1.1 mrg
1052 1.1 mrg did_replacement:
1053 1.1 mrg if (changed)
1054 1.1 mrg {
1055 1.1 mrg anything_changed = true;
1056 1.1 mrg
1057 1.1 mrg /* If something changed, perhaps further changes to earlier
1058 1.1 mrg DEBUG_INSNs can be applied. */
1059 1.1 mrg if (vd->n_debug_insn_changes)
1060 1.1 mrg note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1061 1.1 mrg df_insn_rescan (insn);
1062 1.1 mrg }
1063 1.1 mrg
1064 1.1 mrg ksvd.vd = vd;
1065 1.1 mrg ksvd.ignore_set_reg = NULL_RTX;
1066 1.1 mrg
1067 1.1 mrg /* Clobber call-clobbered registers. */
1068 1.1 mrg if (CALL_P (insn))
1069 1.1 mrg {
1070 1.1 mrg unsigned int set_regno = INVALID_REGNUM;
1071 1.1 mrg unsigned int set_nregs = 0;
1072 1.1 mrg unsigned int regno;
1073 1.1 mrg rtx exp;
1074 1.1 mrg
1075 1.1 mrg for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1076 1.1 mrg {
1077 1.1 mrg rtx x = XEXP (exp, 0);
1078 1.1 mrg if (GET_CODE (x) == SET)
1079 1.1 mrg {
1080 1.1 mrg rtx dest = SET_DEST (x);
1081 1.1 mrg kill_value (dest, vd);
1082 1.1 mrg set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1083 1.1 mrg copy_value (dest, SET_SRC (x), vd);
1084 1.1 mrg ksvd.ignore_set_reg = dest;
1085 1.1 mrg set_regno = REGNO (dest);
1086 1.1 mrg set_nregs = REG_NREGS (dest);
1087 1.1 mrg break;
1088 1.1 mrg }
1089 1.1 mrg }
1090 1.1 mrg
1091 1.1 mrg function_abi callee_abi = insn_callee_abi (insn);
1092 1.1 mrg for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1093 1.1 mrg if (vd->e[regno].mode != VOIDmode
1094 1.1 mrg && callee_abi.clobbers_reg_p (vd->e[regno].mode, regno)
1095 1.1 mrg && (regno < set_regno || regno >= set_regno + set_nregs))
1096 1.1 mrg kill_value_regno (regno, 1, vd);
1097 1.1 mrg
1098 1.1 mrg /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1099 1.1 mrg of the SET isn't clobbered by CALLEE_ABI, but instead among
1100 1.1 mrg CLOBBERs on the CALL_INSN, we could wrongly assume the
1101 1.1 mrg value in it is still live. */
1102 1.1 mrg if (ksvd.ignore_set_reg)
1103 1.1 mrg kill_clobbered_values (insn, vd);
1104 1.1 mrg }
1105 1.1 mrg
1106 1.1 mrg bool copy_p = (set
1107 1.1 mrg && REG_P (SET_DEST (set))
1108 1.1 mrg && REG_P (SET_SRC (set)));
1109 1.1 mrg bool noop_p = (copy_p
1110 1.1 mrg && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1111 1.1 mrg
1112 1.1 mrg /* If a noop move is using narrower mode than we have recorded,
1113 1.1 mrg we need to either remove the noop move, or kill_set_value. */
1114 1.1 mrg if (noop_p
1115 1.1 mrg && partial_subreg_p (GET_MODE (SET_DEST (set)),
1116 1.1 mrg vd->e[REGNO (SET_DEST (set))].mode))
1117 1.1 mrg {
1118 1.1 mrg if (noop_move_p (insn))
1119 1.1 mrg {
1120 1.1 mrg bool last = insn == BB_END (bb);
1121 1.1 mrg delete_insn (insn);
1122 1.1 mrg if (last)
1123 1.1 mrg break;
1124 1.1 mrg }
1125 1.1 mrg else
1126 1.1 mrg noop_p = false;
1127 1.1 mrg }
1128 1.1 mrg
1129 1.1 mrg if (!noop_p)
1130 1.1 mrg {
1131 1.1 mrg /* Notice stores. */
1132 1.1 mrg note_stores (insn, kill_set_value, &ksvd);
1133 1.1 mrg
1134 1.1 mrg /* Notice copies. */
1135 1.1 mrg if (copy_p)
1136 1.1 mrg {
1137 1.1 mrg df_insn_rescan (insn);
1138 1.1 mrg copy_value (SET_DEST (set), SET_SRC (set), vd);
1139 1.1 mrg }
1140 1.1 mrg }
1141 1.1 mrg
1142 1.1 mrg if (insn == BB_END (bb))
1143 1.1 mrg break;
1144 1.1 mrg }
1145 1.1 mrg
1146 1.1 mrg return anything_changed;
1147 1.1 mrg }
1148 1.1 mrg
1149 1.1 mrg /* Dump the value chain data to stderr. */
1150 1.1 mrg
1151 1.1 mrg DEBUG_FUNCTION void
1152 1.1 mrg debug_value_data (struct value_data *vd)
1153 1.1 mrg {
1154 1.1 mrg HARD_REG_SET set;
1155 1.1 mrg unsigned int i, j;
1156 1.1 mrg
1157 1.1 mrg CLEAR_HARD_REG_SET (set);
1158 1.1 mrg
1159 1.1 mrg for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1160 1.1 mrg if (vd->e[i].oldest_regno == i)
1161 1.1 mrg {
1162 1.1 mrg if (vd->e[i].mode == VOIDmode)
1163 1.1 mrg {
1164 1.1 mrg if (vd->e[i].next_regno != INVALID_REGNUM)
1165 1.1 mrg fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1166 1.1 mrg i, vd->e[i].next_regno);
1167 1.1 mrg continue;
1168 1.1 mrg }
1169 1.1 mrg
1170 1.1 mrg SET_HARD_REG_BIT (set, i);
1171 1.1 mrg fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1172 1.1 mrg
1173 1.1 mrg for (j = vd->e[i].next_regno;
1174 1.1 mrg j != INVALID_REGNUM;
1175 1.1 mrg j = vd->e[j].next_regno)
1176 1.1 mrg {
1177 1.1 mrg if (TEST_HARD_REG_BIT (set, j))
1178 1.1 mrg {
1179 1.1 mrg fprintf (stderr, "[%u] Loop in regno chain\n", j);
1180 1.1 mrg return;
1181 1.1 mrg }
1182 1.1 mrg
1183 1.1 mrg if (vd->e[j].oldest_regno != i)
1184 1.1 mrg {
1185 1.1 mrg fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1186 1.1 mrg j, vd->e[j].oldest_regno);
1187 1.1 mrg return;
1188 1.1 mrg }
1189 1.1 mrg SET_HARD_REG_BIT (set, j);
1190 1.1 mrg fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1191 1.1 mrg }
1192 1.1 mrg fputc ('\n', stderr);
1193 1.1 mrg }
1194 1.1 mrg
1195 1.1 mrg for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1196 1.1 mrg if (! TEST_HARD_REG_BIT (set, i)
1197 1.1 mrg && (vd->e[i].mode != VOIDmode
1198 1.1 mrg || vd->e[i].oldest_regno != i
1199 1.1 mrg || vd->e[i].next_regno != INVALID_REGNUM))
1200 1.1 mrg fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1201 1.1 mrg i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1202 1.1 mrg vd->e[i].next_regno);
1203 1.1 mrg }
1204 1.1 mrg
1205 1.1 mrg /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1206 1.1 mrg DEBUG_INSN is skipped since we do not want to involve DF related
1207 1.1 mrg staff as how it is handled in function pass_cprop_hardreg::execute.
1208 1.1 mrg
1209 1.1 mrg NOTE: Currently it is only used for shrink-wrap. Maybe extend it
1210 1.1 mrg to handle DEBUG_INSN for other uses. */
1211 1.1 mrg
1212 1.1 mrg void
1213 1.1 mrg copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1214 1.1 mrg {
1215 1.1 mrg struct value_data *vd;
1216 1.1 mrg vd = XNEWVEC (struct value_data, 1);
1217 1.1 mrg init_value_data (vd);
1218 1.1 mrg
1219 1.1 mrg skip_debug_insn_p = true;
1220 1.1 mrg copyprop_hardreg_forward_1 (bb, vd);
1221 1.1 mrg free (vd);
1222 1.1 mrg skip_debug_insn_p = false;
1223 1.1 mrg }
1224 1.1 mrg
1225 1.1 mrg static void
1226 1.1 mrg validate_value_data (struct value_data *vd)
1227 1.1 mrg {
1228 1.1 mrg HARD_REG_SET set;
1229 1.1 mrg unsigned int i, j;
1230 1.1 mrg
1231 1.1 mrg CLEAR_HARD_REG_SET (set);
1232 1.1 mrg
1233 1.1 mrg for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1234 1.1 mrg if (vd->e[i].oldest_regno == i)
1235 1.1 mrg {
1236 1.1 mrg if (vd->e[i].mode == VOIDmode)
1237 1.1 mrg {
1238 1.1 mrg if (vd->e[i].next_regno != INVALID_REGNUM)
1239 1.1 mrg internal_error ("%qs: [%u] bad %<next_regno%> for empty chain (%u)",
1240 1.1 mrg __func__, i, vd->e[i].next_regno);
1241 1.1 mrg continue;
1242 1.1 mrg }
1243 1.1 mrg
1244 1.1 mrg SET_HARD_REG_BIT (set, i);
1245 1.1 mrg
1246 1.1 mrg for (j = vd->e[i].next_regno;
1247 1.1 mrg j != INVALID_REGNUM;
1248 1.1 mrg j = vd->e[j].next_regno)
1249 1.1 mrg {
1250 1.1 mrg if (TEST_HARD_REG_BIT (set, j))
1251 1.1 mrg internal_error ("%qs: loop in %<next_regno%> chain (%u)",
1252 1.1 mrg __func__, j);
1253 1.1 mrg if (vd->e[j].oldest_regno != i)
1254 1.1 mrg internal_error ("%qs: [%u] bad %<oldest_regno%> (%u)",
1255 1.1 mrg __func__, j, vd->e[j].oldest_regno);
1256 1.1 mrg
1257 1.1 mrg SET_HARD_REG_BIT (set, j);
1258 1.1 mrg }
1259 1.1 mrg }
1260 1.1 mrg
1261 1.1 mrg for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1262 1.1 mrg if (! TEST_HARD_REG_BIT (set, i)
1263 1.1 mrg && (vd->e[i].mode != VOIDmode
1264 1.1 mrg || vd->e[i].oldest_regno != i
1265 1.1 mrg || vd->e[i].next_regno != INVALID_REGNUM))
1266 1.1 mrg internal_error ("%qs: [%u] non-empty register in chain (%s %u %i)",
1267 1.1 mrg __func__, i,
1268 1.1 mrg GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1269 1.1 mrg vd->e[i].next_regno);
1270 1.1 mrg }
1271 1.1 mrg
1272 1.1 mrg
1273 1.1 mrg namespace {
1275 1.1 mrg
1276 1.1 mrg const pass_data pass_data_cprop_hardreg =
1277 1.1 mrg {
1278 1.1 mrg RTL_PASS, /* type */
1279 1.1 mrg "cprop_hardreg", /* name */
1280 1.1 mrg OPTGROUP_NONE, /* optinfo_flags */
1281 1.1 mrg TV_CPROP_REGISTERS, /* tv_id */
1282 1.1 mrg 0, /* properties_required */
1283 1.1 mrg 0, /* properties_provided */
1284 1.1 mrg 0, /* properties_destroyed */
1285 1.1 mrg 0, /* todo_flags_start */
1286 1.1 mrg TODO_df_finish, /* todo_flags_finish */
1287 1.1 mrg };
1288 1.1 mrg
1289 1.1 mrg class pass_cprop_hardreg : public rtl_opt_pass
1290 1.1 mrg {
1291 1.1 mrg public:
1292 1.1 mrg pass_cprop_hardreg (gcc::context *ctxt)
1293 1.1 mrg : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1294 1.1 mrg {}
1295 1.1 mrg
1296 1.1 mrg /* opt_pass methods: */
1297 1.1 mrg virtual bool gate (function *)
1298 1.1 mrg {
1299 1.1 mrg return (optimize > 0 && (flag_cprop_registers));
1300 1.1 mrg }
1301 1.1 mrg
1302 1.1 mrg virtual unsigned int execute (function *);
1303 1.1 mrg
1304 1.1 mrg }; // class pass_cprop_hardreg
1305 1.1 mrg
1306 1.1 mrg static bool
1307 1.1 mrg cprop_hardreg_bb (basic_block bb, struct value_data *all_vd, sbitmap visited)
1308 1.1 mrg {
1309 1.1 mrg bitmap_set_bit (visited, bb->index);
1310 1.1 mrg
1311 1.1 mrg /* If a block has a single predecessor, that we've already
1312 1.1 mrg processed, begin with the value data that was live at
1313 1.1 mrg the end of the predecessor block. */
1314 1.1 mrg /* ??? Ought to use more intelligent queuing of blocks. */
1315 1.1 mrg if (single_pred_p (bb)
1316 1.1 mrg && bitmap_bit_p (visited, single_pred (bb)->index)
1317 1.1 mrg && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1318 1.1 mrg {
1319 1.1 mrg all_vd[bb->index] = all_vd[single_pred (bb)->index];
1320 1.1 mrg if (all_vd[bb->index].n_debug_insn_changes)
1321 1.1 mrg {
1322 1.1 mrg unsigned int regno;
1323 1.1 mrg
1324 1.1 mrg for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 1.1 mrg {
1326 1.1 mrg if (all_vd[bb->index].e[regno].debug_insn_changes)
1327 1.1 mrg {
1328 1.1 mrg struct queued_debug_insn_change *cur;
1329 1.1 mrg for (cur = all_vd[bb->index].e[regno].debug_insn_changes;
1330 1.1 mrg cur; cur = cur->next)
1331 1.1 mrg --all_vd[bb->index].n_debug_insn_changes;
1332 1.1 mrg all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1333 1.1 mrg if (all_vd[bb->index].n_debug_insn_changes == 0)
1334 1.1 mrg break;
1335 1.1 mrg }
1336 1.1 mrg }
1337 1.1 mrg }
1338 1.1 mrg }
1339 1.1 mrg else
1340 1.1 mrg init_value_data (all_vd + bb->index);
1341 1.1 mrg
1342 1.1 mrg return copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1343 1.1 mrg }
1344 1.1 mrg
1345 1.1 mrg static void
1346 1.1 mrg cprop_hardreg_debug (function *fun, struct value_data *all_vd)
1347 1.1 mrg {
1348 1.1 mrg basic_block bb;
1349 1.1 mrg
1350 1.1 mrg FOR_EACH_BB_FN (bb, fun)
1351 1.1 mrg if (all_vd[bb->index].n_debug_insn_changes)
1352 1.1 mrg {
1353 1.1 mrg unsigned int regno;
1354 1.1 mrg bitmap live;
1355 1.1 mrg
1356 1.1 mrg live = df_get_live_out (bb);
1357 1.1 mrg for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1358 1.1 mrg if (all_vd[bb->index].e[regno].debug_insn_changes)
1359 1.1 mrg {
1360 1.1 mrg if (REGNO_REG_SET_P (live, regno))
1361 1.1 mrg apply_debug_insn_changes (all_vd + bb->index, regno);
1362 1.1 mrg
1363 1.1 mrg struct queued_debug_insn_change *cur;
1364 1.1 mrg for (cur = all_vd[bb->index].e[regno].debug_insn_changes;
1365 1.1 mrg cur; cur = cur->next)
1366 1.1 mrg --all_vd[bb->index].n_debug_insn_changes;
1367 1.1 mrg all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1368 1.1 mrg if (all_vd[bb->index].n_debug_insn_changes == 0)
1369 1.1 mrg break;
1370 1.1 mrg }
1371 1.1 mrg }
1372 1.1 mrg
1373 1.1 mrg queued_debug_insn_change_pool.release ();
1374 1.1 mrg }
1375 1.1 mrg
1376 1.1 mrg unsigned int
1377 1.1 mrg pass_cprop_hardreg::execute (function *fun)
1378 1.1 mrg {
1379 1.1 mrg struct value_data *all_vd;
1380 1.1 mrg basic_block bb;
1381 1.1 mrg
1382 1.1 mrg all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
1383 1.1 mrg
1384 1.1 mrg auto_sbitmap visited (last_basic_block_for_fn (fun));
1385 1.1 mrg bitmap_clear (visited);
1386 1.1 mrg
1387 1.1 mrg auto_vec<int> worklist;
1388 1.1 mrg bool any_debug_changes = false;
1389 1.1 mrg
1390 1.1 mrg /* We need accurate notes. Earlier passes such as if-conversion may
1391 1.1 mrg leave notes in an inconsistent state. */
1392 1.1 mrg df_note_add_problem ();
1393 1.1 mrg df_analyze ();
1394 1.1 mrg
1395 1.1 mrg /* It is tempting to set DF_LR_RUN_DCE, but DCE may choose to delete
1396 1.1 mrg an insn and this pass would not have visibility into the removal.
1397 1.1 mrg This pass would then potentially use the source of that
1398 1.1 mrg INSN for propagation purposes, generating invalid code.
1399 1.1 mrg
1400 1.1 mrg So we just ask for updated notes and handle trivial deletions
1401 1.1 mrg within this pass where we can update this passes internal
1402 1.1 mrg data structures appropriately. */
1403 1.1 mrg df_set_flags (DF_DEFER_INSN_RESCAN);
1404 1.1 mrg
1405 1.1 mrg FOR_EACH_BB_FN (bb, fun)
1406 1.1 mrg {
1407 1.1 mrg if (cprop_hardreg_bb (bb, all_vd, visited))
1408 1.1 mrg worklist.safe_push (bb->index);
1409 1.1 mrg if (all_vd[bb->index].n_debug_insn_changes)
1410 1.1 mrg any_debug_changes = true;
1411 1.1 mrg }
1412 1.1 mrg
1413 1.1 mrg /* We must call df_analyze here unconditionally to ensure that the
1414 1.1 mrg REG_UNUSED and REG_DEAD notes are consistent with and without -g. */
1415 1.1 mrg df_analyze ();
1416 1.1 mrg
1417 1.1 mrg if (MAY_HAVE_DEBUG_BIND_INSNS && any_debug_changes)
1418 1.1 mrg cprop_hardreg_debug (fun, all_vd);
1419 1.1 mrg
1420 1.1 mrg /* Second pass if we've changed anything, only for the bbs where we have
1421 1.1 mrg changed anything though. */
1422 1.1 mrg if (!worklist.is_empty ())
1423 1.1 mrg {
1424 1.1 mrg any_debug_changes = false;
1425 1.1 mrg bitmap_clear (visited);
1426 1.1 mrg for (int index : worklist)
1427 1.1 mrg {
1428 1.1 mrg bb = BASIC_BLOCK_FOR_FN (fun, index);
1429 1.1 mrg cprop_hardreg_bb (bb, all_vd, visited);
1430 1.1 mrg if (all_vd[bb->index].n_debug_insn_changes)
1431 1.1 mrg any_debug_changes = true;
1432 1.1 mrg }
1433 1.1 mrg
1434 1.1 mrg df_analyze ();
1435 1.1 mrg if (MAY_HAVE_DEBUG_BIND_INSNS && any_debug_changes)
1436 1.1 mrg cprop_hardreg_debug (fun, all_vd);
1437 1.1 mrg }
1438 1.1 mrg
1439 1.1 mrg free (all_vd);
1440 1.1 mrg return 0;
1441 1.1 mrg }
1442 1.1 mrg
1443 1.1 mrg } // anon namespace
1444 1.1 mrg
1445 1.1 mrg rtl_opt_pass *
1446 1.1 mrg make_pass_cprop_hardreg (gcc::context *ctxt)
1447 1.1 mrg {
1448 1.1 mrg return new pass_cprop_hardreg (ctxt);
1449 }
1450