cse.cc revision 1.1.1.1 1 1.1 mrg /* Common subexpression elimination for GNU compiler.
2 1.1 mrg Copyright (C) 1987-2022 Free Software Foundation, Inc.
3 1.1 mrg
4 1.1 mrg This file is part of GCC.
5 1.1 mrg
6 1.1 mrg GCC is free software; you can redistribute it and/or modify it under
7 1.1 mrg the terms of the GNU General Public License as published by the Free
8 1.1 mrg Software Foundation; either version 3, or (at your option) any later
9 1.1 mrg version.
10 1.1 mrg
11 1.1 mrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 1.1 mrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 1.1 mrg FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 1.1 mrg for more details.
15 1.1 mrg
16 1.1 mrg You should have received a copy of the GNU General Public License
17 1.1 mrg along with GCC; see the file COPYING3. If not see
18 1.1 mrg <http://www.gnu.org/licenses/>. */
19 1.1 mrg
20 1.1 mrg #include "config.h"
21 1.1 mrg #include "system.h"
22 1.1 mrg #include "coretypes.h"
23 1.1 mrg #include "backend.h"
24 1.1 mrg #include "target.h"
25 1.1 mrg #include "rtl.h"
26 1.1 mrg #include "tree.h"
27 1.1 mrg #include "cfghooks.h"
28 1.1 mrg #include "df.h"
29 1.1 mrg #include "memmodel.h"
30 1.1 mrg #include "tm_p.h"
31 1.1 mrg #include "insn-config.h"
32 1.1 mrg #include "regs.h"
33 1.1 mrg #include "emit-rtl.h"
34 1.1 mrg #include "recog.h"
35 1.1 mrg #include "cfgrtl.h"
36 1.1 mrg #include "cfganal.h"
37 1.1 mrg #include "cfgcleanup.h"
38 1.1 mrg #include "alias.h"
39 1.1 mrg #include "toplev.h"
40 1.1 mrg #include "rtlhooks-def.h"
41 1.1 mrg #include "tree-pass.h"
42 1.1 mrg #include "dbgcnt.h"
43 1.1 mrg #include "rtl-iter.h"
44 1.1 mrg #include "regs.h"
45 1.1 mrg #include "function-abi.h"
46 1.1 mrg #include "rtlanal.h"
47 1.1 mrg #include "expr.h"
48 1.1 mrg
49 1.1 mrg /* The basic idea of common subexpression elimination is to go
50 1.1 mrg through the code, keeping a record of expressions that would
51 1.1 mrg have the same value at the current scan point, and replacing
52 1.1 mrg expressions encountered with the cheapest equivalent expression.
53 1.1 mrg
54 1.1 mrg It is too complicated to keep track of the different possibilities
55 1.1 mrg when control paths merge in this code; so, at each label, we forget all
56 1.1 mrg that is known and start fresh. This can be described as processing each
57 1.1 mrg extended basic block separately. We have a separate pass to perform
58 1.1 mrg global CSE.
59 1.1 mrg
60 1.1 mrg Note CSE can turn a conditional or computed jump into a nop or
61 1.1 mrg an unconditional jump. When this occurs we arrange to run the jump
62 1.1 mrg optimizer after CSE to delete the unreachable code.
63 1.1 mrg
64 1.1 mrg We use two data structures to record the equivalent expressions:
65 1.1 mrg a hash table for most expressions, and a vector of "quantity
66 1.1 mrg numbers" to record equivalent (pseudo) registers.
67 1.1 mrg
68 1.1 mrg The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
70 1.1 mrg contain a fairly small number, the register number, taken from
71 1.1 mrg a contiguously allocated series, and two register references are
72 1.1 mrg identical if they have the same number. General expressions
73 1.1 mrg do not have any such thing, so the only way to retrieve the
74 1.1 mrg information recorded on an expression other than a register
75 1.1 mrg is to keep it in a hash table.
76 1.1 mrg
77 1.1 mrg Registers and "quantity numbers":
78 1.1 mrg
79 1.1 mrg At the start of each basic block, all of the (hardware and pseudo)
80 1.1 mrg registers used in the function are given distinct quantity
81 1.1 mrg numbers to indicate their contents. During scan, when the code
82 1.1 mrg copies one register into another, we copy the quantity number.
83 1.1 mrg When a register is loaded in any other way, we allocate a new
84 1.1 mrg quantity number to describe the value generated by this operation.
85 1.1 mrg `REG_QTY (N)' records what quantity register N is currently thought
86 1.1 mrg of as containing.
87 1.1 mrg
88 1.1 mrg All real quantity numbers are greater than or equal to zero.
89 1.1 mrg If register N has not been assigned a quantity, `REG_QTY (N)' will
90 1.1 mrg equal -N - 1, which is always negative.
91 1.1 mrg
92 1.1 mrg Quantity numbers below zero do not exist and none of the `qty_table'
93 1.1 mrg entries should be referenced with a negative index.
94 1.1 mrg
95 1.1 mrg We also maintain a bidirectional chain of registers for each
96 1.1 mrg quantity number. The `qty_table` members `first_reg' and `last_reg',
97 1.1 mrg and `reg_eqv_table' members `next' and `prev' hold these chains.
98 1.1 mrg
99 1.1 mrg The first register in a chain is the one whose lifespan is least local.
100 1.1 mrg Among equals, it is the one that was seen first.
101 1.1 mrg We replace any equivalent register with that one.
102 1.1 mrg
103 1.1 mrg If two registers have the same quantity number, it must be true that
104 1.1 mrg REG expressions with qty_table `mode' must be in the hash table for both
105 1.1 mrg registers and must be in the same class.
106 1.1 mrg
107 1.1 mrg The converse is not true. Since hard registers may be referenced in
108 1.1 mrg any mode, two REG expressions might be equivalent in the hash table
109 1.1 mrg but not have the same quantity number if the quantity number of one
110 1.1 mrg of the registers is not the same mode as those expressions.
111 1.1 mrg
112 1.1 mrg Constants and quantity numbers
113 1.1 mrg
114 1.1 mrg When a quantity has a known constant value, that value is stored
115 1.1 mrg in the appropriate qty_table `const_rtx'. This is in addition to
116 1.1 mrg putting the constant in the hash table as is usual for non-regs.
117 1.1 mrg
118 1.1 mrg Whether a reg or a constant is preferred is determined by the configuration
119 1.1 mrg macro CONST_COSTS and will often depend on the constant value. In any
120 1.1 mrg event, expressions containing constants can be simplified, by fold_rtx.
121 1.1 mrg
122 1.1 mrg When a quantity has a known nearly constant value (such as an address
123 1.1 mrg of a stack slot), that value is stored in the appropriate qty_table
124 1.1 mrg `const_rtx'.
125 1.1 mrg
126 1.1 mrg Integer constants don't have a machine mode. However, cse
127 1.1 mrg determines the intended machine mode from the destination
128 1.1 mrg of the instruction that moves the constant. The machine mode
129 1.1 mrg is recorded in the hash table along with the actual RTL
130 1.1 mrg constant expression so that different modes are kept separate.
131 1.1 mrg
132 1.1 mrg Other expressions:
133 1.1 mrg
134 1.1 mrg To record known equivalences among expressions in general
135 1.1 mrg we use a hash table called `table'. It has a fixed number of buckets
136 1.1 mrg that contain chains of `struct table_elt' elements for expressions.
137 1.1 mrg These chains connect the elements whose expressions have the same
138 1.1 mrg hash codes.
139 1.1 mrg
140 1.1 mrg Other chains through the same elements connect the elements which
141 1.1 mrg currently have equivalent values.
142 1.1 mrg
143 1.1 mrg Register references in an expression are canonicalized before hashing
144 1.1 mrg the expression. This is done using `reg_qty' and qty_table `first_reg'.
145 1.1 mrg The hash code of a register reference is computed using the quantity
146 1.1 mrg number, not the register number.
147 1.1 mrg
148 1.1 mrg When the value of an expression changes, it is necessary to remove from the
149 1.1 mrg hash table not just that expression but all expressions whose values
150 1.1 mrg could be different as a result.
151 1.1 mrg
152 1.1 mrg 1. If the value changing is in memory, except in special cases
153 1.1 mrg ANYTHING referring to memory could be changed. That is because
154 1.1 mrg nobody knows where a pointer does not point.
155 1.1 mrg The function `invalidate_memory' removes what is necessary.
156 1.1 mrg
157 1.1 mrg The special cases are when the address is constant or is
158 1.1 mrg a constant plus a fixed register such as the frame pointer
159 1.1 mrg or a static chain pointer. When such addresses are stored in,
160 1.1 mrg we can tell exactly which other such addresses must be invalidated
161 1.1 mrg due to overlap. `invalidate' does this.
162 1.1 mrg All expressions that refer to non-constant
163 1.1 mrg memory addresses are also invalidated. `invalidate_memory' does this.
164 1.1 mrg
165 1.1 mrg 2. If the value changing is a register, all expressions
166 1.1 mrg containing references to that register, and only those,
167 1.1 mrg must be removed.
168 1.1 mrg
169 1.1 mrg Because searching the entire hash table for expressions that contain
170 1.1 mrg a register is very slow, we try to figure out when it isn't necessary.
171 1.1 mrg Precisely, this is necessary only when expressions have been
172 1.1 mrg entered in the hash table using this register, and then the value has
173 1.1 mrg changed, and then another expression wants to be added to refer to
174 1.1 mrg the register's new value. This sequence of circumstances is rare
175 1.1 mrg within any one basic block.
176 1.1 mrg
177 1.1 mrg `REG_TICK' and `REG_IN_TABLE', accessors for members of
178 1.1 mrg cse_reg_info, are used to detect this case. REG_TICK (i) is
179 1.1 mrg incremented whenever a value is stored in register i.
180 1.1 mrg REG_IN_TABLE (i) holds -1 if no references to register i have been
181 1.1 mrg entered in the table; otherwise, it contains the value REG_TICK (i)
182 1.1 mrg had when the references were entered. If we want to enter a
183 1.1 mrg reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
184 1.1 mrg remove old references. Until we want to enter a new entry, the
185 1.1 mrg mere fact that the two vectors don't match makes the entries be
186 1.1 mrg ignored if anyone tries to match them.
187 1.1 mrg
188 1.1 mrg Registers themselves are entered in the hash table as well as in
189 1.1 mrg the equivalent-register chains. However, `REG_TICK' and
190 1.1 mrg `REG_IN_TABLE' do not apply to expressions which are simple
191 1.1 mrg register references. These expressions are removed from the table
192 1.1 mrg immediately when they become invalid, and this can be done even if
193 1.1 mrg we do not immediately search for all the expressions that refer to
194 1.1 mrg the register.
195 1.1 mrg
196 1.1 mrg A CLOBBER rtx in an instruction invalidates its operand for further
197 1.1 mrg reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
198 1.1 mrg invalidates everything that resides in memory.
199 1.1 mrg
200 1.1 mrg Related expressions:
201 1.1 mrg
202 1.1 mrg Constant expressions that differ only by an additive integer
203 1.1 mrg are called related. When a constant expression is put in
204 1.1 mrg the table, the related expression with no constant term
205 1.1 mrg is also entered. These are made to point at each other
206 1.1 mrg so that it is possible to find out if there exists any
207 1.1 mrg register equivalent to an expression related to a given expression. */
208 1.1 mrg
209 1.1 mrg /* Length of qty_table vector. We know in advance we will not need
210 1.1 mrg a quantity number this big. */
211 1.1 mrg
212 1.1 mrg static int max_qty;
213 1.1 mrg
214 1.1 mrg /* Next quantity number to be allocated.
215 1.1 mrg This is 1 + the largest number needed so far. */
216 1.1 mrg
217 1.1 mrg static int next_qty;
218 1.1 mrg
219 1.1 mrg /* Per-qty information tracking.
220 1.1 mrg
221 1.1 mrg `first_reg' and `last_reg' track the head and tail of the
222 1.1 mrg chain of registers which currently contain this quantity.
223 1.1 mrg
224 1.1 mrg `mode' contains the machine mode of this quantity.
225 1.1 mrg
226 1.1 mrg `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
228 1.1 mrg and a constant can also be entered here. When this holds
229 1.1 mrg a known value, `const_insn' is the insn which stored the
230 1.1 mrg constant value.
231 1.1 mrg
232 1.1 mrg `comparison_{code,const,qty}' are used to track when a
233 1.1 mrg comparison between a quantity and some constant or register has
234 1.1 mrg been passed. In such a case, we know the results of the comparison
235 1.1 mrg in case we see it again. These members record a comparison that
236 1.1 mrg is known to be true. `comparison_code' holds the rtx code of such
237 1.1 mrg a comparison, else it is set to UNKNOWN and the other two
238 1.1 mrg comparison members are undefined. `comparison_const' holds
239 1.1 mrg the constant being compared against, or zero if the comparison
240 1.1 mrg is not against a constant. `comparison_qty' holds the quantity
241 1.1 mrg being compared against when the result is known. If the comparison
242 1.1 mrg is not with a register, `comparison_qty' is INT_MIN. */
243 1.1 mrg
/* One entry per allocated quantity number; see the "Registers and
   quantity numbers" commentary above for the full protocol.  */
struct qty_table_elem
{
  rtx const_rtx;		/* Known constant value of the quantity, or 0.
				   May also be a frame/arg pointer plus a
				   constant (see comment above).  */
  rtx_insn *const_insn;		/* Insn which stored const_rtx, when known.  */
  rtx comparison_const;		/* Constant compared against, or zero if the
				   known-true comparison is not against a
				   constant.  */
  int comparison_qty;		/* Quantity compared against, or INT_MIN if
				   the comparison is not with a register.  */
  unsigned int first_reg, last_reg;  /* Head and tail of the chain of
					registers currently containing this
					quantity.  */
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;	/* Code of a comparison known
						   to be true, else UNKNOWN.  */
  ENUM_BITFIELD(machine_mode) mode : 8;		/* Machine mode of this
						   quantity.  */
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;
259 1.1 mrg
260 1.1 mrg /* Insn being scanned. */
261 1.1 mrg
262 1.1 mrg static rtx_insn *this_insn;
263 1.1 mrg static bool optimize_this_for_speed_p;
264 1.1 mrg
265 1.1 mrg /* Index by register number, gives the number of the next (or
266 1.1 mrg previous) register in the chain of registers sharing the same
267 1.1 mrg value.
268 1.1 mrg
269 1.1 mrg Or -1 if this register is at the end of the chain.
270 1.1 mrg
271 1.1 mrg If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */
272 1.1 mrg
/* Per-register equivalence chain, indexed by register number.  `next'
   and `prev' give the number of the next (or previous) register in the
   chain of registers sharing the same value, or -1 at either end of
   the chain.  If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is
   undefined (see comment above).  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
281 1.1 mrg
/* Per-register bookkeeping for CSE; see the REG_TICK / REG_IN_TABLE
   commentary near the top of the file.  */
struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  Compared
     against cse_reg_info_timestamp to detect stale entries.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.
     NOTE(review): the field is unsigned, so "-1" here is stored as the
     wrapped all-ones value -- confirm against users of SUBREG_TICKED.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;
313 1.1 mrg
314 1.1 mrg /* The timestamp at the beginning of the current run of
315 1.1 mrg cse_extended_basic_block. We increment this variable at the beginning of
316 1.1 mrg the current run of cse_extended_basic_block. The timestamp field of a
317 1.1 mrg cse_reg_info entry matches the value of this variable if and only
318 1.1 mrg if the entry has been initialized during the current run of
319 1.1 mrg cse_extended_basic_block. */
320 1.1 mrg static unsigned int cse_reg_info_timestamp;
321 1.1 mrg
322 1.1 mrg /* A HARD_REG_SET containing all the hard registers for which there is
323 1.1 mrg currently a REG expression in the hash table. Note the difference
324 1.1 mrg from the above variables, which indicate if the REG is mentioned in some
325 1.1 mrg expression in the table. */
326 1.1 mrg
327 1.1 mrg static HARD_REG_SET hard_regs_in_table;
328 1.1 mrg
329 1.1 mrg /* True if CSE has altered the CFG. */
330 1.1 mrg static bool cse_cfg_altered;
331 1.1 mrg
332 1.1 mrg /* True if CSE has altered conditional jump insns in such a way
333 1.1 mrg that jump optimization should be redone. */
334 1.1 mrg static bool cse_jumps_altered;
335 1.1 mrg
336 1.1 mrg /* True if we put a LABEL_REF into the hash table for an INSN
337 1.1 mrg without a REG_LABEL_OPERAND, we have to rerun jump after CSE
338 1.1 mrg to put in the note. */
339 1.1 mrg static bool recorded_label_ref;
340 1.1 mrg
341 1.1 mrg /* canon_hash stores 1 in do_not_record if it notices a reference to PC or
342 1.1 mrg some other volatile subexpression. */
343 1.1 mrg
344 1.1 mrg static int do_not_record;
345 1.1 mrg
346 1.1 mrg /* canon_hash stores 1 in hash_arg_in_memory
347 1.1 mrg if it notices a reference to memory within the expression being hashed. */
348 1.1 mrg
349 1.1 mrg static int hash_arg_in_memory;
350 1.1 mrg
351 1.1 mrg /* The hash table contains buckets which are chains of `struct table_elt's,
352 1.1 mrg each recording one expression's information.
353 1.1 mrg That expression is in the `exp' field.
354 1.1 mrg
355 1.1 mrg The canon_exp field contains a canonical (from the point of view of
356 1.1 mrg alias analysis) version of the `exp' field.
357 1.1 mrg
358 1.1 mrg Those elements with the same hash code are chained in both directions
359 1.1 mrg through the `next_same_hash' and `prev_same_hash' fields.
360 1.1 mrg
361 1.1 mrg Each set of expressions with equivalent values
362 1.1 mrg are on a two-way chain through the `next_same_value'
363 1.1 mrg and `prev_same_value' fields, and all point with
364 1.1 mrg the `first_same_value' field at the first element in
365 1.1 mrg that chain. The chain is in order of increasing cost.
366 1.1 mrg Each element's cost value is in its `cost' field.
367 1.1 mrg
368 1.1 mrg The `in_memory' field is nonzero for elements that
369 1.1 mrg involve any reference to memory. These elements are removed
370 1.1 mrg whenever a write is done to an unidentified location in memory.
371 1.1 mrg To be safe, we assume that a memory address is unidentified unless
372 1.1 mrg the address is either a symbol constant or a constant plus
373 1.1 mrg the frame pointer or argument pointer.
374 1.1 mrg
375 1.1 mrg The `related_value' field is used to connect related expressions
376 1.1 mrg (that differ by adding an integer).
377 1.1 mrg The related expressions are chained in a circular fashion.
378 1.1 mrg `related_value' is zero for expressions for which this
379 1.1 mrg chain is not useful.
380 1.1 mrg
381 1.1 mrg The `cost' field stores the cost of this element's expression.
382 1.1 mrg The `regcost' field stores the value returned by approx_reg_cost for
383 1.1 mrg this element's expression.
384 1.1 mrg
385 1.1 mrg The `is_const' flag is set if the element is a constant (including
386 1.1 mrg a fixed address).
387 1.1 mrg
388 1.1 mrg The `flag' field is used as a temporary during some search routines.
389 1.1 mrg
390 1.1 mrg The `mode' field is usually the same as GET_MODE (`exp'), but
391 1.1 mrg if `exp' is a CONST_INT and has no machine mode then the `mode'
392 1.1 mrg field is the mode it was being used as. Each constant is
393 1.1 mrg recorded separately for each mode it is used with. */
394 1.1 mrg
/* One hash-table element; see the long commentary above for how the
   various chains are linked.  */
struct table_elt
{
  rtx exp;			/* The expression recorded.  */
  rtx canon_exp;		/* Alias-analysis-canonical version of exp.  */
  struct table_elt *next_same_hash;	/* Two-way bucket chain of elements
					   with the same hash code.  */
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;	/* Two-way chain of elements with
					   equivalent values, in order of
					   increasing cost.  */
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;	/* First element of the value chain.  */
  struct table_elt *related_value;	/* Circular chain of expressions that
					   differ by an added integer; zero
					   when the chain is not useful.  */
  int cost;			/* Cost of this expression (see COST).  */
  int regcost;			/* Value returned by approx_reg_cost.  */
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;		/* Nonzero if exp references memory.  */
  char is_const;		/* Set if exp is a constant (including a
				   fixed address).  */
  char flag;			/* Temporary used during some searches.  */
};
414 1.1 mrg
415 1.1 mrg /* We don't want a lot of buckets, because we rarely have very many
416 1.1 mrg things stored in the hash table, and a lot of buckets slows
417 1.1 mrg down a lot of loops that happen frequently. */
418 1.1 mrg #define HASH_SHIFT 5
419 1.1 mrg #define HASH_SIZE (1 << HASH_SHIFT)
420 1.1 mrg #define HASH_MASK (HASH_SIZE - 1)
421 1.1 mrg
422 1.1 mrg /* Compute hash code of X in mode M. Special-case case where X is a pseudo
423 1.1 mrg register (hard registers may require `do_not_record' to be set). */
424 1.1 mrg
425 1.1 mrg #define HASH(X, M) \
426 1.1 mrg ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
427 1.1 mrg ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
428 1.1 mrg : canon_hash (X, M)) & HASH_MASK)
429 1.1 mrg
430 1.1 mrg /* Like HASH, but without side-effects. */
431 1.1 mrg #define SAFE_HASH(X, M) \
432 1.1 mrg ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
433 1.1 mrg ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
434 1.1 mrg : safe_hash (X, M)) & HASH_MASK)
435 1.1 mrg
436 1.1 mrg /* Determine whether register number N is considered a fixed register for the
437 1.1 mrg purpose of approximating register costs.
438 1.1 mrg It is desirable to replace other regs with fixed regs, to reduce need for
439 1.1 mrg non-fixed hard regs.
440 1.1 mrg A reg wins if it is either the frame pointer or designated as fixed. */
441 1.1 mrg #define FIXED_REGNO_P(N) \
442 1.1 mrg ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
443 1.1 mrg || fixed_regs[N] || global_regs[N])
444 1.1 mrg
445 1.1 mrg /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
446 1.1 mrg hard registers and pointers into the frame are the cheapest with a cost
447 1.1 mrg of 0. Next come pseudos with a cost of one and other hard registers with
448 1.1 mrg a cost of 2. Aside from these special cases, call `rtx_cost'. */
449 1.1 mrg
450 1.1 mrg #define CHEAP_REGNO(N) \
451 1.1 mrg (REGNO_PTR_FRAME_P (N) \
452 1.1 mrg || (HARD_REGISTER_NUM_P (N) \
453 1.1 mrg && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
454 1.1 mrg
455 1.1 mrg #define COST(X, MODE) \
456 1.1 mrg (REG_P (X) ? 0 : notreg_cost (X, MODE, SET, 1))
457 1.1 mrg #define COST_IN(X, MODE, OUTER, OPNO) \
458 1.1 mrg (REG_P (X) ? 0 : notreg_cost (X, MODE, OUTER, OPNO))
459 1.1 mrg
460 1.1 mrg /* Get the number of times this register has been updated in this
461 1.1 mrg basic block. */
462 1.1 mrg
463 1.1 mrg #define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
464 1.1 mrg
465 1.1 mrg /* Get the point at which REG was recorded in the table. */
466 1.1 mrg
467 1.1 mrg #define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
468 1.1 mrg
469 1.1 mrg /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
470 1.1 mrg SUBREG). */
471 1.1 mrg
472 1.1 mrg #define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
473 1.1 mrg
474 1.1 mrg /* Get the quantity number for REG. */
475 1.1 mrg
476 1.1 mrg #define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
477 1.1 mrg
478 1.1 mrg /* Determine if the quantity number for register X represents a valid index
479 1.1 mrg into the qty_table. */
480 1.1 mrg
481 1.1 mrg #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
482 1.1 mrg
483 1.1 mrg /* Compare table_elt X and Y and return true iff X is cheaper than Y. */
484 1.1 mrg
485 1.1 mrg #define CHEAPER(X, Y) \
486 1.1 mrg (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
487 1.1 mrg
488 1.1 mrg static struct table_elt *table[HASH_SIZE];
489 1.1 mrg
490 1.1 mrg /* Chain of `struct table_elt's made so far for this function
491 1.1 mrg but currently removed from the table. */
492 1.1 mrg
493 1.1 mrg static struct table_elt *free_element_chain;
494 1.1 mrg
/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};
502 1.1 mrg
/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.
     NOTE(review): presumably filled in by cse_prescan_path -- confirm.  */
  struct branch_path *path;
};
515 1.1 mrg
516 1.1 mrg
517 1.1 mrg /* Pointers to the live in/live out bitmaps for the boundaries of the
518 1.1 mrg current EBB. */
519 1.1 mrg static bitmap cse_ebb_live_in, cse_ebb_live_out;
520 1.1 mrg
521 1.1 mrg /* A simple bitmap to track which basic blocks have been visited
522 1.1 mrg already as part of an already processed extended basic block. */
523 1.1 mrg static sbitmap cse_visited_basic_blocks;
524 1.1 mrg
525 1.1 mrg static bool fixed_base_plus_p (rtx x);
526 1.1 mrg static int notreg_cost (rtx, machine_mode, enum rtx_code, int);
527 1.1 mrg static int preferable (int, int, int, int);
528 1.1 mrg static void new_basic_block (void);
529 1.1 mrg static void make_new_qty (unsigned int, machine_mode);
530 1.1 mrg static void make_regs_eqv (unsigned int, unsigned int);
531 1.1 mrg static void delete_reg_equiv (unsigned int);
532 1.1 mrg static int mention_regs (rtx);
533 1.1 mrg static int insert_regs (rtx, struct table_elt *, int);
534 1.1 mrg static void remove_from_table (struct table_elt *, unsigned);
535 1.1 mrg static void remove_pseudo_from_table (rtx, unsigned);
536 1.1 mrg static struct table_elt *lookup (rtx, unsigned, machine_mode);
537 1.1 mrg static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
538 1.1 mrg static rtx lookup_as_function (rtx, enum rtx_code);
539 1.1 mrg static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
540 1.1 mrg machine_mode, int, int);
541 1.1 mrg static struct table_elt *insert (rtx, struct table_elt *, unsigned,
542 1.1 mrg machine_mode);
543 1.1 mrg static void merge_equiv_classes (struct table_elt *, struct table_elt *);
544 1.1 mrg static void invalidate (rtx, machine_mode);
545 1.1 mrg static void remove_invalid_refs (unsigned int);
546 1.1 mrg static void remove_invalid_subreg_refs (unsigned int, poly_uint64,
547 1.1 mrg machine_mode);
548 1.1 mrg static void rehash_using_reg (rtx);
549 1.1 mrg static void invalidate_memory (void);
550 1.1 mrg static rtx use_related_value (rtx, struct table_elt *);
551 1.1 mrg
552 1.1 mrg static inline unsigned canon_hash (rtx, machine_mode);
553 1.1 mrg static inline unsigned safe_hash (rtx, machine_mode);
554 1.1 mrg static inline unsigned hash_rtx_string (const char *);
555 1.1 mrg
556 1.1 mrg static rtx canon_reg (rtx, rtx_insn *);
557 1.1 mrg static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
558 1.1 mrg machine_mode *,
559 1.1 mrg machine_mode *);
560 1.1 mrg static rtx fold_rtx (rtx, rtx_insn *);
561 1.1 mrg static rtx equiv_constant (rtx);
562 1.1 mrg static void record_jump_equiv (rtx_insn *, bool);
563 1.1 mrg static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx,
564 1.1 mrg int);
565 1.1 mrg static void cse_insn (rtx_insn *);
566 1.1 mrg static void cse_prescan_path (struct cse_basic_block_data *);
567 1.1 mrg static void invalidate_from_clobbers (rtx_insn *);
568 1.1 mrg static void invalidate_from_sets_and_clobbers (rtx_insn *);
569 1.1 mrg static void cse_extended_basic_block (struct cse_basic_block_data *);
570 1.1 mrg extern void dump_class (struct table_elt*);
571 1.1 mrg static void get_cse_reg_info_1 (unsigned int regno);
572 1.1 mrg static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
573 1.1 mrg
574 1.1 mrg static void flush_hash_table (void);
575 1.1 mrg static bool insn_live_p (rtx_insn *, int *);
576 1.1 mrg static bool set_live_p (rtx, int *);
577 1.1 mrg static void cse_change_cc_mode_insn (rtx_insn *, rtx);
578 1.1 mrg static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
579 1.1 mrg static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
580 1.1 mrg bool);
581 1.1 mrg
582 1.1 mrg
584 1.1 mrg #undef RTL_HOOKS_GEN_LOWPART
585 1.1 mrg #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
586 1.1 mrg
587 1.1 mrg static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
588 1.1 mrg
589 1.1 mrg /* Nonzero if X has the form (PLUS frame-pointer integer). */
591 1.1 mrg
592 1.1 mrg static bool
593 1.1 mrg fixed_base_plus_p (rtx x)
594 1.1 mrg {
595 1.1 mrg switch (GET_CODE (x))
596 1.1 mrg {
597 1.1 mrg case REG:
598 1.1 mrg if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
599 1.1 mrg return true;
600 1.1 mrg if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
601 1.1 mrg return true;
602 1.1 mrg return false;
603 1.1 mrg
604 1.1 mrg case PLUS:
605 1.1 mrg if (!CONST_INT_P (XEXP (x, 1)))
606 1.1 mrg return false;
607 1.1 mrg return fixed_base_plus_p (XEXP (x, 0));
608 1.1 mrg
609 1.1 mrg default:
610 1.1 mrg return false;
611 1.1 mrg }
612 1.1 mrg }
613 1.1 mrg
614 1.1 mrg /* Dump the expressions in the equivalence class indicated by CLASSP.
615 1.1 mrg This function is used only for debugging. */
616 1.1 mrg DEBUG_FUNCTION void
617 1.1 mrg dump_class (struct table_elt *classp)
618 1.1 mrg {
619 1.1 mrg struct table_elt *elt;
620 1.1 mrg
621 1.1 mrg fprintf (stderr, "Equivalence chain for ");
622 1.1 mrg print_rtl (stderr, classp->exp);
623 1.1 mrg fprintf (stderr, ": \n");
624 1.1 mrg
625 1.1 mrg for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
626 1.1 mrg {
627 1.1 mrg print_rtl (stderr, elt->exp);
628 1.1 mrg fprintf (stderr, "\n");
629 1.1 mrg }
630 1.1 mrg }
631 1.1 mrg
632 1.1 mrg /* Return an estimate of the cost of the registers used in an rtx.
633 1.1 mrg This is mostly the number of different REG expressions in the rtx;
634 1.1 mrg however for some exceptions like fixed registers we use a cost of
635 1.1 mrg 0. If any other hard register reference occurs, return MAX_COST. */
636 1.1 mrg
637 1.1 mrg static int
638 1.1 mrg approx_reg_cost (const_rtx x)
639 1.1 mrg {
640 1.1 mrg int cost = 0;
641 1.1 mrg subrtx_iterator::array_type array;
642 1.1 mrg FOR_EACH_SUBRTX (iter, array, x, NONCONST)
643 1.1 mrg {
644 1.1 mrg const_rtx x = *iter;
645 1.1 mrg if (REG_P (x))
646 1.1 mrg {
647 1.1 mrg unsigned int regno = REGNO (x);
648 1.1 mrg if (!CHEAP_REGNO (regno))
649 1.1 mrg {
650 1.1 mrg if (regno < FIRST_PSEUDO_REGISTER)
651 1.1 mrg {
652 1.1 mrg if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
653 1.1 mrg return MAX_COST;
654 1.1 mrg cost += 2;
655 1.1 mrg }
656 1.1 mrg else
657 1.1 mrg cost += 1;
658 1.1 mrg }
659 1.1 mrg }
660 1.1 mrg }
661 1.1 mrg return cost;
662 1.1 mrg }
663 1.1 mrg
664 1.1 mrg /* Return a negative value if an rtx A, whose costs are given by COST_A
665 1.1 mrg and REGCOST_A, is more desirable than an rtx B.
666 1.1 mrg Return a positive value if A is less desirable, or 0 if the two are
667 1.1 mrg equally good. */
668 1.1 mrg static int
669 1.1 mrg preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
670 1.1 mrg {
671 1.1 mrg /* First, get rid of cases involving expressions that are entirely
672 1.1 mrg unwanted. */
673 1.1 mrg if (cost_a != cost_b)
674 1.1 mrg {
675 1.1 mrg if (cost_a == MAX_COST)
676 1.1 mrg return 1;
677 1.1 mrg if (cost_b == MAX_COST)
678 1.1 mrg return -1;
679 1.1 mrg }
680 1.1 mrg
681 1.1 mrg /* Avoid extending lifetimes of hardregs. */
682 1.1 mrg if (regcost_a != regcost_b)
683 1.1 mrg {
684 1.1 mrg if (regcost_a == MAX_COST)
685 1.1 mrg return 1;
686 1.1 mrg if (regcost_b == MAX_COST)
687 1.1 mrg return -1;
688 1.1 mrg }
689 1.1 mrg
690 1.1 mrg /* Normal operation costs take precedence. */
691 1.1 mrg if (cost_a != cost_b)
692 1.1 mrg return cost_a - cost_b;
693 1.1 mrg /* Only if these are identical consider effects on register pressure. */
694 1.1 mrg if (regcost_a != regcost_b)
695 1.1 mrg return regcost_a - regcost_b;
696 1.1 mrg return 0;
697 1.1 mrg }
698 1.1 mrg
699 1.1 mrg /* Internal function, to compute cost when X is not a register; called
700 1.1 mrg from COST macro to keep it simple. */
701 1.1 mrg
702 1.1 mrg static int
703 1.1 mrg notreg_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno)
704 1.1 mrg {
705 1.1 mrg scalar_int_mode int_mode, inner_mode;
706 1.1 mrg return ((GET_CODE (x) == SUBREG
707 1.1 mrg && REG_P (SUBREG_REG (x))
708 1.1 mrg && is_int_mode (mode, &int_mode)
709 1.1 mrg && is_int_mode (GET_MODE (SUBREG_REG (x)), &inner_mode)
710 1.1 mrg && GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (inner_mode)
711 1.1 mrg && subreg_lowpart_p (x)
712 1.1 mrg && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, inner_mode))
713 1.1 mrg ? 0
714 1.1 mrg : rtx_cost (x, mode, outer, opno, optimize_this_for_speed_p) * 2);
715 1.1 mrg }
716 1.1 mrg
717 1.1 mrg
718 1.1 mrg /* Initialize CSE_REG_INFO_TABLE. */
720 1.1 mrg
721 1.1 mrg static void
722 1.1 mrg init_cse_reg_info (unsigned int nregs)
723 1.1 mrg {
724 1.1 mrg /* Do we need to grow the table? */
725 1.1 mrg if (nregs > cse_reg_info_table_size)
726 1.1 mrg {
727 1.1 mrg unsigned int new_size;
728 1.1 mrg
729 1.1 mrg if (cse_reg_info_table_size < 2048)
730 1.1 mrg {
731 1.1 mrg /* Compute a new size that is a power of 2 and no smaller
732 1.1 mrg than the large of NREGS and 64. */
733 1.1 mrg new_size = (cse_reg_info_table_size
734 1.1 mrg ? cse_reg_info_table_size : 64);
735 1.1 mrg
736 1.1 mrg while (new_size < nregs)
737 1.1 mrg new_size *= 2;
738 1.1 mrg }
739 1.1 mrg else
740 1.1 mrg {
741 1.1 mrg /* If we need a big table, allocate just enough to hold
742 1.1 mrg NREGS registers. */
743 1.1 mrg new_size = nregs;
744 1.1 mrg }
745 1.1 mrg
746 1.1 mrg /* Reallocate the table with NEW_SIZE entries. */
747 1.1 mrg free (cse_reg_info_table);
748 1.1 mrg cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
749 1.1 mrg cse_reg_info_table_size = new_size;
750 1.1 mrg cse_reg_info_table_first_uninitialized = 0;
751 1.1 mrg }
752 1.1 mrg
753 1.1 mrg /* Do we have all of the first NREGS entries initialized? */
754 1.1 mrg if (cse_reg_info_table_first_uninitialized < nregs)
755 1.1 mrg {
756 1.1 mrg unsigned int old_timestamp = cse_reg_info_timestamp - 1;
757 1.1 mrg unsigned int i;
758 1.1 mrg
759 1.1 mrg /* Put the old timestamp on newly allocated entries so that they
760 1.1 mrg will all be considered out of date. We do not touch those
761 1.1 mrg entries beyond the first NREGS entries to be nice to the
762 1.1 mrg virtual memory. */
763 1.1 mrg for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
764 1.1 mrg cse_reg_info_table[i].timestamp = old_timestamp;
765 1.1 mrg
766 1.1 mrg cse_reg_info_table_first_uninitialized = nregs;
767 1.1 mrg }
768 1.1 mrg }
769 1.1 mrg
770 1.1 mrg /* Given REGNO, initialize the cse_reg_info entry for REGNO. */
771 1.1 mrg
772 1.1 mrg static void
773 1.1 mrg get_cse_reg_info_1 (unsigned int regno)
774 1.1 mrg {
775 1.1 mrg /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
776 1.1 mrg entry will be considered to have been initialized. */
777 1.1 mrg cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
778 1.1 mrg
779 1.1 mrg /* Initialize the rest of the entry. */
780 1.1 mrg cse_reg_info_table[regno].reg_tick = 1;
781 1.1 mrg cse_reg_info_table[regno].reg_in_table = -1;
782 1.1 mrg cse_reg_info_table[regno].subreg_ticked = -1;
783 1.1 mrg cse_reg_info_table[regno].reg_qty = -regno - 1;
784 1.1 mrg }
785 1.1 mrg
786 1.1 mrg /* Find a cse_reg_info entry for REGNO. */
787 1.1 mrg
788 1.1 mrg static inline struct cse_reg_info *
789 1.1 mrg get_cse_reg_info (unsigned int regno)
790 1.1 mrg {
791 1.1 mrg struct cse_reg_info *p = &cse_reg_info_table[regno];
792 1.1 mrg
793 1.1 mrg /* If this entry has not been initialized, go ahead and initialize
794 1.1 mrg it. */
795 1.1 mrg if (p->timestamp != cse_reg_info_timestamp)
796 1.1 mrg get_cse_reg_info_1 (regno);
797 1.1 mrg
798 1.1 mrg return p;
799 1.1 mrg }
800 1.1 mrg
801 1.1 mrg /* Clear the hash table and initialize each register with its own quantity,
802 1.1 mrg for a new basic block. */
803 1.1 mrg
804 1.1 mrg static void
805 1.1 mrg new_basic_block (void)
806 1.1 mrg {
807 1.1 mrg int i;
808 1.1 mrg
809 1.1 mrg next_qty = 0;
810 1.1 mrg
811 1.1 mrg /* Invalidate cse_reg_info_table. */
812 1.1 mrg cse_reg_info_timestamp++;
813 1.1 mrg
814 1.1 mrg /* Clear out hash table state for this pass. */
815 1.1 mrg CLEAR_HARD_REG_SET (hard_regs_in_table);
816 1.1 mrg
817 1.1 mrg /* The per-quantity values used to be initialized here, but it is
818 1.1 mrg much faster to initialize each as it is made in `make_new_qty'. */
819 1.1 mrg
820 1.1 mrg for (i = 0; i < HASH_SIZE; i++)
821 1.1 mrg {
822 1.1 mrg struct table_elt *first;
823 1.1 mrg
824 1.1 mrg first = table[i];
825 1.1 mrg if (first != NULL)
826 1.1 mrg {
827 1.1 mrg struct table_elt *last = first;
828 1.1 mrg
829 1.1 mrg table[i] = NULL;
830 1.1 mrg
831 1.1 mrg while (last->next_same_hash != NULL)
832 1.1 mrg last = last->next_same_hash;
833 1.1 mrg
834 1.1 mrg /* Now relink this hash entire chain into
835 1.1 mrg the free element list. */
836 1.1 mrg
837 1.1 mrg last->next_same_hash = free_element_chain;
838 1.1 mrg free_element_chain = first;
839 1.1 mrg }
840 1.1 mrg }
841 1.1 mrg }
842 1.1 mrg
843 1.1 mrg /* Say that register REG contains a quantity in mode MODE not in any
844 1.1 mrg register before and initialize that quantity. */
845 1.1 mrg
846 1.1 mrg static void
847 1.1 mrg make_new_qty (unsigned int reg, machine_mode mode)
848 1.1 mrg {
849 1.1 mrg int q;
850 1.1 mrg struct qty_table_elem *ent;
851 1.1 mrg struct reg_eqv_elem *eqv;
852 1.1 mrg
853 1.1 mrg gcc_assert (next_qty < max_qty);
854 1.1 mrg
855 1.1 mrg q = REG_QTY (reg) = next_qty++;
856 1.1 mrg ent = &qty_table[q];
857 1.1 mrg ent->first_reg = reg;
858 1.1 mrg ent->last_reg = reg;
859 1.1 mrg ent->mode = mode;
860 1.1 mrg ent->const_rtx = ent->const_insn = NULL;
861 1.1 mrg ent->comparison_code = UNKNOWN;
862 1.1 mrg
863 1.1 mrg eqv = ®_eqv_table[reg];
864 1.1 mrg eqv->next = eqv->prev = -1;
865 1.1 mrg }
866 1.1 mrg
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.

   The registers of a quantity live on a doubly linked list
   (reg_eqv_table[].next/prev, terminated by -1) whose head is the
   preferred canonical replacement for that quantity; this function
   links NEW_REG into OLD_REG's list at the appropriate position.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      /* NEW_REG becomes the head of the list and thus the canonical
	 replacement for the quantity.  */
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      /* Splice NEW_REG in after LASTR, updating the quantity's tail
	 pointer when NEW_REG becomes the new last element.  */
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}
928 1.1 mrg
929 1.1 mrg /* Remove REG from its equivalence class. */
930 1.1 mrg
931 1.1 mrg static void
932 1.1 mrg delete_reg_equiv (unsigned int reg)
933 1.1 mrg {
934 1.1 mrg struct qty_table_elem *ent;
935 1.1 mrg int q = REG_QTY (reg);
936 1.1 mrg int p, n;
937 1.1 mrg
938 1.1 mrg /* If invalid, do nothing. */
939 1.1 mrg if (! REGNO_QTY_VALID_P (reg))
940 1.1 mrg return;
941 1.1 mrg
942 1.1 mrg ent = &qty_table[q];
943 1.1 mrg
944 1.1 mrg p = reg_eqv_table[reg].prev;
945 1.1 mrg n = reg_eqv_table[reg].next;
946 1.1 mrg
947 1.1 mrg if (n != -1)
948 1.1 mrg reg_eqv_table[n].prev = p;
949 1.1 mrg else
950 1.1 mrg ent->last_reg = p;
951 1.1 mrg if (p != -1)
952 1.1 mrg reg_eqv_table[p].next = n;
953 1.1 mrg else
954 1.1 mrg ent->first_reg = n;
955 1.1 mrg
956 1.1 mrg REG_QTY (reg) = -reg - 1;
957 1.1 mrg }
958 1.1 mrg
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      /* Handle every hard regno covered by a multi-register REG.  */
      for (i = regno; i < endregno; i++)
	{
	  /* A tick mismatch means stale table entries still mention
	     regno I; flush them before marking I up to date.  */
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  /* Recurse into all rtx and rtx-vector operands of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
1067 1.1 mrg
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  /* Try to join an existing quantity: scan CLASSP's class for
	     a register of the same mode and make REGNO equivalent to
	     it.  */
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
1164 1.1 mrg
1165 1.1 mrg
1167 1.1 mrg /* Compute upper and lower anchors for CST. Also compute the offset of CST
1168 1.1 mrg from these anchors/bases such that *_BASE + *_OFFS = CST. Return false iff
1169 1.1 mrg CST is equal to an anchor. */
1170 1.1 mrg
1171 1.1 mrg static bool
1172 1.1 mrg compute_const_anchors (rtx cst,
1173 1.1 mrg HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
1174 1.1 mrg HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
1175 1.1 mrg {
1176 1.1 mrg unsigned HOST_WIDE_INT n = UINTVAL (cst);
1177 1.1 mrg
1178 1.1 mrg *lower_base = n & ~(targetm.const_anchor - 1);
1179 1.1 mrg if ((unsigned HOST_WIDE_INT) *lower_base == n)
1180 1.1 mrg return false;
1181 1.1 mrg
1182 1.1 mrg *upper_base = ((n + (targetm.const_anchor - 1))
1183 1.1 mrg & ~(targetm.const_anchor - 1));
1184 1.1 mrg *upper_offs = n - *upper_base;
1185 1.1 mrg *lower_offs = n - *lower_base;
1186 1.1 mrg return true;
1187 1.1 mrg }
1188 1.1 mrg
1189 1.1 mrg /* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE. */
1190 1.1 mrg
1191 1.1 mrg static void
1192 1.1 mrg insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
1193 1.1 mrg machine_mode mode)
1194 1.1 mrg {
1195 1.1 mrg struct table_elt *elt;
1196 1.1 mrg unsigned hash;
1197 1.1 mrg rtx anchor_exp;
1198 1.1 mrg rtx exp;
1199 1.1 mrg
1200 1.1 mrg anchor_exp = gen_int_mode (anchor, mode);
1201 1.1 mrg hash = HASH (anchor_exp, mode);
1202 1.1 mrg elt = lookup (anchor_exp, hash, mode);
1203 1.1 mrg if (!elt)
1204 1.1 mrg elt = insert (anchor_exp, NULL, hash, mode);
1205 1.1 mrg
1206 1.1 mrg exp = plus_constant (mode, reg, offs);
1207 1.1 mrg /* REG has just been inserted and the hash codes recomputed. */
1208 1.1 mrg mention_regs (exp);
1209 1.1 mrg hash = HASH (exp, mode);
1210 1.1 mrg
1211 1.1 mrg /* Use the cost of the register rather than the whole expression. When
1212 1.1 mrg looking up constant anchors we will further offset the corresponding
1213 1.1 mrg expression therefore it does not make sense to prefer REGs over
1214 1.1 mrg reg-immediate additions. Prefer instead the oldest expression. Also
1215 1.1 mrg don't prefer pseudos over hard regs so that we derive constants in
1216 1.1 mrg argument registers from other argument registers rather than from the
1217 1.1 mrg original pseudo that was used to synthesize the constant. */
1218 1.1 mrg insert_with_costs (exp, elt, hash, mode, COST (reg, mode), 1);
1219 1.1 mrg }
1220 1.1 mrg
1221 1.1 mrg /* The constant CST is equivalent to the register REG. Create
1222 1.1 mrg equivalences between the two anchors of CST and the corresponding
1223 1.1 mrg register-offset expressions using REG. */
1224 1.1 mrg
1225 1.1 mrg static void
1226 1.1 mrg insert_const_anchors (rtx reg, rtx cst, machine_mode mode)
1227 1.1 mrg {
1228 1.1 mrg HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1229 1.1 mrg
1230 1.1 mrg if (!compute_const_anchors (cst, &lower_base, &lower_offs,
1231 1.1 mrg &upper_base, &upper_offs))
1232 1.1 mrg return;
1233 1.1 mrg
1234 1.1 mrg /* Ignore anchors of value 0. Constants accessible from zero are
1235 1.1 mrg simple. */
1236 1.1 mrg if (lower_base != 0)
1237 1.1 mrg insert_const_anchor (lower_base, reg, -lower_offs, mode);
1238 1.1 mrg
1239 1.1 mrg if (upper_base != 0)
1240 1.1 mrg insert_const_anchor (upper_base, reg, -upper_offs, mode);
1241 1.1 mrg }
1242 1.1 mrg
/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
   valid expression.  Return the cheapest and oldest of such expressions.  In
   *OLD, return how old the resulting expression is compared to the other
   equivalent expressions.  */

static rtx
find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
			   unsigned *old)
{
  struct table_elt *elt;
  unsigned idx;
  struct table_elt *match_elt;
  rtx match;

  /* Find the cheapest and *oldest* expression to maximize the chance of
     reusing the same pseudo.  */

  match_elt = NULL;
  match = NULL_RTX;
  for (elt = anchor_elt->first_same_value, idx = 0;
       elt;
       elt = elt->next_same_value, idx++)
    {
      /* The class list is ordered cheapest-first, so once the current
	 candidate is strictly cheaper than ELT, no later element can
	 improve on it.  */
      if (match_elt && CHEAPER (match_elt, elt))
	return match;

      /* Only a bare register or a reg+const form can be offset.  */
      if (REG_P (elt->exp)
	  || (GET_CODE (elt->exp) == PLUS
	      && REG_P (XEXP (elt->exp, 0))
	      && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
	{
	  rtx x;

	  /* Ignore expressions that are no longer valid.  */
	  if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
	    continue;

	  x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
	  /* Accept the result only when the remaining offset stays
	     within one anchor distance of the register.  */
	  if (REG_P (x)
	      || (GET_CODE (x) == PLUS
		  && IN_RANGE (INTVAL (XEXP (x, 1)),
			       -targetm.const_anchor,
			       targetm.const_anchor - 1)))
	    {
	      match = x;
	      match_elt = elt;
	      *old = idx;
	    }
	}
    }

  return match;
}
1297 1.1 mrg
1298 1.1 mrg /* Try to express the constant SRC_CONST using a register+offset expression
1299 1.1 mrg derived from a constant anchor. Return it if successful or NULL_RTX,
1300 1.1 mrg otherwise. */
1301 1.1 mrg
1302 1.1 mrg static rtx
1303 1.1 mrg try_const_anchors (rtx src_const, machine_mode mode)
1304 1.1 mrg {
1305 1.1 mrg struct table_elt *lower_elt, *upper_elt;
1306 1.1 mrg HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1307 1.1 mrg rtx lower_anchor_rtx, upper_anchor_rtx;
1308 1.1 mrg rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
1309 1.1 mrg unsigned lower_old, upper_old;
1310 1.1 mrg
1311 1.1 mrg /* CONST_INT is used for CC modes, but we should leave those alone. */
1312 1.1 mrg if (GET_MODE_CLASS (mode) == MODE_CC)
1313 1.1 mrg return NULL_RTX;
1314 1.1 mrg
1315 1.1 mrg gcc_assert (SCALAR_INT_MODE_P (mode));
1316 1.1 mrg if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
1317 1.1 mrg &upper_base, &upper_offs))
1318 1.1 mrg return NULL_RTX;
1319 1.1 mrg
1320 1.1 mrg lower_anchor_rtx = GEN_INT (lower_base);
1321 1.1 mrg upper_anchor_rtx = GEN_INT (upper_base);
1322 1.1 mrg lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
1323 1.1 mrg upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);
1324 1.1 mrg
1325 1.1 mrg if (lower_elt)
1326 1.1 mrg lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
1327 1.1 mrg if (upper_elt)
1328 1.1 mrg upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);
1329 1.1 mrg
1330 1.1 mrg if (!lower_exp)
1331 1.1 mrg return upper_exp;
1332 1.1 mrg if (!upper_exp)
1333 1.1 mrg return lower_exp;
1334 1.1 mrg
1335 1.1 mrg /* Return the older expression. */
1336 1.1 mrg return (upper_old > lower_old ? upper_exp : lower_exp);
1337 1.1 mrg }
1338 1.1 mrg
1339 1.1 mrg /* Look in or update the hash table. */
1341 1.1 mrg
/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	/* ELT headed the class, so every remaining element must be
	   retargeted at the new head.  */
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
1420 1.1 mrg
1421 1.1 mrg /* Same as above, but X is a pseudo-register. */
1422 1.1 mrg
1423 1.1 mrg static void
1424 1.1 mrg remove_pseudo_from_table (rtx x, unsigned int hash)
1425 1.1 mrg {
1426 1.1 mrg struct table_elt *elt;
1427 1.1 mrg
1428 1.1 mrg /* Because a pseudo-register can be referenced in more than one
1429 1.1 mrg mode, we might have to remove more than one table entry. */
1430 1.1 mrg while ((elt = lookup_for_remove (x, hash, VOIDmode)))
1431 1.1 mrg remove_from_table (elt, hash);
1432 1.1 mrg }
1433 1.1 mrg
1434 1.1 mrg /* Look up X in the hash table and return its table element,
1435 1.1 mrg or 0 if X is not in the table.
1436 1.1 mrg
1437 1.1 mrg MODE is the machine-mode of X, or if X is an integer constant
1438 1.1 mrg with VOIDmode then MODE is the mode with which X will be used.
1439 1.1 mrg
1440 1.1 mrg Here we are satisfied to find an expression whose tree structure
1441 1.1 mrg looks like X. */
1442 1.1 mrg
1443 1.1 mrg static struct table_elt *
1444 1.1 mrg lookup (rtx x, unsigned int hash, machine_mode mode)
1445 1.1 mrg {
1446 1.1 mrg struct table_elt *p;
1447 1.1 mrg
1448 1.1 mrg for (p = table[hash]; p; p = p->next_same_hash)
1449 1.1 mrg if (mode == p->mode && ((x == p->exp && REG_P (x))
1450 1.1 mrg || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1451 1.1 mrg return p;
1452 1.1 mrg
1453 1.1 mrg return 0;
1454 1.1 mrg }
1455 1.1 mrg
1456 1.1 mrg /* Like `lookup' but don't care whether the table element uses invalid regs.
1457 1.1 mrg Also ignore discrepancies in the machine mode of a register. */
1458 1.1 mrg
1459 1.1 mrg static struct table_elt *
1460 1.1 mrg lookup_for_remove (rtx x, unsigned int hash, machine_mode mode)
1461 1.1 mrg {
1462 1.1 mrg struct table_elt *p;
1463 1.1 mrg
1464 1.1 mrg if (REG_P (x))
1465 1.1 mrg {
1466 1.1 mrg unsigned int regno = REGNO (x);
1467 1.1 mrg
1468 1.1 mrg /* Don't check the machine mode when comparing registers;
1469 1.1 mrg invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1470 1.1 mrg for (p = table[hash]; p; p = p->next_same_hash)
1471 1.1 mrg if (REG_P (p->exp)
1472 1.1 mrg && REGNO (p->exp) == regno)
1473 1.1 mrg return p;
1474 1.1 mrg }
1475 1.1 mrg else
1476 1.1 mrg {
1477 1.1 mrg for (p = table[hash]; p; p = p->next_same_hash)
1478 1.1 mrg if (mode == p->mode
1479 1.1 mrg && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1480 1.1 mrg return p;
1481 1.1 mrg }
1482 1.1 mrg
1483 1.1 mrg return 0;
1484 1.1 mrg }
1485 1.1 mrg
1486 1.1 mrg /* Look for an expression equivalent to X and with code CODE.
1487 1.1 mrg If one is found, return that expression. */
1488 1.1 mrg
1489 1.1 mrg static rtx
1490 1.1 mrg lookup_as_function (rtx x, enum rtx_code code)
1491 1.1 mrg {
1492 1.1 mrg struct table_elt *p
1493 1.1 mrg = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1494 1.1 mrg
1495 1.1 mrg if (p == 0)
1496 1.1 mrg return 0;
1497 1.1 mrg
1498 1.1 mrg for (p = p->first_same_value; p; p = p->next_same_value)
1499 1.1 mrg if (GET_CODE (p->exp) == code
1500 1.1 mrg /* Make sure this is a valid entry in the table. */
1501 1.1 mrg && exp_equiv_p (p->exp, p->exp, 1, false))
1502 1.1 mrg return p->exp;
1503 1.1 mrg
1504 1.1 mrg return 0;
1505 1.1 mrg }
1506 1.1 mrg
/* Insert X in the hash table, assuming HASH is its hash code and
   CLASSP is an element of the class it should go in (or 0 if a new
   class should be made).  COST is the cost of X and REG_COST is the
   cost of registers in X.  It is inserted at the proper position to
   keep the class in the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

static struct table_elt *
insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
		   machine_mode mode, int cost, int reg_cost)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));

  /* Put an element for X into the right hash bucket.  */

  /* Reuse a previously freed element when one is available, otherwise
     allocate a fresh one.  */
  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = XNEW (struct table_elt);

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = cost;
  elt->regcost = reg_cost;
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  /* Link the new element at the head of its hash chain.  */
  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  /* Every member of the class must now point at the new head.  */
	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp;
	       (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next)
	    ;

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      /* Constant inserted into a class headed by a register: record the
	 constant as the known value of that register's quantity.  */
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      struct table_elt *p;

      /* Register inserted into an existing class: scan the class for a
	 constant member and record it as this register's known value.  */
      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && !REG_P (p->exp))
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (REG_P (x)
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = SAFE_HASH (subexp, mode);
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
1693 1.1 mrg
1694 1.1 mrg /* Wrap insert_with_costs by passing the default costs. */
1695 1.1 mrg
1696 1.1 mrg static struct table_elt *
1697 1.1 mrg insert (rtx x, struct table_elt *classp, unsigned int hash,
1698 1.1 mrg machine_mode mode)
1699 1.1 mrg {
1700 1.1 mrg return insert_with_costs (x, classp, hash, mode,
1701 1.1 mrg COST (x, mode), approx_reg_cost (x));
1702 1.1 mrg }
1703 1.1 mrg
1704 1.1 mrg
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new_elt;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      machine_mode mode = elt->mode;

      /* Record the successor now; ELT is about to be removed from the
	 table, which invalidates its links.  */
      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
	{
	  bool need_rehash = false;

	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (REG_P (exp))
	    {
	      /* Deleting the register's quantity changes its hash value,
		 so other entries mentioning it must be rehashed below.  */
	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
	      delete_reg_equiv (REGNO (exp));
	    }

	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
	    remove_pseudo_from_table (exp, hash);
	  else
	    remove_from_table (elt, hash);

	  /* insert_regs may change EXP's hash too; in either case the
	     hash must be recomputed before re-insertion.  */
	  if (insert_regs (exp, class1, 0) || need_rehash)
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new_elt = insert (exp, class1, hash, mode);
	  new_elt->in_memory = hash_arg_in_memory;
	  /* Preserve a MAX_COST marking on ASM_OPERANDS entries so the
	     re-inserted entry keeps the same cost penalty.  */
	  if (GET_CODE (exp) == ASM_OPERANDS && elt->cost == MAX_COST)
	    new_elt->cost = MAX_COST;
	}
    }
}
1769 1.1 mrg
1770 1.1 mrg /* Flush the entire hash table. */
1772 1.1 mrg
1773 1.1 mrg static void
1774 1.1 mrg flush_hash_table (void)
1775 1.1 mrg {
1776 1.1 mrg int i;
1777 1.1 mrg struct table_elt *p;
1778 1.1 mrg
1779 1.1 mrg for (i = 0; i < HASH_SIZE; i++)
1780 1.1 mrg for (p = table[i]; p; p = table[i])
1781 1.1 mrg {
1782 1.1 mrg /* Note that invalidate can remove elements
1783 1.1 mrg after P in the current hash chain. */
1784 1.1 mrg if (REG_P (p->exp))
1785 1.1 mrg invalidate (p->exp, VOIDmode);
1786 1.1 mrg else
1787 1.1 mrg remove_from_table (p, i);
1788 1.1 mrg }
1789 1.1 mrg }
1790 1.1 mrg
1791 1.1 mrg /* Check whether an anti dependence exists between X and EXP. MODE and
1793 1.1 mrg ADDR are as for canon_anti_dependence. */
1794 1.1 mrg
1795 1.1 mrg static bool
1796 1.1 mrg check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr)
1797 1.1 mrg {
1798 1.1 mrg subrtx_iterator::array_type array;
1799 1.1 mrg FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1800 1.1 mrg {
1801 1.1 mrg const_rtx x = *iter;
1802 1.1 mrg if (MEM_P (x) && canon_anti_dependence (x, true, exp, mode, addr))
1803 1.1 mrg return true;
1804 1.1 mrg }
1805 1.1 mrg return false;
1806 1.1 mrg }
1807 1.1 mrg
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in register X.  */

static void
invalidate_reg (rtx x)
{
  gcc_assert (GET_CODE (x) == REG);

  /* If X is a register, dependencies on its contents are recorded
     through the qty number mechanism.  Just change the qty number of
     the register, mark it as invalid for expressions that refer to it,
     and remove it itself.  */
  unsigned int regno = REGNO (x);
  unsigned int hash = HASH (x, GET_MODE (x));

  /* Remove REGNO from any quantity list it might be on and indicate
     that its value might have changed.  If it is a pseudo, remove its
     entry from the hash table.

     For a hard register, we do the first two actions above for any
     additional hard registers corresponding to X.  Then, if any of these
     registers are in the table, we must remove any REG entries that
     overlap these registers.  */

  delete_reg_equiv (regno);
  /* Bumping the tick invalidates any entry whose REG_IN_TABLE no longer
     matches REG_TICK.  */
  REG_TICK (regno)++;
  SUBREG_TICKED (regno) = -1;

  if (regno >= FIRST_PSEUDO_REGISTER)
    remove_pseudo_from_table (x, hash);
  else
    {
      /* Hard register: X may span several consecutive hard registers, all
	 of which must be invalidated.  */
      HOST_WIDE_INT in_table = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
      unsigned int endregno = END_REGNO (x);
      unsigned int rn;
      struct table_elt *p, *next;

      CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

      for (rn = regno + 1; rn < endregno; rn++)
	{
	  in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
	  CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
	  delete_reg_equiv (rn);
	  REG_TICK (rn)++;
	  SUBREG_TICKED (rn) = -1;
	}

      /* If any of the clobbered registers appear in the table, scan every
	 bucket and remove REG entries that overlap [REGNO, ENDREGNO).  */
      if (in_table)
	for (hash = 0; hash < HASH_SIZE; hash++)
	  for (p = table[hash]; p; p = next)
	    {
	      /* Fetch the successor first: remove_from_table frees P.  */
	      next = p->next_same_hash;

	      if (!REG_P (p->exp) || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		continue;

	      unsigned int tregno = REGNO (p->exp);
	      unsigned int tendregno = END_REGNO (p->exp);
	      /* Standard half-open range overlap test.  */
	      if (tendregno > regno && tregno < endregno)
		remove_from_table (p, hash);
	    }
    }
}
1872 1.1 mrg
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */

static void
invalidate (rtx x, machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      /* Register invalidation has its own routine.  */
      invalidate_reg (x);
      return;

    case SUBREG:
      /* Storing into a SUBREG clobbers the whole underlying register.  */
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      /* Invalidate each destination contained in the PARALLEL.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
	invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
	 question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
	 true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
	 memory.  */
      if (full_mode == VOIDmode)
	full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
	{
	  struct table_elt *next;

	  for (p = table[i]; p; p = next)
	    {
	      /* Fetch the successor first: remove_from_table frees P.  */
	      next = p->next_same_hash;
	      if (p->in_memory)
		{
		  /* Just canonicalize the expression once;
		     otherwise each time we call invalidate
		     true_dependence will canonicalize the
		     expression again.  */
		  if (!p->canon_exp)
		    p->canon_exp = canon_rtx (p->exp);
		  if (check_dependence (p->canon_exp, x, full_mode, addr))
		    remove_from_table (p, i);
		}
	    }
	}
      return;

    default:
      gcc_unreachable ();
    }
}
1950 1.1 mrg
1951 1.1 mrg /* Invalidate DEST. Used when DEST is not going to be added
1952 1.1 mrg into the hash table for some reason, e.g. do_not_record
1953 1.1 mrg flagged on it. */
1954 1.1 mrg
1955 1.1 mrg static void
1956 1.1 mrg invalidate_dest (rtx dest)
1957 1.1 mrg {
1958 1.1 mrg if (REG_P (dest)
1959 1.1 mrg || GET_CODE (dest) == SUBREG
1960 1.1 mrg || MEM_P (dest))
1961 1.1 mrg invalidate (dest, VOIDmode);
1962 1.1 mrg else if (GET_CODE (dest) == STRICT_LOW_PART
1963 1.1 mrg || GET_CODE (dest) == ZERO_EXTRACT)
1964 1.1 mrg invalidate (XEXP (dest, 0), GET_MODE (dest));
1965 1.1 mrg }
1966 1.1 mrg
1967 1.1 mrg /* Remove all expressions that refer to register REGNO,
1969 1.1 mrg since they are already invalid, and we are about to
1970 1.1 mrg mark that register valid again and don't want the old
1971 1.1 mrg expressions to reappear as valid. */
1972 1.1 mrg
1973 1.1 mrg static void
1974 1.1 mrg remove_invalid_refs (unsigned int regno)
1975 1.1 mrg {
1976 1.1 mrg unsigned int i;
1977 1.1 mrg struct table_elt *p, *next;
1978 1.1 mrg
1979 1.1 mrg for (i = 0; i < HASH_SIZE; i++)
1980 1.1 mrg for (p = table[i]; p; p = next)
1981 1.1 mrg {
1982 1.1 mrg next = p->next_same_hash;
1983 1.1 mrg if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp))
1984 1.1 mrg remove_from_table (p, i);
1985 1.1 mrg }
1986 1.1 mrg }
1987 1.1 mrg
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  Entries that are SUBREGs of REGNO whose byte range
   cannot overlap [OFFSET, OFFSET + size of MODE) are unaffected by the
   store and are kept; every other non-REG entry mentioning REGNO is
   removed.  */
static void
remove_invalid_subreg_refs (unsigned int regno, poly_uint64 offset,
			    machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	rtx exp = p->exp;
	/* Fetch the successor first: remove_from_table frees P.  */
	next = p->next_same_hash;

	/* Keep plain (REG REGNO) entries, and keep SUBREGs of REGNO whose
	   byte range does not overlap the invalidated range.  */
	if (!REG_P (exp)
	    && (GET_CODE (exp) != SUBREG
		|| !REG_P (SUBREG_REG (exp))
		|| REGNO (SUBREG_REG (exp)) != regno
		|| ranges_maybe_overlap_p (SUBREG_BYTE (exp),
					   GET_MODE_SIZE (GET_MODE (exp)),
					   offset, GET_MODE_SIZE (mode)))
	    && refers_to_regno_p (regno, p->exp))
	  remove_from_table (p, i);
      }
}
2014 1.1 mrg
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	/* Fetch the successor first: P may be moved to another chain.  */
	next = p->next_same_hash;
	if (reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, false)
	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
	  {
	    /* Unlink P from chain I ...  */
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    /* ... and push it onto the head of chain HASH.  */
	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
2066 1.1 mrg
/* Remove from the hash table any expression that is a call-clobbered
   register in INSN.  Also update their TICK values.  */

static void
invalidate_for_call (rtx_insn *insn)
{
  unsigned int regno;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;
  hard_reg_set_iterator hrsi;

  /* Go through all the hard registers.  For each that might be clobbered
     in call insn INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.

     ??? We could be more precise for partially-clobbered registers,
     and only invalidate values that actually occupy the clobbered part
     of the registers.  It doesn't seem worth the effort though, since
     we shouldn't see this situation much before RA.  Whatever choice
     we make here has to be consistent with the table walk below,
     so any change to this test will require a change there too.  */
  HARD_REG_SET callee_clobbers
    = insn_callee_abi (insn).full_and_partial_reg_clobbers ();
  EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, regno, hrsi)
    {
      delete_reg_equiv (regno);
      if (REG_TICK (regno) >= 0)
	{
	  /* Bumping the tick invalidates existing entries for REGNO.  */
	  REG_TICK (regno)++;
	  SUBREG_TICKED (regno) = -1;
	}
      in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
    }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
	{
	  /* Fetch the successor first: remove_from_table frees P.  */
	  next = p->next_same_hash;

	  if (!REG_P (p->exp)
	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
	    continue;

	  /* This must use the same test as above rather than the
	     more accurate clobbers_reg_p.  */
	  if (overlaps_hard_reg_set_p (callee_clobbers, GET_MODE (p->exp),
				       REGNO (p->exp)))
	    remove_from_table (p, hash);
	}
}
2125 1.1 mrg
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
	relt = lookup (subexp,
		       SAFE_HASH (subexp, GET_MODE (subexp)),
		       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
	 The first is when X is already in the table.  Then it is searching
	 the RELATED_VALUE list of X's class (RELT).  The second case is when
	 X is not in the table.  Then RELT points to a class for the related
	 value.

	 Ensure that, whatever case we are in, that we ignore classes that have
	 the same value as X.  */

      if (rtx_equal_p (x, p->exp))
	q = 0;
      else
	for (q = p->first_same_value; q; q = q->next_same_value)
	  if (REG_P (q->exp))
	    break;

      if (q)
	break;

      /* Advance around the circular related-value chain.  */
      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
	 Alternatively, perhaps RELT was in the table for some other reason
	 and it has no related values recorded.  */
      if (p == relt || p == 0)
	break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->mode, q->exp, offset);
}
2199 1.1 mrg
2200 1.1 mrg
/* Hash a string.  Just add its bytes up.  A null pointer hashes to zero.  */
static inline unsigned
hash_rtx_string (const char *ps)
{
  unsigned total = 0;

  if (ps != nullptr)
    for (const unsigned char *cp = (const unsigned char *) ps; *cp; cp++)
      total += *cp;

  return total;
}
2215 1.1 mrg
2216 1.1 mrg /* Same as hash_rtx, but call CB on each rtx if it is not NULL.
2217 1.1 mrg When the callback returns true, we continue with the new rtx. */
2218 1.1 mrg
2219 1.1 mrg unsigned
2220 1.1 mrg hash_rtx_cb (const_rtx x, machine_mode mode,
2221 1.1 mrg int *do_not_record_p, int *hash_arg_in_memory_p,
2222 1.1 mrg bool have_reg_qty, hash_rtx_callback_function cb)
2223 1.1 mrg {
2224 1.1 mrg int i, j;
2225 1.1 mrg unsigned hash = 0;
2226 1.1 mrg enum rtx_code code;
2227 1.1 mrg const char *fmt;
2228 1.1 mrg machine_mode newmode;
2229 1.1 mrg rtx newx;
2230 1.1 mrg
2231 1.1 mrg /* Used to turn recursion into iteration. We can't rely on GCC's
2232 1.1 mrg tail-recursion elimination since we need to keep accumulating values
2233 1.1 mrg in HASH. */
2234 1.1 mrg repeat:
2235 1.1 mrg if (x == 0)
2236 1.1 mrg return hash;
2237 1.1 mrg
2238 1.1 mrg /* Invoke the callback first. */
2239 1.1 mrg if (cb != NULL
2240 1.1 mrg && ((*cb) (x, mode, &newx, &newmode)))
2241 1.1 mrg {
2242 1.1 mrg hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2243 1.1 mrg hash_arg_in_memory_p, have_reg_qty, cb);
2244 1.1 mrg return hash;
2245 1.1 mrg }
2246 1.1 mrg
2247 1.1 mrg code = GET_CODE (x);
2248 1.1 mrg switch (code)
2249 1.1 mrg {
2250 1.1 mrg case REG:
2251 1.1 mrg {
2252 1.1 mrg unsigned int regno = REGNO (x);
2253 1.1 mrg
2254 1.1 mrg if (do_not_record_p && !reload_completed)
2255 1.1 mrg {
2256 1.1 mrg /* On some machines, we can't record any non-fixed hard register,
2257 1.1 mrg because extending its life will cause reload problems. We
2258 1.1 mrg consider ap, fp, sp, gp to be fixed for this purpose.
2259 1.1 mrg
2260 1.1 mrg We also consider CCmode registers to be fixed for this purpose;
2261 1.1 mrg failure to do so leads to failure to simplify 0<100 type of
2262 1.1 mrg conditionals.
2263 1.1 mrg
2264 1.1 mrg On all machines, we can't record any global registers.
2265 1.1 mrg Nor should we record any register that is in a small
2266 1.1 mrg class, as defined by TARGET_CLASS_LIKELY_SPILLED_P. */
2267 1.1 mrg bool record;
2268 1.1 mrg
2269 1.1 mrg if (regno >= FIRST_PSEUDO_REGISTER)
2270 1.1 mrg record = true;
2271 1.1 mrg else if (x == frame_pointer_rtx
2272 1.1 mrg || x == hard_frame_pointer_rtx
2273 1.1 mrg || x == arg_pointer_rtx
2274 1.1 mrg || x == stack_pointer_rtx
2275 1.1 mrg || x == pic_offset_table_rtx)
2276 1.1 mrg record = true;
2277 1.1 mrg else if (global_regs[regno])
2278 1.1 mrg record = false;
2279 1.1 mrg else if (fixed_regs[regno])
2280 1.1 mrg record = true;
2281 1.1 mrg else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2282 1.1 mrg record = true;
2283 1.1 mrg else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
2284 1.1 mrg record = false;
2285 1.1 mrg else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
2286 1.1 mrg record = false;
2287 1.1 mrg else
2288 1.1 mrg record = true;
2289 1.1 mrg
2290 1.1 mrg if (!record)
2291 1.1 mrg {
2292 1.1 mrg *do_not_record_p = 1;
2293 1.1 mrg return 0;
2294 1.1 mrg }
2295 1.1 mrg }
2296 1.1 mrg
2297 1.1 mrg hash += ((unsigned int) REG << 7);
2298 1.1 mrg hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2299 1.1 mrg return hash;
2300 1.1 mrg }
2301 1.1 mrg
2302 1.1 mrg /* We handle SUBREG of a REG specially because the underlying
2303 1.1 mrg reg changes its hash value with every value change; we don't
2304 1.1 mrg want to have to forget unrelated subregs when one subreg changes. */
2305 1.1 mrg case SUBREG:
2306 1.1 mrg {
2307 1.1 mrg if (REG_P (SUBREG_REG (x)))
2308 1.1 mrg {
2309 1.1 mrg hash += (((unsigned int) SUBREG << 7)
2310 1.1 mrg + REGNO (SUBREG_REG (x))
2311 1.1 mrg + (constant_lower_bound (SUBREG_BYTE (x))
2312 1.1 mrg / UNITS_PER_WORD));
2313 1.1 mrg return hash;
2314 1.1 mrg }
2315 1.1 mrg break;
2316 1.1 mrg }
2317 1.1 mrg
2318 1.1 mrg case CONST_INT:
2319 1.1 mrg hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2320 1.1 mrg + (unsigned int) INTVAL (x));
2321 1.1 mrg return hash;
2322 1.1 mrg
2323 1.1 mrg case CONST_WIDE_INT:
2324 1.1 mrg for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
2325 1.1 mrg hash += CONST_WIDE_INT_ELT (x, i);
2326 1.1 mrg return hash;
2327 1.1 mrg
2328 1.1 mrg case CONST_POLY_INT:
2329 1.1 mrg {
2330 1.1 mrg inchash::hash h;
2331 1.1 mrg h.add_int (hash);
2332 1.1 mrg for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2333 1.1 mrg h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
2334 1.1 mrg return h.end ();
2335 1.1 mrg }
2336 1.1 mrg
2337 1.1 mrg case CONST_DOUBLE:
2338 1.1 mrg /* This is like the general case, except that it only counts
2339 1.1 mrg the integers representing the constant. */
2340 1.1 mrg hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2341 1.1 mrg if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
2342 1.1 mrg hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2343 1.1 mrg + (unsigned int) CONST_DOUBLE_HIGH (x));
2344 1.1 mrg else
2345 1.1 mrg hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2346 1.1 mrg return hash;
2347 1.1 mrg
2348 1.1 mrg case CONST_FIXED:
2349 1.1 mrg hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2350 1.1 mrg hash += fixed_hash (CONST_FIXED_VALUE (x));
2351 1.1 mrg return hash;
2352 1.1 mrg
2353 1.1 mrg case CONST_VECTOR:
2354 1.1 mrg {
2355 1.1 mrg int units;
2356 1.1 mrg rtx elt;
2357 1.1 mrg
2358 1.1 mrg units = const_vector_encoded_nelts (x);
2359 1.1 mrg
2360 1.1 mrg for (i = 0; i < units; ++i)
2361 1.1 mrg {
2362 1.1 mrg elt = CONST_VECTOR_ENCODED_ELT (x, i);
2363 1.1 mrg hash += hash_rtx_cb (elt, GET_MODE (elt),
2364 1.1 mrg do_not_record_p, hash_arg_in_memory_p,
2365 1.1 mrg have_reg_qty, cb);
2366 1.1 mrg }
2367 1.1 mrg
2368 1.1 mrg return hash;
2369 1.1 mrg }
2370 1.1 mrg
2371 1.1 mrg /* Assume there is only one rtx object for any given label. */
2372 1.1 mrg case LABEL_REF:
2373 1.1 mrg /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2374 1.1 mrg differences and differences between each stage's debugging dumps. */
2375 1.1 mrg hash += (((unsigned int) LABEL_REF << 7)
2376 1.1 mrg + CODE_LABEL_NUMBER (label_ref_label (x)));
2377 1.1 mrg return hash;
2378 1.1 mrg
2379 1.1 mrg case SYMBOL_REF:
2380 1.1 mrg {
2381 1.1 mrg /* Don't hash on the symbol's address to avoid bootstrap differences.
2382 1.1 mrg Different hash values may cause expressions to be recorded in
2383 1.1 mrg different orders and thus different registers to be used in the
2384 1.1 mrg final assembler. This also avoids differences in the dump files
2385 1.1 mrg between various stages. */
2386 1.1 mrg unsigned int h = 0;
2387 1.1 mrg const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2388 1.1 mrg
2389 1.1 mrg while (*p)
2390 1.1 mrg h += (h << 7) + *p++; /* ??? revisit */
2391 1.1 mrg
2392 1.1 mrg hash += ((unsigned int) SYMBOL_REF << 7) + h;
2393 1.1 mrg return hash;
2394 1.1 mrg }
2395 1.1 mrg
2396 1.1 mrg case MEM:
2397 1.1 mrg /* We don't record if marked volatile or if BLKmode since we don't
2398 1.1 mrg know the size of the move. */
2399 1.1 mrg if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2400 1.1 mrg {
2401 1.1 mrg *do_not_record_p = 1;
2402 1.1 mrg return 0;
2403 1.1 mrg }
2404 1.1 mrg if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2405 1.1 mrg *hash_arg_in_memory_p = 1;
2406 1.1 mrg
2407 1.1 mrg /* Now that we have already found this special case,
2408 1.1 mrg might as well speed it up as much as possible. */
2409 1.1 mrg hash += (unsigned) MEM;
2410 1.1 mrg x = XEXP (x, 0);
2411 1.1 mrg goto repeat;
2412 1.1 mrg
2413 1.1 mrg case USE:
2414 1.1 mrg /* A USE that mentions non-volatile memory needs special
2415 1.1 mrg handling since the MEM may be BLKmode which normally
2416 1.1 mrg prevents an entry from being made. Pure calls are
2417 1.1 mrg marked by a USE which mentions BLKmode memory.
2418 1.1 mrg See calls.cc:emit_call_1. */
2419 1.1 mrg if (MEM_P (XEXP (x, 0))
2420 1.1 mrg && ! MEM_VOLATILE_P (XEXP (x, 0)))
2421 1.1 mrg {
2422 1.1 mrg hash += (unsigned) USE;
2423 1.1 mrg x = XEXP (x, 0);
2424 1.1 mrg
2425 1.1 mrg if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2426 1.1 mrg *hash_arg_in_memory_p = 1;
2427 1.1 mrg
2428 1.1 mrg /* Now that we have already found this special case,
2429 1.1 mrg might as well speed it up as much as possible. */
2430 1.1 mrg hash += (unsigned) MEM;
2431 1.1 mrg x = XEXP (x, 0);
2432 1.1 mrg goto repeat;
2433 1.1 mrg }
2434 1.1 mrg break;
2435 1.1 mrg
2436 1.1 mrg case PRE_DEC:
2437 1.1 mrg case PRE_INC:
2438 1.1 mrg case POST_DEC:
2439 1.1 mrg case POST_INC:
2440 1.1 mrg case PRE_MODIFY:
2441 1.1 mrg case POST_MODIFY:
2442 1.1 mrg case PC:
2443 1.1 mrg case CALL:
2444 1.1 mrg case UNSPEC_VOLATILE:
2445 1.1 mrg if (do_not_record_p) {
2446 1.1 mrg *do_not_record_p = 1;
2447 1.1 mrg return 0;
2448 1.1 mrg }
2449 1.1 mrg else
2450 1.1 mrg return hash;
2451 1.1 mrg break;
2452 1.1 mrg
2453 1.1 mrg case ASM_OPERANDS:
2454 1.1 mrg if (do_not_record_p && MEM_VOLATILE_P (x))
2455 1.1 mrg {
2456 1.1 mrg *do_not_record_p = 1;
2457 1.1 mrg return 0;
2458 1.1 mrg }
2459 1.1 mrg else
2460 1.1 mrg {
2461 1.1 mrg /* We don't want to take the filename and line into account. */
2462 1.1 mrg hash += (unsigned) code + (unsigned) GET_MODE (x)
2463 1.1 mrg + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2464 1.1 mrg + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2465 1.1 mrg + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2466 1.1 mrg
2467 1.1 mrg if (ASM_OPERANDS_INPUT_LENGTH (x))
2468 1.1 mrg {
2469 1.1 mrg for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2470 1.1 mrg {
2471 1.1 mrg hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2472 1.1 mrg GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2473 1.1 mrg do_not_record_p, hash_arg_in_memory_p,
2474 1.1 mrg have_reg_qty, cb)
2475 1.1 mrg + hash_rtx_string
2476 1.1 mrg (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2477 1.1 mrg }
2478 1.1 mrg
2479 1.1 mrg hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2480 1.1 mrg x = ASM_OPERANDS_INPUT (x, 0);
2481 1.1 mrg mode = GET_MODE (x);
2482 1.1 mrg goto repeat;
2483 1.1 mrg }
2484 1.1 mrg
2485 1.1 mrg return hash;
2486 1.1 mrg }
2487 1.1 mrg break;
2488 1.1 mrg
2489 1.1 mrg default:
2490 1.1 mrg break;
2491 1.1 mrg }
2492 1.1 mrg
2493 1.1 mrg i = GET_RTX_LENGTH (code) - 1;
2494 1.1 mrg hash += (unsigned) code + (unsigned) GET_MODE (x);
2495 1.1 mrg fmt = GET_RTX_FORMAT (code);
2496 1.1 mrg for (; i >= 0; i--)
2497 1.1 mrg {
2498 1.1 mrg switch (fmt[i])
2499 1.1 mrg {
2500 1.1 mrg case 'e':
2501 1.1 mrg /* If we are about to do the last recursive call
2502 1.1 mrg needed at this level, change it into iteration.
2503 1.1 mrg This function is called enough to be worth it. */
2504 1.1 mrg if (i == 0)
2505 1.1 mrg {
2506 1.1 mrg x = XEXP (x, i);
2507 1.1 mrg goto repeat;
2508 1.1 mrg }
2509 1.1 mrg
2510 1.1 mrg hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
2511 1.1 mrg hash_arg_in_memory_p,
2512 1.1 mrg have_reg_qty, cb);
2513 1.1 mrg break;
2514 1.1 mrg
2515 1.1 mrg case 'E':
2516 1.1 mrg for (j = 0; j < XVECLEN (x, i); j++)
2517 1.1 mrg hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
2518 1.1 mrg hash_arg_in_memory_p,
2519 1.1 mrg have_reg_qty, cb);
2520 1.1 mrg break;
2521 1.1 mrg
2522 1.1 mrg case 's':
2523 1.1 mrg hash += hash_rtx_string (XSTR (x, i));
2524 1.1 mrg break;
2525 1.1 mrg
2526 1.1 mrg case 'i':
2527 1.1 mrg hash += (unsigned int) XINT (x, i);
2528 1.1 mrg break;
2529 1.1 mrg
2530 1.1 mrg case 'p':
2531 1.1 mrg hash += constant_lower_bound (SUBREG_BYTE (x));
2532 1.1 mrg break;
2533 1.1 mrg
2534 1.1 mrg case '0': case 't':
2535 1.1 mrg /* Unused. */
2536 1.1 mrg break;
2537 1.1 mrg
2538 1.1 mrg default:
2539 1.1 mrg gcc_unreachable ();
2540 1.1 mrg }
2541 1.1 mrg }
2542 1.1 mrg
2543 1.1 mrg return hash;
2544 1.1 mrg }
2545 1.1 mrg
2546 1.1 mrg /* Hash an rtx. We are careful to make sure the value is never negative.
2547 1.1 mrg Equivalent registers hash identically.
2548 1.1 mrg MODE is used in hashing for CONST_INTs only;
2549 1.1 mrg otherwise the mode of X is used.
2550 1.1 mrg
2551 1.1 mrg Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2552 1.1 mrg
2553 1.1 mrg If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2554 1.1 mrg a MEM rtx which does not have the MEM_READONLY_P flag set.
2555 1.1 mrg
2556 1.1 mrg Note that cse_insn knows that the hash code of a MEM expression
2557 1.1 mrg is just (int) MEM plus the hash code of the address. */
2558 1.1 mrg
2559 1.1 mrg unsigned
2560 1.1 mrg hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p,
2561 1.1 mrg int *hash_arg_in_memory_p, bool have_reg_qty)
2562 1.1 mrg {
2563 1.1 mrg return hash_rtx_cb (x, mode, do_not_record_p,
2564 1.1 mrg hash_arg_in_memory_p, have_reg_qty, NULL);
2565 1.1 mrg }
2566 1.1 mrg
2567 1.1 mrg /* Hash an rtx X for cse via hash_rtx.
2568 1.1 mrg Stores 1 in do_not_record if any subexpression is volatile.
2569 1.1 mrg Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2570 1.1 mrg does not have the MEM_READONLY_P flag set. */
2571 1.1 mrg
2572 1.1 mrg static inline unsigned
2573 1.1 mrg canon_hash (rtx x, machine_mode mode)
2574 1.1 mrg {
2575 1.1 mrg return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2576 1.1 mrg }
2577 1.1 mrg
2578 1.1 mrg /* Like canon_hash but with no side effects, i.e. do_not_record
2579 1.1 mrg and hash_arg_in_memory are not changed. */
2580 1.1 mrg
2581 1.1 mrg static inline unsigned
2582 1.1 mrg safe_hash (rtx x, machine_mode mode)
2583 1.1 mrg {
2584 1.1 mrg int dummy_do_not_record;
2585 1.1 mrg return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2586 1.1 mrg }
2587 1.1 mrg
2588 1.1 mrg /* Return 1 iff X and Y would canonicalize into the same thing,
2590 1.1 mrg without actually constructing the canonicalization of either one.
2591 1.1 mrg If VALIDATE is nonzero,
2592 1.1 mrg we assume X is an expression being processed from the rtl
2593 1.1 mrg and Y was found in the hash table. We check register refs
2594 1.1 mrg in Y for being marked as valid.
2595 1.1 mrg
2596 1.1 mrg If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2597 1.1 mrg
int
exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;

  /* A null rtx is only equivalent to another null rtx.  */
  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* Codes with special comparison rules; anything not handled here
     falls through to the generic format-string walk below.  */
  switch (code)
    {
    case PC:
    CASE_CONST_UNIQUE:
      /* These rtxes are uniquified, so pointer identity decides.  */
      return x == y;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      /* Encodings match; still compare the encoded elements below.  */
      break;

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case SYMBOL_REF:
      /* Symbol names are shared, so pointer comparison suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      if (for_gcse)
	return REGNO (x) == REGNO (y);
      else
	{
	  unsigned int regno = REGNO (y);
	  unsigned int i;
	  unsigned int endregno = END_REGNO (y);

	  /* If the quantities are not the same, the expressions are not
	     equivalent.  If the quantities match and we are not asked to
	     validate, they are equivalent.  Otherwise, ensure all the regs
	     Y touches are up-to-date (still valid in the hash table).  */

	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
	    return 0;

	  if (! validate)
	    return 1;

	  /* Y came from the hash table; check none of its hard regs has
	     been modified since it was entered.  */
	  for (i = regno; i < endregno; i++)
	    if (REG_IN_TABLE (i) != REG_TICK (i))
	      return 0;

	  return 1;
	}

    case MEM:
      if (for_gcse)
	{
	  /* A volatile mem should not be considered equivalent to any
	     other.  */
	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	    return 0;

	  /* Can't merge two expressions in different alias sets, since we
	     can decide that the expression is transparent in a block when
	     it isn't, due to it being set with the different alias set.

	     Also, can't merge two expressions with different MEM_ATTRS.
	     They could e.g. be two different entities allocated into the
	     same space on the stack (see e.g. PR25130).  In that case, the
	     MEM addresses can be the same, even though the two MEMs are
	     absolutely not equivalent.

	     But because really all MEM attributes should be the same for
	     equivalent MEMs, we just use the invariant that MEMs that have
	     the same attributes share the same mem_attrs data structure.  */
	  if (!mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
	    return 0;

	  /* If we are handling exceptions, we cannot consider two expressions
	     with different trapping status as equivalent, because simple_mem
	     might accept one and reject the other.  */
	  if (cfun->can_throw_non_call_exceptions
	      && (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y)))
	    return 0;
	}
      /* Addresses still compared by the generic walk below.  */
      break;

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
			    validate, for_gcse)
	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
			       validate, for_gcse))
	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
			       validate, for_gcse)
		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
				  validate, for_gcse)));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      /* Compare every input operand and its constraint string.  */
      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
			       ASM_OPERANDS_INPUT (y, i),
			       validate, for_gcse)
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  Drives the comparison
     by the rtx code's format string, recursing on sub-expressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  /* Sub-expression: recurse.  */
	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
			     validate, for_gcse))
	    return 0;
	  break;

	case 'E':
	  /* Vector of sub-expressions: lengths, then element-wise.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
			       validate, for_gcse))
	      return 0;
	  break;

	case 's':
	  /* String operand: compare contents, not pointers.  */
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'p':
	  /* Poly-int operand (e.g. SUBREG_BYTE).  */
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return 0;
	  break;

	case '0':
	case 't':
	  /* Unused or tree operand: ignored for equivalence.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return 1;
}
2805 1.1 mrg
2806 1.1 mrg /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2808 1.1 mrg the result if necessary. INSN is as for canon_reg. */
2809 1.1 mrg
2810 1.1 mrg static void
2811 1.1 mrg validate_canon_reg (rtx *xloc, rtx_insn *insn)
2812 1.1 mrg {
2813 1.1 mrg if (*xloc)
2814 1.1 mrg {
2815 1.1 mrg rtx new_rtx = canon_reg (*xloc, insn);
2816 1.1 mrg
2817 1.1 mrg /* If replacing pseudo with hard reg or vice versa, ensure the
2818 1.1 mrg insn remains valid. Likewise if the insn has MATCH_DUPs. */
2819 1.1 mrg gcc_assert (insn && new_rtx);
2820 1.1 mrg validate_change (insn, xloc, new_rtx, 1);
2821 1.1 mrg }
2822 1.1 mrg }
2823 1.1 mrg
2824 1.1 mrg /* Canonicalize an expression:
2825 1.1 mrg replace each register reference inside it
2826 1.1 mrg with the "oldest" equivalent register.
2827 1.1 mrg
2828 1.1 mrg If INSN is nonzero validate_change is used to ensure that INSN remains valid
2829 1.1 mrg after we make our substitution. The calls are made with IN_GROUP nonzero
2830 1.1 mrg so apply_change_group must be called upon the outermost return from this
2831 1.1 mrg function (unless INSN is zero). The result of apply_change_group can
2832 1.1 mrg generally be discarded since the changes we are making are optional. */
2833 1.1 mrg
static rtx
canon_reg (rtx x, rtx_insn *insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    /* Leaf rtxes that contain no register references: nothing to do.  */
    case PC:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
	int first;
	int q;
	struct qty_table_elem *ent;

	/* Never replace a hard reg, because hard regs can appear
	   in more than one machine mode, and we must preserve the mode
	   of each occurrence.  Also, some hard regs appear in
	   MEMs that are shared and mustn't be altered.  Don't try to
	   replace any reg that maps to a reg of class NO_REGS.  */
	if (REGNO (x) < FIRST_PSEUDO_REGISTER
	    || ! REGNO_QTY_VALID_P (REGNO (x)))
	  return x;

	/* Look up the oldest register in X's quantity class and
	   substitute it: an existing pseudo directly, a hard reg only if
	   its class is usable (not NO_REGS), in X's recorded mode.  */
	q = REG_QTY (REGNO (x));
	ent = &qty_table[q];
	first = ent->first_reg;
	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
		: REGNO_REG_CLASS (first) == NO_REGS ? x
		: gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  /* Compound rtx: canonicalize each sub-expression in place, recursing
     via validate_canon_reg so each replacement is queued on INSN's
     pending change group.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
	validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
2897 1.1 mrg
2898 1.1 mrg /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table and
   find what values are actually being compared.
2902 1.1 mrg
2903 1.1 mrg *PARG1 and *PARG2 are updated to contain the rtx representing the values
2904 1.1 mrg actually being compared. For example, if *PARG1 was (reg:CC CC_REG) and
2905 1.1 mrg *PARG2 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that
2906 1.1 mrg were compared to produce (reg:CC CC_REG).
2907 1.1 mrg
2908 1.1 mrg The return value is the comparison operator and is either the code of
2909 1.1 mrg A or the code corresponding to the inverse of the comparison. */
2910 1.1 mrg
static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
		      machine_mode *pmode1, machine_mode *pmode2)
{
  rtx arg1, arg2;
  /* Comparisons already examined, to avoid cycling between equivalent
     hash-table entries.  Allocated lazily on the second iteration.  */
  hash_set<rtx> *visited = NULL;
  /* Set nonzero when we find something of interest.  */
  rtx x = NULL;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set nonzero when we find a comparison whose sense must be
	 reversed (EQ/GE-style tests of a STORE_FLAG result).  */
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* Remember state from previous iteration.  */
      if (x)
	{
	  if (!visited)
	    visited = new hash_set<rtx>;
	  visited->add (x);
	  x = 0;
	}

      /* If arg1 is a COMPARE, extract the comparison arguments from it.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
	{
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  if (code == NE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
		      REAL_VALUE_NEGATIVE (fsfv)))
#endif
	      )
	    x = arg1;
	  else if (code == EQ
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
      if (p)
	{
	  p = p->first_same_value;

	  /* If what we compare is already known to be constant, that is as
	     good as it gets.
	     We need to break the loop in this case, because otherwise we
	     can have an infinite loop when looking at a reg that is known
	     to be a constant which is the same as a comparison of a reg
	     against zero which appears later in the insn stream, which in
	     turn is constant and the same as the comparison of the first reg
	     against zero...  */
	  if (p->is_const)
	    break;
	}

      /* Scan ARG1's equivalence class for a usable comparison or a
	 safer (non-trapping) equivalent operand.  */
      for (; p; p = p->next_same_value)
	{
	  machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
	    continue;

	  /* If it's a comparison we've used before, skip it.  */
	  if (visited && visited->contains (p->exp))
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machine with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && val_signbit_known_set_p (inner_mode,
						   STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (code == LT
		       && SCALAR_FLOAT_MODE_P (inner_mode)
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
		  && COMPARISON_P (p->exp)))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& val_signbit_known_set_p (inner_mode,
						    STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == GE
			&& SCALAR_FLOAT_MODE_P (inner_mode)
			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			    REAL_VALUE_NEGATIVE (fsfv)))
#endif
		    )
		   && COMPARISON_P (p->exp))
	    {
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this is a non-trapping address, e.g. fp + constant, the
	     equivalent is a better operand since it may let us predict
	     the value of the comparison.  */
	  else if (!rtx_addr_can_trap_p (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      /* If we need to reverse the comparison, make sure that is
	 possible -- we can't necessarily infer the value of GE from LT
	 with floating-point operands.  */
      if (reverse_code)
	{
	  enum rtx_code reversed = reversed_comparison_code (x, NULL);
	  if (reversed == UNKNOWN)
	    break;
	  else
	    code = reversed;
	}
      else if (COMPARISON_P (x))
	code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  if (visited)
    delete visited;
  return code;
}
3097 1.1 mrg
3098 1.1 mrg /* If X is a nontrivial arithmetic operation on an argument for which
3100 1.1 mrg a constant value can be determined, return the result of operating
3101 1.1 mrg on that value, as a constant. Otherwise, return X, possibly with
3102 1.1 mrg one or more operands changed to a forward-propagated constant.
3103 1.1 mrg
3104 1.1 mrg If X is a register whose contents are known, we do NOT return
3105 1.1 mrg those contents here; equiv_constant is called to perform that task.
3106 1.1 mrg For SUBREGs and MEMs, we do that both here and in equiv_constant.
3107 1.1 mrg
3108 1.1 mrg INSN is the insn that we may be modifying. If it is 0, make a copy
3109 1.1 mrg of X before modifying it. */
3110 1.1 mrg
3111 1.1 mrg static rtx
3112 1.1 mrg fold_rtx (rtx x, rtx_insn *insn)
3113 1.1 mrg {
3114 1.1 mrg enum rtx_code code;
3115 1.1 mrg machine_mode mode;
3116 1.1 mrg const char *fmt;
3117 1.1 mrg int i;
3118 1.1 mrg rtx new_rtx = 0;
3119 1.1 mrg int changed = 0;
3120 1.1 mrg poly_int64 xval;
3121 1.1 mrg
3122 1.1 mrg /* Operands of X. */
3123 1.1 mrg /* Workaround -Wmaybe-uninitialized false positive during
3124 1.1 mrg profiledbootstrap by initializing them. */
3125 1.1 mrg rtx folded_arg0 = NULL_RTX;
3126 1.1 mrg rtx folded_arg1 = NULL_RTX;
3127 1.1 mrg
3128 1.1 mrg /* Constant equivalents of first three operands of X;
3129 1.1 mrg 0 when no such equivalent is known. */
3130 1.1 mrg rtx const_arg0;
3131 1.1 mrg rtx const_arg1;
3132 1.1 mrg rtx const_arg2;
3133 1.1 mrg
3134 1.1 mrg /* The mode of the first operand of X. We need this for sign and zero
3135 1.1 mrg extends. */
3136 1.1 mrg machine_mode mode_arg0;
3137 1.1 mrg
3138 1.1 mrg if (x == 0)
3139 1.1 mrg return x;
3140 1.1 mrg
3141 1.1 mrg /* Try to perform some initial simplifications on X. */
3142 1.1 mrg code = GET_CODE (x);
3143 1.1 mrg switch (code)
3144 1.1 mrg {
3145 1.1 mrg case MEM:
3146 1.1 mrg case SUBREG:
3147 1.1 mrg /* The first operand of a SIGN/ZERO_EXTRACT has a different meaning
3148 1.1 mrg than it would in other contexts. Basically its mode does not
3149 1.1 mrg signify the size of the object read. That information is carried
3150 1.1 mrg by size operand. If we happen to have a MEM of the appropriate
3151 1.1 mrg mode in our tables with a constant value we could simplify the
3152 1.1 mrg extraction incorrectly if we allowed substitution of that value
3153 1.1 mrg for the MEM. */
3154 1.1 mrg case ZERO_EXTRACT:
3155 1.1 mrg case SIGN_EXTRACT:
3156 1.1 mrg if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3157 1.1 mrg return new_rtx;
3158 1.1 mrg return x;
3159 1.1 mrg
3160 1.1 mrg case CONST:
3161 1.1 mrg CASE_CONST_ANY:
3162 1.1 mrg case SYMBOL_REF:
3163 1.1 mrg case LABEL_REF:
3164 1.1 mrg case REG:
3165 1.1 mrg case PC:
3166 1.1 mrg /* No use simplifying an EXPR_LIST
3167 1.1 mrg since they are used only for lists of args
3168 1.1 mrg in a function call's REG_EQUAL note. */
3169 1.1 mrg case EXPR_LIST:
3170 1.1 mrg return x;
3171 1.1 mrg
3172 1.1 mrg case ASM_OPERANDS:
3173 1.1 mrg if (insn)
3174 1.1 mrg {
3175 1.1 mrg for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3176 1.1 mrg validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3177 1.1 mrg fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3178 1.1 mrg }
3179 1.1 mrg return x;
3180 1.1 mrg
3181 1.1 mrg case CALL:
3182 1.1 mrg if (NO_FUNCTION_CSE && CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3183 1.1 mrg return x;
3184 1.1 mrg break;
3185 1.1 mrg case VEC_SELECT:
3186 1.1 mrg {
3187 1.1 mrg rtx trueop0 = XEXP (x, 0);
3188 1.1 mrg mode = GET_MODE (trueop0);
3189 1.1 mrg rtx trueop1 = XEXP (x, 1);
3190 1.1 mrg /* If we select a low-part subreg, return that. */
3191 1.1 mrg if (vec_series_lowpart_p (GET_MODE (x), mode, trueop1))
3192 1.1 mrg {
3193 1.1 mrg rtx new_rtx = lowpart_subreg (GET_MODE (x), trueop0, mode);
3194 1.1 mrg if (new_rtx != NULL_RTX)
3195 1.1 mrg return new_rtx;
3196 1.1 mrg }
3197 1.1 mrg }
3198 1.1 mrg
3199 1.1 mrg /* Anything else goes through the loop below. */
3200 1.1 mrg default:
3201 1.1 mrg break;
3202 1.1 mrg }
3203 1.1 mrg
3204 1.1 mrg mode = GET_MODE (x);
3205 1.1 mrg const_arg0 = 0;
3206 1.1 mrg const_arg1 = 0;
3207 1.1 mrg const_arg2 = 0;
3208 1.1 mrg mode_arg0 = VOIDmode;
3209 1.1 mrg
3210 1.1 mrg /* Try folding our operands.
3211 1.1 mrg Then see which ones have constant values known. */
3212 1.1 mrg
3213 1.1 mrg fmt = GET_RTX_FORMAT (code);
3214 1.1 mrg for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3215 1.1 mrg if (fmt[i] == 'e')
3216 1.1 mrg {
3217 1.1 mrg rtx folded_arg = XEXP (x, i), const_arg;
3218 1.1 mrg machine_mode mode_arg = GET_MODE (folded_arg);
3219 1.1 mrg
3220 1.1 mrg switch (GET_CODE (folded_arg))
3221 1.1 mrg {
3222 1.1 mrg case MEM:
3223 1.1 mrg case REG:
3224 1.1 mrg case SUBREG:
3225 1.1 mrg const_arg = equiv_constant (folded_arg);
3226 1.1 mrg break;
3227 1.1 mrg
3228 1.1 mrg case CONST:
3229 1.1 mrg CASE_CONST_ANY:
3230 1.1 mrg case SYMBOL_REF:
3231 1.1 mrg case LABEL_REF:
3232 1.1 mrg const_arg = folded_arg;
3233 1.1 mrg break;
3234 1.1 mrg
3235 1.1 mrg default:
3236 1.1 mrg folded_arg = fold_rtx (folded_arg, insn);
3237 1.1 mrg const_arg = equiv_constant (folded_arg);
3238 1.1 mrg break;
3239 1.1 mrg }
3240 1.1 mrg
3241 1.1 mrg /* For the first three operands, see if the operand
3242 1.1 mrg is constant or equivalent to a constant. */
3243 1.1 mrg switch (i)
3244 1.1 mrg {
3245 1.1 mrg case 0:
3246 1.1 mrg folded_arg0 = folded_arg;
3247 1.1 mrg const_arg0 = const_arg;
3248 1.1 mrg mode_arg0 = mode_arg;
3249 1.1 mrg break;
3250 1.1 mrg case 1:
3251 1.1 mrg folded_arg1 = folded_arg;
3252 1.1 mrg const_arg1 = const_arg;
3253 1.1 mrg break;
3254 1.1 mrg case 2:
3255 1.1 mrg const_arg2 = const_arg;
3256 1.1 mrg break;
3257 1.1 mrg }
3258 1.1 mrg
3259 1.1 mrg /* Pick the least expensive of the argument and an equivalent constant
3260 1.1 mrg argument. */
3261 1.1 mrg if (const_arg != 0
3262 1.1 mrg && const_arg != folded_arg
3263 1.1 mrg && (COST_IN (const_arg, mode_arg, code, i)
3264 1.1 mrg <= COST_IN (folded_arg, mode_arg, code, i))
3265 1.1 mrg
3266 1.1 mrg /* It's not safe to substitute the operand of a conversion
3267 1.1 mrg operator with a constant, as the conversion's identity
3268 1.1 mrg depends upon the mode of its operand. This optimization
3269 1.1 mrg is handled by the call to simplify_unary_operation. */
3270 1.1 mrg && (GET_RTX_CLASS (code) != RTX_UNARY
3271 1.1 mrg || GET_MODE (const_arg) == mode_arg0
3272 1.1 mrg || (code != ZERO_EXTEND
3273 1.1 mrg && code != SIGN_EXTEND
3274 1.1 mrg && code != TRUNCATE
3275 1.1 mrg && code != FLOAT_TRUNCATE
3276 1.1 mrg && code != FLOAT_EXTEND
3277 1.1 mrg && code != FLOAT
3278 1.1 mrg && code != FIX
3279 1.1 mrg && code != UNSIGNED_FLOAT
3280 1.1 mrg && code != UNSIGNED_FIX)))
3281 1.1 mrg folded_arg = const_arg;
3282 1.1 mrg
3283 1.1 mrg if (folded_arg == XEXP (x, i))
3284 1.1 mrg continue;
3285 1.1 mrg
3286 1.1 mrg if (insn == NULL_RTX && !changed)
3287 1.1 mrg x = copy_rtx (x);
3288 1.1 mrg changed = 1;
3289 1.1 mrg validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3290 1.1 mrg }
3291 1.1 mrg
3292 1.1 mrg if (changed)
3293 1.1 mrg {
3294 1.1 mrg /* Canonicalize X if necessary, and keep const_argN and folded_argN
3295 1.1 mrg consistent with the order in X. */
3296 1.1 mrg if (canonicalize_change_group (insn, x))
3297 1.1 mrg {
3298 1.1 mrg std::swap (const_arg0, const_arg1);
3299 1.1 mrg std::swap (folded_arg0, folded_arg1);
3300 1.1 mrg }
3301 1.1 mrg
3302 1.1 mrg apply_change_group ();
3303 1.1 mrg }
3304 1.1 mrg
3305 1.1 mrg /* If X is an arithmetic operation, see if we can simplify it. */
3306 1.1 mrg
3307 1.1 mrg switch (GET_RTX_CLASS (code))
3308 1.1 mrg {
3309 1.1 mrg case RTX_UNARY:
3310 1.1 mrg {
3311 1.1 mrg /* We can't simplify extension ops unless we know the
3312 1.1 mrg original mode. */
3313 1.1 mrg if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3314 1.1 mrg && mode_arg0 == VOIDmode)
3315 1.1 mrg break;
3316 1.1 mrg
3317 1.1 mrg new_rtx = simplify_unary_operation (code, mode,
3318 1.1 mrg const_arg0 ? const_arg0 : folded_arg0,
3319 1.1 mrg mode_arg0);
3320 1.1 mrg }
3321 1.1 mrg break;
3322 1.1 mrg
3323 1.1 mrg case RTX_COMPARE:
3324 1.1 mrg case RTX_COMM_COMPARE:
3325 1.1 mrg /* See what items are actually being compared and set FOLDED_ARG[01]
3326 1.1 mrg to those values and CODE to the actual comparison code. If any are
3327 1.1 mrg constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3328 1.1 mrg do anything if both operands are already known to be constant. */
3329 1.1 mrg
3330 1.1 mrg /* ??? Vector mode comparisons are not supported yet. */
3331 1.1 mrg if (VECTOR_MODE_P (mode))
3332 1.1 mrg break;
3333 1.1 mrg
3334 1.1 mrg if (const_arg0 == 0 || const_arg1 == 0)
3335 1.1 mrg {
3336 1.1 mrg struct table_elt *p0, *p1;
3337 1.1 mrg rtx true_rtx, false_rtx;
3338 1.1 mrg machine_mode mode_arg1;
3339 1.1 mrg
3340 1.1 mrg if (SCALAR_FLOAT_MODE_P (mode))
3341 1.1 mrg {
3342 1.1 mrg #ifdef FLOAT_STORE_FLAG_VALUE
3343 1.1 mrg true_rtx = (const_double_from_real_value
3344 1.1 mrg (FLOAT_STORE_FLAG_VALUE (mode), mode));
3345 1.1 mrg #else
3346 1.1 mrg true_rtx = NULL_RTX;
3347 1.1 mrg #endif
3348 1.1 mrg false_rtx = CONST0_RTX (mode);
3349 1.1 mrg }
3350 1.1 mrg else
3351 1.1 mrg {
3352 1.1 mrg true_rtx = const_true_rtx;
3353 1.1 mrg false_rtx = const0_rtx;
3354 1.1 mrg }
3355 1.1 mrg
3356 1.1 mrg code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3357 1.1 mrg &mode_arg0, &mode_arg1);
3358 1.1 mrg
3359 1.1 mrg /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3360 1.1 mrg what kinds of things are being compared, so we can't do
3361 1.1 mrg anything with this comparison. */
3362 1.1 mrg
3363 1.1 mrg if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3364 1.1 mrg break;
3365 1.1 mrg
3366 1.1 mrg const_arg0 = equiv_constant (folded_arg0);
3367 1.1 mrg const_arg1 = equiv_constant (folded_arg1);
3368 1.1 mrg
3369 1.1 mrg /* If we do not now have two constants being compared, see
3370 1.1 mrg if we can nevertheless deduce some things about the
3371 1.1 mrg comparison. */
3372 1.1 mrg if (const_arg0 == 0 || const_arg1 == 0)
3373 1.1 mrg {
3374 1.1 mrg if (const_arg1 != NULL)
3375 1.1 mrg {
3376 1.1 mrg rtx cheapest_simplification;
3377 1.1 mrg int cheapest_cost;
3378 1.1 mrg rtx simp_result;
3379 1.1 mrg struct table_elt *p;
3380 1.1 mrg
3381 1.1 mrg /* See if we can find an equivalent of folded_arg0
3382 1.1 mrg that gets us a cheaper expression, possibly a
3383 1.1 mrg constant through simplifications. */
3384 1.1 mrg p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
3385 1.1 mrg mode_arg0);
3386 1.1 mrg
3387 1.1 mrg if (p != NULL)
3388 1.1 mrg {
3389 1.1 mrg cheapest_simplification = x;
3390 1.1 mrg cheapest_cost = COST (x, mode);
3391 1.1 mrg
3392 1.1 mrg for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3393 1.1 mrg {
3394 1.1 mrg int cost;
3395 1.1 mrg
3396 1.1 mrg /* If the entry isn't valid, skip it. */
3397 1.1 mrg if (! exp_equiv_p (p->exp, p->exp, 1, false))
3398 1.1 mrg continue;
3399 1.1 mrg
3400 1.1 mrg /* Try to simplify using this equivalence. */
3401 1.1 mrg simp_result
3402 1.1 mrg = simplify_relational_operation (code, mode,
3403 1.1 mrg mode_arg0,
3404 1.1 mrg p->exp,
3405 1.1 mrg const_arg1);
3406 1.1 mrg
3407 1.1 mrg if (simp_result == NULL)
3408 1.1 mrg continue;
3409 1.1 mrg
3410 1.1 mrg cost = COST (simp_result, mode);
3411 1.1 mrg if (cost < cheapest_cost)
3412 1.1 mrg {
3413 1.1 mrg cheapest_cost = cost;
3414 1.1 mrg cheapest_simplification = simp_result;
3415 1.1 mrg }
3416 1.1 mrg }
3417 1.1 mrg
3418 1.1 mrg /* If we have a cheaper expression now, use that
3419 1.1 mrg and try folding it further, from the top. */
3420 1.1 mrg if (cheapest_simplification != x)
3421 1.1 mrg return fold_rtx (copy_rtx (cheapest_simplification),
3422 1.1 mrg insn);
3423 1.1 mrg }
3424 1.1 mrg }
3425 1.1 mrg
3426 1.1 mrg /* See if the two operands are the same. */
3427 1.1 mrg
3428 1.1 mrg if ((REG_P (folded_arg0)
3429 1.1 mrg && REG_P (folded_arg1)
3430 1.1 mrg && (REG_QTY (REGNO (folded_arg0))
3431 1.1 mrg == REG_QTY (REGNO (folded_arg1))))
3432 1.1 mrg || ((p0 = lookup (folded_arg0,
3433 1.1 mrg SAFE_HASH (folded_arg0, mode_arg0),
3434 1.1 mrg mode_arg0))
3435 1.1 mrg && (p1 = lookup (folded_arg1,
3436 1.1 mrg SAFE_HASH (folded_arg1, mode_arg0),
3437 1.1 mrg mode_arg0))
3438 1.1 mrg && p0->first_same_value == p1->first_same_value))
3439 1.1 mrg folded_arg1 = folded_arg0;
3440 1.1 mrg
3441 1.1 mrg /* If FOLDED_ARG0 is a register, see if the comparison we are
3442 1.1 mrg doing now is either the same as we did before or the reverse
3443 1.1 mrg (we only check the reverse if not floating-point). */
3444 1.1 mrg else if (REG_P (folded_arg0))
3445 1.1 mrg {
3446 1.1 mrg int qty = REG_QTY (REGNO (folded_arg0));
3447 1.1 mrg
3448 1.1 mrg if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3449 1.1 mrg {
3450 1.1 mrg struct qty_table_elem *ent = &qty_table[qty];
3451 1.1 mrg
3452 1.1 mrg if ((comparison_dominates_p (ent->comparison_code, code)
3453 1.1 mrg || (! FLOAT_MODE_P (mode_arg0)
3454 1.1 mrg && comparison_dominates_p (ent->comparison_code,
3455 1.1 mrg reverse_condition (code))))
3456 1.1 mrg && (rtx_equal_p (ent->comparison_const, folded_arg1)
3457 1.1 mrg || (const_arg1
3458 1.1 mrg && rtx_equal_p (ent->comparison_const,
3459 1.1 mrg const_arg1))
3460 1.1 mrg || (REG_P (folded_arg1)
3461 1.1 mrg && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3462 1.1 mrg {
3463 1.1 mrg if (comparison_dominates_p (ent->comparison_code, code))
3464 1.1 mrg {
3465 1.1 mrg if (true_rtx)
3466 1.1 mrg return true_rtx;
3467 1.1 mrg else
3468 1.1 mrg break;
3469 1.1 mrg }
3470 1.1 mrg else
3471 1.1 mrg return false_rtx;
3472 1.1 mrg }
3473 1.1 mrg }
3474 1.1 mrg }
3475 1.1 mrg }
3476 1.1 mrg }
3477 1.1 mrg
3478 1.1 mrg /* If we are comparing against zero, see if the first operand is
3479 1.1 mrg equivalent to an IOR with a constant. If so, we may be able to
3480 1.1 mrg determine the result of this comparison. */
3481 1.1 mrg if (const_arg1 == const0_rtx && !const_arg0)
3482 1.1 mrg {
3483 1.1 mrg rtx y = lookup_as_function (folded_arg0, IOR);
3484 1.1 mrg rtx inner_const;
3485 1.1 mrg
3486 1.1 mrg if (y != 0
3487 1.1 mrg && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3488 1.1 mrg && CONST_INT_P (inner_const)
3489 1.1 mrg && INTVAL (inner_const) != 0)
3490 1.1 mrg folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
3491 1.1 mrg }
3492 1.1 mrg
3493 1.1 mrg {
3494 1.1 mrg rtx op0 = const_arg0 ? const_arg0 : copy_rtx (folded_arg0);
3495 1.1 mrg rtx op1 = const_arg1 ? const_arg1 : copy_rtx (folded_arg1);
3496 1.1 mrg new_rtx = simplify_relational_operation (code, mode, mode_arg0,
3497 1.1 mrg op0, op1);
3498 1.1 mrg }
3499 1.1 mrg break;
3500 1.1 mrg
3501 1.1 mrg case RTX_BIN_ARITH:
3502 1.1 mrg case RTX_COMM_ARITH:
3503 1.1 mrg switch (code)
3504 1.1 mrg {
3505 1.1 mrg case PLUS:
3506 1.1 mrg /* If the second operand is a LABEL_REF, see if the first is a MINUS
3507 1.1 mrg with that LABEL_REF as its second operand. If so, the result is
3508 1.1 mrg the first operand of that MINUS. This handles switches with an
3509 1.1 mrg ADDR_DIFF_VEC table. */
3510 1.1 mrg if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3511 1.1 mrg {
3512 1.1 mrg rtx y
3513 1.1 mrg = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3514 1.1 mrg : lookup_as_function (folded_arg0, MINUS);
3515 1.1 mrg
3516 1.1 mrg if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3517 1.1 mrg && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg1))
3518 1.1 mrg return XEXP (y, 0);
3519 1.1 mrg
3520 1.1 mrg /* Now try for a CONST of a MINUS like the above. */
3521 1.1 mrg if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3522 1.1 mrg : lookup_as_function (folded_arg0, CONST))) != 0
3523 1.1 mrg && GET_CODE (XEXP (y, 0)) == MINUS
3524 1.1 mrg && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3525 1.1 mrg && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg1))
3526 1.1 mrg return XEXP (XEXP (y, 0), 0);
3527 1.1 mrg }
3528 1.1 mrg
3529 1.1 mrg /* Likewise if the operands are in the other order. */
3530 1.1 mrg if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3531 1.1 mrg {
3532 1.1 mrg rtx y
3533 1.1 mrg = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3534 1.1 mrg : lookup_as_function (folded_arg1, MINUS);
3535 1.1 mrg
3536 1.1 mrg if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3537 1.1 mrg && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg0))
3538 1.1 mrg return XEXP (y, 0);
3539 1.1 mrg
3540 1.1 mrg /* Now try for a CONST of a MINUS like the above. */
3541 1.1 mrg if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3542 1.1 mrg : lookup_as_function (folded_arg1, CONST))) != 0
3543 1.1 mrg && GET_CODE (XEXP (y, 0)) == MINUS
3544 1.1 mrg && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3545 1.1 mrg && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg0))
3546 1.1 mrg return XEXP (XEXP (y, 0), 0);
3547 1.1 mrg }
3548 1.1 mrg
3549 1.1 mrg /* If second operand is a register equivalent to a negative
3550 1.1 mrg CONST_INT, see if we can find a register equivalent to the
3551 1.1 mrg positive constant. Make a MINUS if so. Don't do this for
3552 1.1 mrg a non-negative constant since we might then alternate between
3553 1.1 mrg choosing positive and negative constants. Having the positive
3554 1.1 mrg constant previously-used is the more common case. Be sure
3555 1.1 mrg the resulting constant is non-negative; if const_arg1 were
3556 1.1 mrg the smallest negative number this would overflow: depending
3557 1.1 mrg on the mode, this would either just be the same value (and
3558 1.1 mrg hence not save anything) or be incorrect. */
3559 1.1 mrg if (const_arg1 != 0 && CONST_INT_P (const_arg1)
3560 1.1 mrg && INTVAL (const_arg1) < 0
3561 1.1 mrg /* This used to test
3562 1.1 mrg
3563 1.1 mrg -INTVAL (const_arg1) >= 0
3564 1.1 mrg
3565 1.1 mrg But The Sun V5.0 compilers mis-compiled that test. So
3566 1.1 mrg instead we test for the problematic value in a more direct
3567 1.1 mrg manner and hope the Sun compilers get it correct. */
3568 1.1 mrg && INTVAL (const_arg1) !=
3569 1.1 mrg (HOST_WIDE_INT_1 << (HOST_BITS_PER_WIDE_INT - 1))
3570 1.1 mrg && REG_P (folded_arg1))
3571 1.1 mrg {
3572 1.1 mrg rtx new_const = GEN_INT (-INTVAL (const_arg1));
3573 1.1 mrg struct table_elt *p
3574 1.1 mrg = lookup (new_const, SAFE_HASH (new_const, mode), mode);
3575 1.1 mrg
3576 1.1 mrg if (p)
3577 1.1 mrg for (p = p->first_same_value; p; p = p->next_same_value)
3578 1.1 mrg if (REG_P (p->exp))
3579 1.1 mrg return simplify_gen_binary (MINUS, mode, folded_arg0,
3580 1.1 mrg canon_reg (p->exp, NULL));
3581 1.1 mrg }
3582 1.1 mrg goto from_plus;
3583 1.1 mrg
3584 1.1 mrg case MINUS:
3585 1.1 mrg /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3586 1.1 mrg If so, produce (PLUS Z C2-C). */
3587 1.1 mrg if (const_arg1 != 0 && poly_int_rtx_p (const_arg1, &xval))
3588 1.1 mrg {
3589 1.1 mrg rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3590 1.1 mrg if (y && poly_int_rtx_p (XEXP (y, 1)))
3591 1.1 mrg return fold_rtx (plus_constant (mode, copy_rtx (y), -xval),
3592 1.1 mrg NULL);
3593 1.1 mrg }
3594 1.1 mrg
3595 1.1 mrg /* Fall through. */
3596 1.1 mrg
3597 1.1 mrg from_plus:
3598 1.1 mrg case SMIN: case SMAX: case UMIN: case UMAX:
3599 1.1 mrg case IOR: case AND: case XOR:
3600 1.1 mrg case MULT:
3601 1.1 mrg case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3602 1.1 mrg /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3603 1.1 mrg is known to be of similar form, we may be able to replace the
3604 1.1 mrg operation with a combined operation. This may eliminate the
3605 1.1 mrg intermediate operation if every use is simplified in this way.
3606 1.1 mrg Note that the similar optimization done by combine.cc only works
3607 1.1 mrg if the intermediate operation's result has only one reference. */
3608 1.1 mrg
3609 1.1 mrg if (REG_P (folded_arg0)
3610 1.1 mrg && const_arg1 && CONST_INT_P (const_arg1))
3611 1.1 mrg {
3612 1.1 mrg int is_shift
3613 1.1 mrg = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3614 1.1 mrg rtx y, inner_const, new_const;
3615 1.1 mrg rtx canon_const_arg1 = const_arg1;
3616 1.1 mrg enum rtx_code associate_code;
3617 1.1 mrg
3618 1.1 mrg if (is_shift
3619 1.1 mrg && (INTVAL (const_arg1) >= GET_MODE_UNIT_PRECISION (mode)
3620 1.1 mrg || INTVAL (const_arg1) < 0))
3621 1.1 mrg {
3622 1.1 mrg if (SHIFT_COUNT_TRUNCATED)
3623 1.1 mrg canon_const_arg1 = gen_int_shift_amount
3624 1.1 mrg (mode, (INTVAL (const_arg1)
3625 1.1 mrg & (GET_MODE_UNIT_BITSIZE (mode) - 1)));
3626 1.1 mrg else
3627 1.1 mrg break;
3628 1.1 mrg }
3629 1.1 mrg
3630 1.1 mrg y = lookup_as_function (folded_arg0, code);
3631 1.1 mrg if (y == 0)
3632 1.1 mrg break;
3633 1.1 mrg
3634 1.1 mrg /* If we have compiled a statement like
3635 1.1 mrg "if (x == (x & mask1))", and now are looking at
3636 1.1 mrg "x & mask2", we will have a case where the first operand
3637 1.1 mrg of Y is the same as our first operand. Unless we detect
3638 1.1 mrg this case, an infinite loop will result. */
3639 1.1 mrg if (XEXP (y, 0) == folded_arg0)
3640 1.1 mrg break;
3641 1.1 mrg
3642 1.1 mrg inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
3643 1.1 mrg if (!inner_const || !CONST_INT_P (inner_const))
3644 1.1 mrg break;
3645 1.1 mrg
3646 1.1 mrg /* Don't associate these operations if they are a PLUS with the
3647 1.1 mrg same constant and it is a power of two. These might be doable
3648 1.1 mrg with a pre- or post-increment. Similarly for two subtracts of
3649 1.1 mrg identical powers of two with post decrement. */
3650 1.1 mrg
3651 1.1 mrg if (code == PLUS && const_arg1 == inner_const
3652 1.1 mrg && ((HAVE_PRE_INCREMENT
3653 1.1 mrg && pow2p_hwi (INTVAL (const_arg1)))
3654 1.1 mrg || (HAVE_POST_INCREMENT
3655 1.1 mrg && pow2p_hwi (INTVAL (const_arg1)))
3656 1.1 mrg || (HAVE_PRE_DECREMENT
3657 1.1 mrg && pow2p_hwi (- INTVAL (const_arg1)))
3658 1.1 mrg || (HAVE_POST_DECREMENT
3659 1.1 mrg && pow2p_hwi (- INTVAL (const_arg1)))))
3660 1.1 mrg break;
3661 1.1 mrg
3662 1.1 mrg /* ??? Vector mode shifts by scalar
3663 1.1 mrg shift operand are not supported yet. */
3664 1.1 mrg if (is_shift && VECTOR_MODE_P (mode))
3665 1.1 mrg break;
3666 1.1 mrg
3667 1.1 mrg if (is_shift
3668 1.1 mrg && (INTVAL (inner_const) >= GET_MODE_UNIT_PRECISION (mode)
3669 1.1 mrg || INTVAL (inner_const) < 0))
3670 1.1 mrg {
3671 1.1 mrg if (SHIFT_COUNT_TRUNCATED)
3672 1.1 mrg inner_const = gen_int_shift_amount
3673 1.1 mrg (mode, (INTVAL (inner_const)
3674 1.1 mrg & (GET_MODE_UNIT_BITSIZE (mode) - 1)));
3675 1.1 mrg else
3676 1.1 mrg break;
3677 1.1 mrg }
3678 1.1 mrg
3679 1.1 mrg /* Compute the code used to compose the constants. For example,
3680 1.1 mrg A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
3681 1.1 mrg
3682 1.1 mrg associate_code = (is_shift || code == MINUS ? PLUS : code);
3683 1.1 mrg
3684 1.1 mrg new_const = simplify_binary_operation (associate_code, mode,
3685 1.1 mrg canon_const_arg1,
3686 1.1 mrg inner_const);
3687 1.1 mrg
3688 1.1 mrg if (new_const == 0)
3689 1.1 mrg break;
3690 1.1 mrg
3691 1.1 mrg /* If we are associating shift operations, don't let this
3692 1.1 mrg produce a shift of the size of the object or larger.
3693 1.1 mrg This could occur when we follow a sign-extend by a right
3694 1.1 mrg shift on a machine that does a sign-extend as a pair
3695 1.1 mrg of shifts. */
3696 1.1 mrg
3697 1.1 mrg if (is_shift
3698 1.1 mrg && CONST_INT_P (new_const)
3699 1.1 mrg && INTVAL (new_const) >= GET_MODE_UNIT_PRECISION (mode))
3700 1.1 mrg {
3701 1.1 mrg /* As an exception, we can turn an ASHIFTRT of this
3702 1.1 mrg form into a shift of the number of bits - 1. */
3703 1.1 mrg if (code == ASHIFTRT)
3704 1.1 mrg new_const = gen_int_shift_amount
3705 1.1 mrg (mode, GET_MODE_UNIT_BITSIZE (mode) - 1);
3706 1.1 mrg else if (!side_effects_p (XEXP (y, 0)))
3707 1.1 mrg return CONST0_RTX (mode);
3708 1.1 mrg else
3709 1.1 mrg break;
3710 1.1 mrg }
3711 1.1 mrg
3712 1.1 mrg y = copy_rtx (XEXP (y, 0));
3713 1.1 mrg
3714 1.1 mrg /* If Y contains our first operand (the most common way this
3715 1.1 mrg 		 can happen is if Y is a MEM), we would go into an infinite
3716 1.1 mrg loop if we tried to fold it. So don't in that case. */
3717 1.1 mrg
3718 1.1 mrg if (! reg_mentioned_p (folded_arg0, y))
3719 1.1 mrg y = fold_rtx (y, insn);
3720 1.1 mrg
3721 1.1 mrg return simplify_gen_binary (code, mode, y, new_const);
3722 1.1 mrg }
3723 1.1 mrg break;
3724 1.1 mrg
3725 1.1 mrg case DIV: case UDIV:
3726 1.1 mrg /* ??? The associative optimization performed immediately above is
3727 1.1 mrg also possible for DIV and UDIV using associate_code of MULT.
3728 1.1 mrg However, we would need extra code to verify that the
3729 1.1 mrg multiplication does not overflow, that is, there is no overflow
3730 1.1 mrg in the calculation of new_const. */
3731 1.1 mrg break;
3732 1.1 mrg
3733 1.1 mrg default:
3734 1.1 mrg break;
3735 1.1 mrg }
3736 1.1 mrg
3737 1.1 mrg new_rtx = simplify_binary_operation (code, mode,
3738 1.1 mrg const_arg0 ? const_arg0 : folded_arg0,
3739 1.1 mrg const_arg1 ? const_arg1 : folded_arg1);
3740 1.1 mrg break;
3741 1.1 mrg
3742 1.1 mrg case RTX_OBJ:
3743 1.1 mrg /* (lo_sum (high X) X) is simply X. */
3744 1.1 mrg if (code == LO_SUM && const_arg0 != 0
3745 1.1 mrg && GET_CODE (const_arg0) == HIGH
3746 1.1 mrg && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3747 1.1 mrg return const_arg1;
3748 1.1 mrg break;
3749 1.1 mrg
3750 1.1 mrg case RTX_TERNARY:
3751 1.1 mrg case RTX_BITFIELD_OPS:
3752 1.1 mrg new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
3753 1.1 mrg const_arg0 ? const_arg0 : folded_arg0,
3754 1.1 mrg const_arg1 ? const_arg1 : folded_arg1,
3755 1.1 mrg const_arg2 ? const_arg2 : XEXP (x, 2));
3756 1.1 mrg break;
3757 1.1 mrg
3758 1.1 mrg default:
3759 1.1 mrg break;
3760 1.1 mrg }
3761 1.1 mrg
3762 1.1 mrg return new_rtx ? new_rtx : x;
3763 1.1 mrg }
3764 1.1 mrg
3748 1.1 mrg /* Return a constant value currently equivalent to X.
3749 1.1 mrg    Return 0 if we don't know one. */
/* Only REG, SUBREG and MEM forms are looked up; an X that is already
   CONSTANT_P is returned unchanged, and anything else yields 0.  */
3750 1.1 mrg
3751 1.1 mrg static rtx
3752 1.1 mrg equiv_constant (rtx x)
3753 1.1 mrg {
  /* If X is a register whose quantity has a recorded constant, replace X
     with the low part of that constant in X's own mode.  gen_lowpart (the
     CSE rtl-hook variant) may fail and return 0 — the check below handles
     both that and the constant-result case.  */
3754 1.1 mrg   if (REG_P (x)
3755 1.1 mrg       && REGNO_QTY_VALID_P (REGNO (x)))
3756 1.1 mrg     {
3757 1.1 mrg       int x_q = REG_QTY (REGNO (x));
3758 1.1 mrg       struct qty_table_elem *x_ent = &qty_table[x_q];
3759 1.1 mrg
3760 1.1 mrg       if (x_ent->const_rtx)
3761 1.1 mrg 	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
3762 1.1 mrg     }
3763 1.1 mrg
3764 1.1 mrg   if (x == 0 || CONSTANT_P (x))
3765 1.1 mrg     return x;
3766 1.1 mrg
3767 1.1 mrg   if (GET_CODE (x) == SUBREG)
3768 1.1 mrg     {
3769 1.1 mrg       machine_mode mode = GET_MODE (x);
3770 1.1 mrg       machine_mode imode = GET_MODE (SUBREG_REG (x));
3771 1.1 mrg       rtx new_rtx;
3772 1.1 mrg
3773 1.1 mrg       /* See if we previously assigned a constant value to this SUBREG. */
      /* Each constant rtx code is tried separately; CONST_POLY_INT only
	 exists when the target has more than one poly_int coefficient.  */
3774 1.1 mrg       if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
3775 1.1 mrg 	  || (new_rtx = lookup_as_function (x, CONST_WIDE_INT)) != 0
3776 1.1 mrg 	  || (NUM_POLY_INT_COEFFS > 1
3777 1.1 mrg 	      && (new_rtx = lookup_as_function (x, CONST_POLY_INT)) != 0)
3778 1.1 mrg 	  || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3779 1.1 mrg 	  || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3780 1.1 mrg 	return new_rtx;
3781 1.1 mrg
3782 1.1 mrg       /* If we didn't and if doing so makes sense, see if we previously
3783 1.1 mrg 	 assigned a constant value to the enclosing word mode SUBREG. */
3784 1.1 mrg       if (known_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD)
3785 1.1 mrg 	  && known_lt (UNITS_PER_WORD, GET_MODE_SIZE (imode)))
3786 1.1 mrg 	{
	  /* BYTE is X's offset re-expressed relative to the enclosing
	     word; it must land on a word boundary to be usable.  */
3787 1.1 mrg 	  poly_int64 byte = (SUBREG_BYTE (x)
3788 1.1 mrg 			     - subreg_lowpart_offset (mode, word_mode));
3789 1.1 mrg 	  if (known_ge (byte, 0) && multiple_p (byte, UNITS_PER_WORD))
3790 1.1 mrg 	    {
3791 1.1 mrg 	      rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
3792 1.1 mrg 	      new_rtx = lookup_as_function (y, CONST_INT);
3793 1.1 mrg 	      if (new_rtx)
3794 1.1 mrg 		return gen_lowpart (mode, new_rtx);
3795 1.1 mrg 	    }
3796 1.1 mrg 	}
3797 1.1 mrg
3798 1.1 mrg       /* Otherwise see if we already have a constant for the inner REG,
3799 1.1 mrg 	 and if that is enough to calculate an equivalent constant for
3800 1.1 mrg 	 the subreg. Note that the upper bits of paradoxical subregs
3801 1.1 mrg 	 are undefined, so they cannot be said to equal anything. */
3802 1.1 mrg       if (REG_P (SUBREG_REG (x))
3803 1.1 mrg 	  && !paradoxical_subreg_p (x)
3804 1.1 mrg 	  && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
3805 1.1 mrg 	return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));
3806 1.1 mrg
3807 1.1 mrg       return 0;
3808 1.1 mrg     }
3809 1.1 mrg
3810 1.1 mrg   /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3811 1.1 mrg      the hash table in case its value was seen before. */
3812 1.1 mrg
3813 1.1 mrg   if (MEM_P (x))
3814 1.1 mrg     {
3815 1.1 mrg       struct table_elt *elt;
3816 1.1 mrg
3817 1.1 mrg       x = avoid_constant_pool_reference (x);
3818 1.1 mrg       if (CONSTANT_P (x))
3819 1.1 mrg 	return x;
3820 1.1 mrg
3821 1.1 mrg       elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
3822 1.1 mrg       if (elt == 0)
3823 1.1 mrg 	return 0;
3824 1.1 mrg
      /* Walk the whole equivalence class of the MEM looking for any member
	 that is itself a true constant.  */
3825 1.1 mrg       for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3826 1.1 mrg 	if (elt->is_const && CONSTANT_P (elt->exp))
3827 1.1 mrg 	  return elt->exp;
3828 1.1 mrg     }
3829 1.1 mrg
3830 1.1 mrg   return 0;
3831 1.1 mrg }
3850 1.1 mrg
3833 1.1 mrg /* Given INSN, a jump insn, TAKEN indicates if we are following the
3834 1.1 mrg    "taken" branch.
3835 1.1 mrg
3836 1.1 mrg    In certain cases, this can cause us to add an equivalence. For example,
3837 1.1 mrg    if we are following the taken case of
3838 1.1 mrg 	if (i == 2)
3839 1.1 mrg    we can add the fact that `i' and '2' are now equivalent.
3840 1.1 mrg
3841 1.1 mrg    In any case, we can record that this comparison was passed. If the same
3842 1.1 mrg    comparison is seen later, we will know its value. */
3843 1.1 mrg
3844 1.1 mrg static void
3845 1.1 mrg record_jump_equiv (rtx_insn *insn, bool taken)
3846 1.1 mrg {
3847 1.1 mrg   int cond_known_true;
3848 1.1 mrg   rtx op0, op1;
3849 1.1 mrg   rtx set;
3850 1.1 mrg   machine_mode mode, mode0, mode1;
3851 1.1 mrg   int reversed_nonequality = 0;
  /* NOTE(review): reversed_nonequality is never set to nonzero anywhere in
     this function, so the floating-point restriction keyed on it inside
     record_jump_cond cannot trigger from this caller — confirm intended.  */
3852 1.1 mrg   enum rtx_code code;
3853 1.1 mrg
3854 1.1 mrg   /* Ensure this is the right kind of insn. */
3855 1.1 mrg   gcc_assert (any_condjump_p (insn));
3856 1.1 mrg
3857 1.1 mrg   set = pc_set (insn);
3858 1.1 mrg
3859 1.1 mrg   /* See if this jump condition is known true or false. */
  /* SET_SRC is (if_then_else COND arm1 arm2); an arm equal to pc_rtx is the
     fall-through.  Taking the branch therefore means COND selected the
     non-pc arm, and conversely for falling through.  */
3860 1.1 mrg   if (taken)
3861 1.1 mrg     cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
3862 1.1 mrg   else
3863 1.1 mrg     cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
3864 1.1 mrg
3865 1.1 mrg   /* Get the type of comparison being done and the operands being compared.
3866 1.1 mrg      If we had to reverse a non-equality condition, record that fact so we
3867 1.1 mrg      know that it isn't valid for floating-point. */
3868 1.1 mrg   code = GET_CODE (XEXP (SET_SRC (set), 0));
3869 1.1 mrg   op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3870 1.1 mrg   op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
3871 1.1 mrg
3872 1.1 mrg   /* If fold_rtx returns NULL_RTX, there's nothing to record. */
3873 1.1 mrg   if (op0 == NULL_RTX || op1 == NULL_RTX)
3874 1.1 mrg     return;
3875 1.1 mrg
  /* find_comparison_args may rewrite CODE, the operands and their modes to
     the underlying comparison being tested.  */
3876 1.1 mrg   code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
3877 1.1 mrg   if (! cond_known_true)
3878 1.1 mrg     {
3879 1.1 mrg       code = reversed_comparison_code_parts (code, op0, op1, insn);
3880 1.1 mrg
3881 1.1 mrg       /* Don't remember if we can't find the inverse. */
3882 1.1 mrg       if (code == UNKNOWN)
3883 1.1 mrg 	return;
3884 1.1 mrg     }
3885 1.1 mrg
3886 1.1 mrg   /* The mode is the mode of the non-constant. */
  /* mode1 being VOIDmode indicates op1 is a naturally modeless constant,
     so prefer mode0 in that case.  */
3887 1.1 mrg   mode = mode0;
3888 1.1 mrg   if (mode1 != VOIDmode)
3889 1.1 mrg     mode = mode1;
3890 1.1 mrg
3891 1.1 mrg   record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3892 1.1 mrg }
3912 1.1 mrg
3913 1.1 mrg /* Yet another form of subreg creation. In this case, we want something in
3914 1.1 mrg MODE, and we should assume OP has MODE iff it is naturally modeless. */
3915 1.1 mrg
3916 1.1 mrg static rtx
3917 1.1 mrg record_jump_cond_subreg (machine_mode mode, rtx op)
3918 1.1 mrg {
3919 1.1 mrg machine_mode op_mode = GET_MODE (op);
3920 1.1 mrg if (op_mode == mode || op_mode == VOIDmode)
3921 1.1 mrg return op;
3922 1.1 mrg return lowpart_subreg (mode, op, op_mode);
3923 1.1 mrg }
3924 1.1 mrg
3925 1.1 mrg /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3926 1.1 mrg REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
3927 1.1 mrg Make any useful entries we can with that information. Called from
3928 1.1 mrg above function and called recursively. */
3929 1.1 mrg
3930 1.1 mrg static void
3931 1.1 mrg record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0,
3932 1.1 mrg rtx op1, int reversed_nonequality)
3933 1.1 mrg {
3934 1.1 mrg unsigned op0_hash, op1_hash;
3935 1.1 mrg int op0_in_memory, op1_in_memory;
3936 1.1 mrg struct table_elt *op0_elt, *op1_elt;
3937 1.1 mrg
3938 1.1 mrg /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3939 1.1 mrg we know that they are also equal in the smaller mode (this is also
3940 1.1 mrg true for all smaller modes whether or not there is a SUBREG, but
3941 1.1 mrg is not worth testing for with no SUBREG). */
3942 1.1 mrg
3943 1.1 mrg /* Note that GET_MODE (op0) may not equal MODE. */
3944 1.1 mrg if (code == EQ && paradoxical_subreg_p (op0))
3945 1.1 mrg {
3946 1.1 mrg machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3947 1.1 mrg rtx tem = record_jump_cond_subreg (inner_mode, op1);
3948 1.1 mrg if (tem)
3949 1.1 mrg record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3950 1.1 mrg reversed_nonequality);
3951 1.1 mrg }
3952 1.1 mrg
3953 1.1 mrg if (code == EQ && paradoxical_subreg_p (op1))
3954 1.1 mrg {
3955 1.1 mrg machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3956 1.1 mrg rtx tem = record_jump_cond_subreg (inner_mode, op0);
3957 1.1 mrg if (tem)
3958 1.1 mrg record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3959 1.1 mrg reversed_nonequality);
3960 1.1 mrg }
3961 1.1 mrg
3962 1.1 mrg /* Similarly, if this is an NE comparison, and either is a SUBREG
3963 1.1 mrg making a smaller mode, we know the whole thing is also NE. */
3964 1.1 mrg
3965 1.1 mrg /* Note that GET_MODE (op0) may not equal MODE;
3966 1.1 mrg if we test MODE instead, we can get an infinite recursion
3967 1.1 mrg alternating between two modes each wider than MODE. */
3968 1.1 mrg
3969 1.1 mrg if (code == NE
3970 1.1 mrg && partial_subreg_p (op0)
3971 1.1 mrg && subreg_lowpart_p (op0))
3972 1.1 mrg {
3973 1.1 mrg machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3974 1.1 mrg rtx tem = record_jump_cond_subreg (inner_mode, op1);
3975 1.1 mrg if (tem)
3976 1.1 mrg record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3977 1.1 mrg reversed_nonequality);
3978 1.1 mrg }
3979 1.1 mrg
3980 1.1 mrg if (code == NE
3981 1.1 mrg && partial_subreg_p (op1)
3982 1.1 mrg && subreg_lowpart_p (op1))
3983 1.1 mrg {
3984 1.1 mrg machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3985 1.1 mrg rtx tem = record_jump_cond_subreg (inner_mode, op0);
3986 1.1 mrg if (tem)
3987 1.1 mrg record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3988 1.1 mrg reversed_nonequality);
3989 1.1 mrg }
3990 1.1 mrg
3991 1.1 mrg /* Hash both operands. */
3992 1.1 mrg
3993 1.1 mrg do_not_record = 0;
3994 1.1 mrg hash_arg_in_memory = 0;
3995 1.1 mrg op0_hash = HASH (op0, mode);
3996 1.1 mrg op0_in_memory = hash_arg_in_memory;
3997 1.1 mrg
3998 1.1 mrg if (do_not_record)
3999 1.1 mrg return;
4000 1.1 mrg
4001 1.1 mrg do_not_record = 0;
4002 1.1 mrg hash_arg_in_memory = 0;
4003 1.1 mrg op1_hash = HASH (op1, mode);
4004 1.1 mrg op1_in_memory = hash_arg_in_memory;
4005 1.1 mrg
4006 1.1 mrg if (do_not_record)
4007 1.1 mrg return;
4008 1.1 mrg
4009 1.1 mrg /* Look up both operands. */
4010 1.1 mrg op0_elt = lookup (op0, op0_hash, mode);
4011 1.1 mrg op1_elt = lookup (op1, op1_hash, mode);
4012 1.1 mrg
4013 1.1 mrg /* If both operands are already equivalent or if they are not in the
4014 1.1 mrg table but are identical, do nothing. */
4015 1.1 mrg if ((op0_elt != 0 && op1_elt != 0
4016 1.1 mrg && op0_elt->first_same_value == op1_elt->first_same_value)
4017 1.1 mrg || op0 == op1 || rtx_equal_p (op0, op1))
4018 1.1 mrg return;
4019 1.1 mrg
4020 1.1 mrg /* If we aren't setting two things equal all we can do is save this
4021 1.1 mrg comparison. Similarly if this is floating-point. In the latter
4022 1.1 mrg case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4023 1.1 mrg If we record the equality, we might inadvertently delete code
4024 1.1 mrg whose intent was to change -0 to +0. */
4025 1.1 mrg
4026 1.1 mrg if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4027 1.1 mrg {
4028 1.1 mrg struct qty_table_elem *ent;
4029 1.1 mrg int qty;
4030 1.1 mrg
4031 1.1 mrg /* If we reversed a floating-point comparison, if OP0 is not a
4032 1.1 mrg register, or if OP1 is neither a register or constant, we can't
4033 1.1 mrg do anything. */
4034 1.1 mrg
4035 1.1 mrg if (!REG_P (op1))
4036 1.1 mrg op1 = equiv_constant (op1);
4037 1.1 mrg
4038 1.1 mrg if ((reversed_nonequality && FLOAT_MODE_P (mode))
4039 1.1 mrg || !REG_P (op0) || op1 == 0)
4040 1.1 mrg return;
4041 1.1 mrg
4042 1.1 mrg /* Put OP0 in the hash table if it isn't already. This gives it a
4043 1.1 mrg new quantity number. */
4044 1.1 mrg if (op0_elt == 0)
4045 1.1 mrg {
4046 1.1 mrg if (insert_regs (op0, NULL, 0))
4047 1.1 mrg {
4048 1.1 mrg rehash_using_reg (op0);
4049 1.1 mrg op0_hash = HASH (op0, mode);
4050 1.1 mrg
4051 1.1 mrg /* If OP0 is contained in OP1, this changes its hash code
4052 1.1 mrg as well. Faster to rehash than to check, except
4053 1.1 mrg for the simple case of a constant. */
4054 1.1 mrg if (! CONSTANT_P (op1))
4055 1.1 mrg op1_hash = HASH (op1,mode);
4056 1.1 mrg }
4057 1.1 mrg
4058 1.1 mrg op0_elt = insert (op0, NULL, op0_hash, mode);
4059 1.1 mrg op0_elt->in_memory = op0_in_memory;
4060 1.1 mrg }
4061 1.1 mrg
4062 1.1 mrg qty = REG_QTY (REGNO (op0));
4063 1.1 mrg ent = &qty_table[qty];
4064 1.1 mrg
4065 1.1 mrg ent->comparison_code = code;
4066 1.1 mrg if (REG_P (op1))
4067 1.1 mrg {
4068 1.1 mrg /* Look it up again--in case op0 and op1 are the same. */
4069 1.1 mrg op1_elt = lookup (op1, op1_hash, mode);
4070 1.1 mrg
4071 1.1 mrg /* Put OP1 in the hash table so it gets a new quantity number. */
4072 1.1 mrg if (op1_elt == 0)
4073 1.1 mrg {
4074 1.1 mrg if (insert_regs (op1, NULL, 0))
4075 1.1 mrg {
4076 1.1 mrg rehash_using_reg (op1);
4077 1.1 mrg op1_hash = HASH (op1, mode);
4078 1.1 mrg }
4079 1.1 mrg
4080 1.1 mrg op1_elt = insert (op1, NULL, op1_hash, mode);
4081 1.1 mrg op1_elt->in_memory = op1_in_memory;
4082 1.1 mrg }
4083 1.1 mrg
4084 1.1 mrg ent->comparison_const = NULL_RTX;
4085 1.1 mrg ent->comparison_qty = REG_QTY (REGNO (op1));
4086 1.1 mrg }
4087 1.1 mrg else
4088 1.1 mrg {
4089 1.1 mrg ent->comparison_const = op1;
4090 1.1 mrg ent->comparison_qty = INT_MIN;
4091 1.1 mrg }
4092 1.1 mrg
4093 1.1 mrg return;
4094 1.1 mrg }
4095 1.1 mrg
4096 1.1 mrg /* If either side is still missing an equivalence, make it now,
4097 1.1 mrg then merge the equivalences. */
4098 1.1 mrg
4099 1.1 mrg if (op0_elt == 0)
4100 1.1 mrg {
4101 1.1 mrg if (insert_regs (op0, NULL, 0))
4102 1.1 mrg {
4103 1.1 mrg rehash_using_reg (op0);
4104 1.1 mrg op0_hash = HASH (op0, mode);
4105 1.1 mrg }
4106 1.1 mrg
4107 1.1 mrg op0_elt = insert (op0, NULL, op0_hash, mode);
4108 1.1 mrg op0_elt->in_memory = op0_in_memory;
4109 1.1 mrg }
4110 1.1 mrg
4111 1.1 mrg if (op1_elt == 0)
4112 1.1 mrg {
4113 1.1 mrg if (insert_regs (op1, NULL, 0))
4114 1.1 mrg {
4115 1.1 mrg rehash_using_reg (op1);
4116 1.1 mrg op1_hash = HASH (op1, mode);
4117 1.1 mrg }
4118 1.1 mrg
4119 1.1 mrg op1_elt = insert (op1, NULL, op1_hash, mode);
4120 1.1 mrg op1_elt->in_memory = op1_in_memory;
4121 1.1 mrg }
4122 1.1 mrg
4123 1.1 mrg merge_equiv_classes (op0_elt, op1_elt);
4124 1.1 mrg }
4125 1.1 mrg
/* CSE processing for one instruction.

   Most "true" common subexpressions are mostly optimized away in GIMPLE,
   but the few that "leak through" are cleaned up by cse_insn, and complex
   addressing modes are often formed here.

   The main function is cse_insn, and between here and that function
   a couple of helper functions are defined to keep the size of cse_insn
   within reasonable proportions.

   Data is shared between the main and helper functions via STRUCT SET,
   which contains all data related to every set in the instruction that
   is being processed.

   Note that cse_main processes all sets in the instruction.  Most
   passes in GCC only process simple SET insns or single_set insns, but
   CSE processes insns with multiple sets as well.  */
4144 1.1 mrg
4145 1.1 mrg /* Data on one SET contained in the instruction. */
4146 1.1 mrg
4147 1.1 mrg struct set
4148 1.1 mrg {
4149 1.1 mrg /* The SET rtx itself. */
4150 1.1 mrg rtx rtl;
4151 1.1 mrg /* The SET_SRC of the rtx (the original value, if it is changing). */
4152 1.1 mrg rtx src;
4153 1.1 mrg /* The hash-table element for the SET_SRC of the SET. */
4154 1.1 mrg struct table_elt *src_elt;
4155 1.1 mrg /* Hash value for the SET_SRC. */
4156 1.1 mrg unsigned src_hash;
4157 1.1 mrg /* Hash value for the SET_DEST. */
4158 1.1 mrg unsigned dest_hash;
4159 1.1 mrg /* The SET_DEST, with SUBREG, etc., stripped. */
4160 1.1 mrg rtx inner_dest;
4161 1.1 mrg /* Nonzero if the SET_SRC is in memory. */
4162 1.1 mrg char src_in_memory;
4163 1.1 mrg /* Nonzero if the SET_SRC contains something
4164 1.1 mrg whose value cannot be predicted and understood. */
4165 1.1 mrg char src_volatile;
4166 1.1 mrg /* Original machine mode, in case it becomes a CONST_INT.
4167 1.1 mrg The size of this field should match the size of the mode
4168 1.1 mrg field of struct rtx_def (see rtl.h). */
4169 1.1 mrg ENUM_BITFIELD(machine_mode) mode : 8;
4170 1.1 mrg /* Hash value of constant equivalent for SET_SRC. */
4171 1.1 mrg unsigned src_const_hash;
4172 1.1 mrg /* A constant equivalent for SET_SRC, if any. */
4173 1.1 mrg rtx src_const;
4174 1.1 mrg /* Table entry for constant equivalent for SET_SRC, if any. */
4175 1.1 mrg struct table_elt *src_const_elt;
4176 1.1 mrg /* Table entry for the destination address. */
4177 1.1 mrg struct table_elt *dest_addr_elt;
4178 1.1 mrg };
4179 1.1 mrg
4180 1.1 mrg /* Special handling for (set REG0 REG1) where REG0 is the
4182 1.1 mrg "cheapest", cheaper than REG1. After cse, REG1 will probably not
4183 1.1 mrg be used in the sequel, so (if easily done) change this insn to
4184 1.1 mrg (set REG1 REG0) and replace REG1 with REG0 in the previous insn
4185 1.1 mrg that computed their value. Then REG1 will become a dead store
4186 1.1 mrg and won't cloud the situation for later optimizations.
4187 1.1 mrg
4188 1.1 mrg Do not make this change if REG1 is a hard register, because it will
4189 1.1 mrg then be used in the sequel and we may be changing a two-operand insn
4190 1.1 mrg into a three-operand insn.
4191 1.1 mrg
4192 1.1 mrg This is the last transformation that cse_insn will try to do. */
4193 1.1 mrg
4194 1.1 mrg static void
4195 1.1 mrg try_back_substitute_reg (rtx set, rtx_insn *insn)
4196 1.1 mrg {
4197 1.1 mrg rtx dest = SET_DEST (set);
4198 1.1 mrg rtx src = SET_SRC (set);
4199 1.1 mrg
4200 1.1 mrg if (REG_P (dest)
4201 1.1 mrg && REG_P (src) && ! HARD_REGISTER_P (src)
4202 1.1 mrg && REGNO_QTY_VALID_P (REGNO (src)))
4203 1.1 mrg {
4204 1.1 mrg int src_q = REG_QTY (REGNO (src));
4205 1.1 mrg struct qty_table_elem *src_ent = &qty_table[src_q];
4206 1.1 mrg
4207 1.1 mrg if (src_ent->first_reg == REGNO (dest))
4208 1.1 mrg {
4209 1.1 mrg /* Scan for the previous nonnote insn, but stop at a basic
4210 1.1 mrg block boundary. */
4211 1.1 mrg rtx_insn *prev = insn;
4212 1.1 mrg rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
4213 1.1 mrg do
4214 1.1 mrg {
4215 1.1 mrg prev = PREV_INSN (prev);
4216 1.1 mrg }
4217 1.1 mrg while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
4218 1.1 mrg
4219 1.1 mrg /* Do not swap the registers around if the previous instruction
4220 1.1 mrg attaches a REG_EQUIV note to REG1.
4221 1.1 mrg
4222 1.1 mrg ??? It's not entirely clear whether we can transfer a REG_EQUIV
4223 1.1 mrg from the pseudo that originally shadowed an incoming argument
4224 1.1 mrg to another register. Some uses of REG_EQUIV might rely on it
4225 1.1 mrg being attached to REG1 rather than REG2.
4226 1.1 mrg
4227 1.1 mrg This section previously turned the REG_EQUIV into a REG_EQUAL
4228 1.1 mrg note. We cannot do that because REG_EQUIV may provide an
4229 1.1 mrg uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
4230 1.1 mrg if (NONJUMP_INSN_P (prev)
4231 1.1 mrg && GET_CODE (PATTERN (prev)) == SET
4232 1.1 mrg && SET_DEST (PATTERN (prev)) == src
4233 1.1 mrg && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
4234 1.1 mrg {
4235 1.1 mrg rtx note;
4236 1.1 mrg
4237 1.1 mrg validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
4238 1.1 mrg validate_change (insn, &SET_DEST (set), src, 1);
4239 1.1 mrg validate_change (insn, &SET_SRC (set), dest, 1);
4240 1.1 mrg apply_change_group ();
4241 1.1 mrg
4242 1.1 mrg /* If INSN has a REG_EQUAL note, and this note mentions
4243 1.1 mrg REG0, then we must delete it, because the value in
4244 1.1 mrg REG0 has changed. If the note's value is REG1, we must
4245 1.1 mrg also delete it because that is now this insn's dest. */
4246 1.1 mrg note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
4247 1.1 mrg if (note != 0
4248 1.1 mrg && (reg_mentioned_p (dest, XEXP (note, 0))
4249 1.1 mrg || rtx_equal_p (src, XEXP (note, 0))))
4250 1.1 mrg remove_note (insn, note);
4251 1.1 mrg
4252 1.1 mrg /* If INSN has a REG_ARGS_SIZE note, move it to PREV. */
4253 1.1 mrg note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4254 1.1 mrg if (note != 0)
4255 1.1 mrg {
4256 1.1 mrg remove_note (insn, note);
4257 1.1 mrg gcc_assert (!find_reg_note (prev, REG_ARGS_SIZE, NULL_RTX));
4258 1.1 mrg set_unique_reg_note (prev, REG_ARGS_SIZE, XEXP (note, 0));
4259 1.1 mrg }
4260 1.1 mrg }
4261 1.1 mrg }
4262 1.1 mrg }
4263 1.1 mrg }
4264 1.1 mrg
4265 1.1 mrg /* Add an entry containing RTL X into SETS. */
4266 1.1 mrg static inline void
4267 1.1 mrg add_to_set (vec<struct set> *sets, rtx x)
4268 1.1 mrg {
4269 1.1 mrg struct set entry = {};
4270 1.1 mrg entry.rtl = x;
4271 1.1 mrg sets->safe_push (entry);
4272 1.1 mrg }
4273 1.1 mrg
4274 1.1 mrg /* Record all the SETs in this instruction into SETS_PTR,
4275 1.1 mrg and return the number of recorded sets. */
4276 1.1 mrg static int
4277 1.1 mrg find_sets_in_insn (rtx_insn *insn, vec<struct set> *psets)
4278 1.1 mrg {
4279 1.1 mrg rtx x = PATTERN (insn);
4280 1.1 mrg
4281 1.1 mrg if (GET_CODE (x) == SET)
4282 1.1 mrg {
4283 1.1 mrg /* Ignore SETs that are unconditional jumps.
4284 1.1 mrg They never need cse processing, so this does not hurt.
4285 1.1 mrg The reason is not efficiency but rather
4286 1.1 mrg so that we can test at the end for instructions
4287 1.1 mrg that have been simplified to unconditional jumps
4288 1.1 mrg and not be misled by unchanged instructions
4289 1.1 mrg that were unconditional jumps to begin with. */
4290 1.1 mrg if (SET_DEST (x) == pc_rtx
4291 1.1 mrg && GET_CODE (SET_SRC (x)) == LABEL_REF)
4292 1.1 mrg ;
4293 1.1 mrg /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4294 1.1 mrg The hard function value register is used only once, to copy to
4295 1.1 mrg someplace else, so it isn't worth cse'ing. */
4296 1.1 mrg else if (GET_CODE (SET_SRC (x)) == CALL)
4297 1.1 mrg ;
4298 1.1 mrg else if (GET_CODE (SET_SRC (x)) == CONST_VECTOR
4299 1.1 mrg && GET_MODE_CLASS (GET_MODE (SET_SRC (x))) != MODE_VECTOR_BOOL
4300 1.1 mrg /* Prevent duplicates from being generated if the type is a V1
4301 1.1 mrg type and a subreg. Folding this will result in the same
4302 1.1 mrg element as folding x itself. */
4303 1.1 mrg && !(SUBREG_P (SET_DEST (x))
4304 1.1 mrg && known_eq (GET_MODE_NUNITS (GET_MODE (SET_SRC (x))), 1)))
4305 1.1 mrg {
4306 1.1 mrg /* First register the vector itself. */
4307 1.1 mrg add_to_set (psets, x);
4308 1.1 mrg rtx src = SET_SRC (x);
4309 1.1 mrg /* Go over the constants of the CONST_VECTOR in forward order, to
4310 1.1 mrg put them in the same order in the SETS array. */
4311 1.1 mrg for (unsigned i = 0; i < const_vector_encoded_nelts (src) ; i++)
4312 1.1 mrg {
4313 1.1 mrg /* These are templates and don't actually get emitted but are
4314 1.1 mrg used to tell CSE how to get to a particular constant. */
4315 1.1 mrg rtx y = simplify_gen_vec_select (SET_DEST (x), i);
4316 1.1 mrg gcc_assert (y);
4317 1.1 mrg add_to_set (psets, gen_rtx_SET (y, CONST_VECTOR_ELT (src, i)));
4318 1.1 mrg }
4319 1.1 mrg }
4320 1.1 mrg else
4321 1.1 mrg add_to_set (psets, x);
4322 1.1 mrg }
4323 1.1 mrg else if (GET_CODE (x) == PARALLEL)
4324 1.1 mrg {
4325 1.1 mrg int i, lim = XVECLEN (x, 0);
4326 1.1 mrg
4327 1.1 mrg /* Go over the expressions of the PARALLEL in forward order, to
4328 1.1 mrg put them in the same order in the SETS array. */
4329 1.1 mrg for (i = 0; i < lim; i++)
4330 1.1 mrg {
4331 1.1 mrg rtx y = XVECEXP (x, 0, i);
4332 1.1 mrg if (GET_CODE (y) == SET)
4333 1.1 mrg {
4334 1.1 mrg /* As above, we ignore unconditional jumps and call-insns and
4335 1.1 mrg ignore the result of apply_change_group. */
4336 1.1 mrg if (SET_DEST (y) == pc_rtx
4337 1.1 mrg && GET_CODE (SET_SRC (y)) == LABEL_REF)
4338 1.1 mrg ;
4339 1.1 mrg else if (GET_CODE (SET_SRC (y)) == CALL)
4340 1.1 mrg ;
4341 1.1 mrg else
4342 1.1 mrg add_to_set (psets, y);
4343 1.1 mrg }
4344 1.1 mrg }
4345 1.1 mrg }
4346 1.1 mrg
4347 1.1 mrg return psets->length ();
4348 1.1 mrg }
4349 1.1 mrg
4350 1.1 mrg /* Subroutine of canonicalize_insn. X is an ASM_OPERANDS in INSN. */
4352 1.1 mrg
4353 1.1 mrg static void
4354 1.1 mrg canon_asm_operands (rtx x, rtx_insn *insn)
4355 1.1 mrg {
4356 1.1 mrg for (int i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
4357 1.1 mrg {
4358 1.1 mrg rtx input = ASM_OPERANDS_INPUT (x, i);
4359 1.1 mrg if (!(REG_P (input) && HARD_REGISTER_P (input)))
4360 1.1 mrg {
4361 1.1 mrg input = canon_reg (input, insn);
4362 1.1 mrg validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
4363 1.1 mrg }
4364 1.1 mrg }
4365 1.1 mrg }
4366 1.1 mrg
4367 1.1 mrg /* Where possible, substitute every register reference in the N_SETS
4368 1.1 mrg number of SETS in INSN with the canonical register.
4369 1.1 mrg
4370 1.1 mrg Register canonicalization propagatest the earliest register (i.e.
4371 1.1 mrg one that is set before INSN) with the same value. This is a very
4372 1.1 mrg useful, simple form of CSE, to clean up warts from expanding GIMPLE
4373 1.1 mrg to RTL. For instance, a CONST for an address is usually expanded
4374 1.1 mrg multiple times to loads into different registers, thus creating many
4375 1.1 mrg subexpressions of the form:
4376 1.1 mrg
4377 1.1 mrg (set (reg1) (some_const))
4378 1.1 mrg (set (mem (... reg1 ...) (thing)))
4379 1.1 mrg (set (reg2) (some_const))
4380 1.1 mrg (set (mem (... reg2 ...) (thing)))
4381 1.1 mrg
4382 1.1 mrg After canonicalizing, the code takes the following form:
4383 1.1 mrg
4384 1.1 mrg (set (reg1) (some_const))
4385 1.1 mrg (set (mem (... reg1 ...) (thing)))
4386 1.1 mrg (set (reg2) (some_const))
4387 1.1 mrg (set (mem (... reg1 ...) (thing)))
4388 1.1 mrg
4389 1.1 mrg The set to reg2 is now trivially dead, and the memory reference (or
4390 1.1 mrg address, or whatever) may be a candidate for further CSEing.
4391 1.1 mrg
4392 1.1 mrg In this function, the result of apply_change_group can be ignored;
4393 1.1 mrg see canon_reg. */
4394 1.1 mrg
4395 1.1 mrg static void
4396 1.1 mrg canonicalize_insn (rtx_insn *insn, vec<struct set> *psets)
4397 1.1 mrg {
4398 1.1 mrg vec<struct set> sets = *psets;
4399 1.1 mrg int n_sets = sets.length ();
4400 1.1 mrg rtx tem;
4401 1.1 mrg rtx x = PATTERN (insn);
4402 1.1 mrg int i;
4403 1.1 mrg
4404 1.1 mrg if (CALL_P (insn))
4405 1.1 mrg {
4406 1.1 mrg for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4407 1.1 mrg if (GET_CODE (XEXP (tem, 0)) != SET)
4408 1.1 mrg XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4409 1.1 mrg }
4410 1.1 mrg
4411 1.1 mrg if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
4412 1.1 mrg {
4413 1.1 mrg canon_reg (SET_SRC (x), insn);
4414 1.1 mrg apply_change_group ();
4415 1.1 mrg fold_rtx (SET_SRC (x), insn);
4416 1.1 mrg }
4417 1.1 mrg else if (GET_CODE (x) == CLOBBER)
4418 1.1 mrg {
4419 1.1 mrg /* If we clobber memory, canon the address.
4420 1.1 mrg This does nothing when a register is clobbered
4421 1.1 mrg because we have already invalidated the reg. */
4422 1.1 mrg if (MEM_P (XEXP (x, 0)))
4423 1.1 mrg canon_reg (XEXP (x, 0), insn);
4424 1.1 mrg }
4425 1.1 mrg else if (GET_CODE (x) == USE
4426 1.1 mrg && ! (REG_P (XEXP (x, 0))
4427 1.1 mrg && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4428 1.1 mrg /* Canonicalize a USE of a pseudo register or memory location. */
4429 1.1 mrg canon_reg (x, insn);
4430 1.1 mrg else if (GET_CODE (x) == ASM_OPERANDS)
4431 1.1 mrg canon_asm_operands (x, insn);
4432 1.1 mrg else if (GET_CODE (x) == CALL)
4433 1.1 mrg {
4434 1.1 mrg canon_reg (x, insn);
4435 1.1 mrg apply_change_group ();
4436 1.1 mrg fold_rtx (x, insn);
4437 1.1 mrg }
4438 1.1 mrg else if (DEBUG_INSN_P (insn))
4439 1.1 mrg canon_reg (PATTERN (insn), insn);
4440 1.1 mrg else if (GET_CODE (x) == PARALLEL)
4441 1.1 mrg {
4442 1.1 mrg for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4443 1.1 mrg {
4444 1.1 mrg rtx y = XVECEXP (x, 0, i);
4445 1.1 mrg if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
4446 1.1 mrg {
4447 1.1 mrg canon_reg (SET_SRC (y), insn);
4448 1.1 mrg apply_change_group ();
4449 1.1 mrg fold_rtx (SET_SRC (y), insn);
4450 1.1 mrg }
4451 1.1 mrg else if (GET_CODE (y) == CLOBBER)
4452 1.1 mrg {
4453 1.1 mrg if (MEM_P (XEXP (y, 0)))
4454 1.1 mrg canon_reg (XEXP (y, 0), insn);
4455 1.1 mrg }
4456 1.1 mrg else if (GET_CODE (y) == USE
4457 1.1 mrg && ! (REG_P (XEXP (y, 0))
4458 1.1 mrg && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4459 1.1 mrg canon_reg (y, insn);
4460 1.1 mrg else if (GET_CODE (y) == ASM_OPERANDS)
4461 1.1 mrg canon_asm_operands (y, insn);
4462 1.1 mrg else if (GET_CODE (y) == CALL)
4463 1.1 mrg {
4464 1.1 mrg canon_reg (y, insn);
4465 1.1 mrg apply_change_group ();
4466 1.1 mrg fold_rtx (y, insn);
4467 1.1 mrg }
4468 1.1 mrg }
4469 1.1 mrg }
4470 1.1 mrg
4471 1.1 mrg if (n_sets == 1 && REG_NOTES (insn) != 0
4472 1.1 mrg && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
4473 1.1 mrg {
4474 1.1 mrg /* We potentially will process this insn many times. Therefore,
4475 1.1 mrg drop the REG_EQUAL note if it is equal to the SET_SRC of the
4476 1.1 mrg unique set in INSN.
4477 1.1 mrg
4478 1.1 mrg Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
4479 1.1 mrg because cse_insn handles those specially. */
4480 1.1 mrg if (GET_CODE (SET_DEST (sets[0].rtl)) != STRICT_LOW_PART
4481 1.1 mrg && rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
4482 1.1 mrg remove_note (insn, tem);
4483 1.1 mrg else
4484 1.1 mrg {
4485 1.1 mrg canon_reg (XEXP (tem, 0), insn);
4486 1.1 mrg apply_change_group ();
4487 1.1 mrg XEXP (tem, 0) = fold_rtx (XEXP (tem, 0), insn);
4488 1.1 mrg df_notes_rescan (insn);
4489 1.1 mrg }
4490 1.1 mrg }
4491 1.1 mrg
4492 1.1 mrg /* Canonicalize sources and addresses of destinations.
4493 1.1 mrg We do this in a separate pass to avoid problems when a MATCH_DUP is
4494 1.1 mrg present in the insn pattern. In that case, we want to ensure that
4495 1.1 mrg we don't break the duplicate nature of the pattern. So we will replace
4496 1.1 mrg both operands at the same time. Otherwise, we would fail to find an
4497 1.1 mrg equivalent substitution in the loop calling validate_change below.
4498 1.1 mrg
4499 1.1 mrg We used to suppress canonicalization of DEST if it appears in SRC,
4500 1.1 mrg but we don't do this any more. */
4501 1.1 mrg
4502 1.1 mrg for (i = 0; i < n_sets; i++)
4503 1.1 mrg {
4504 1.1 mrg rtx dest = SET_DEST (sets[i].rtl);
4505 1.1 mrg rtx src = SET_SRC (sets[i].rtl);
4506 1.1 mrg rtx new_rtx = canon_reg (src, insn);
4507 1.1 mrg
4508 1.1 mrg validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
4509 1.1 mrg
4510 1.1 mrg if (GET_CODE (dest) == ZERO_EXTRACT)
4511 1.1 mrg {
4512 1.1 mrg validate_change (insn, &XEXP (dest, 1),
4513 1.1 mrg canon_reg (XEXP (dest, 1), insn), 1);
4514 1.1 mrg validate_change (insn, &XEXP (dest, 2),
4515 1.1 mrg canon_reg (XEXP (dest, 2), insn), 1);
4516 1.1 mrg }
4517 1.1 mrg
4518 1.1 mrg while (GET_CODE (dest) == SUBREG
4519 1.1 mrg || GET_CODE (dest) == ZERO_EXTRACT
4520 1.1 mrg || GET_CODE (dest) == STRICT_LOW_PART)
4521 1.1 mrg dest = XEXP (dest, 0);
4522 1.1 mrg
4523 1.1 mrg if (MEM_P (dest))
4524 1.1 mrg canon_reg (dest, insn);
4525 1.1 mrg }
4526 1.1 mrg
4527 1.1 mrg /* Now that we have done all the replacements, we can apply the change
4528 1.1 mrg group and see if they all work. Note that this will cause some
4529 1.1 mrg canonicalizations that would have worked individually not to be applied
4530 1.1 mrg because some other canonicalization didn't work, but this should not
4531 1.1 mrg occur often.
4532 1.1 mrg
4533 1.1 mrg The result of apply_change_group can be ignored; see canon_reg. */
4534 1.1 mrg
4535 1.1 mrg apply_change_group ();
4536 1.1 mrg }
4537 1.1 mrg
/* Main function of CSE.
   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
   Then install the new sources and destinations in the table
   of available values.  */
4544 1.1 mrg
4545 1.1 mrg static void
4546 1.1 mrg cse_insn (rtx_insn *insn)
4547 1.1 mrg {
4548 1.1 mrg rtx x = PATTERN (insn);
4549 1.1 mrg int i;
4550 1.1 mrg rtx tem;
4551 1.1 mrg int n_sets = 0;
4552 1.1 mrg
4553 1.1 mrg rtx src_eqv = 0;
4554 1.1 mrg struct table_elt *src_eqv_elt = 0;
4555 1.1 mrg int src_eqv_volatile = 0;
4556 1.1 mrg int src_eqv_in_memory = 0;
4557 1.1 mrg unsigned src_eqv_hash = 0;
4558 1.1 mrg
4559 1.1 mrg this_insn = insn;
4560 1.1 mrg
4561 1.1 mrg /* Find all regs explicitly clobbered in this insn,
4562 1.1 mrg to ensure they are not replaced with any other regs
4563 1.1 mrg elsewhere in this insn. */
4564 1.1 mrg invalidate_from_sets_and_clobbers (insn);
4565 1.1 mrg
4566 1.1 mrg /* Record all the SETs in this instruction. */
4567 1.1 mrg auto_vec<struct set, 8> sets;
4568 1.1 mrg n_sets = find_sets_in_insn (insn, (vec<struct set>*)&sets);
4569 1.1 mrg
4570 1.1 mrg /* Substitute the canonical register where possible. */
4571 1.1 mrg canonicalize_insn (insn, (vec<struct set>*)&sets);
4572 1.1 mrg
4573 1.1 mrg /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
4574 1.1 mrg if different, or if the DEST is a STRICT_LOW_PART/ZERO_EXTRACT. The
4575 1.1 mrg latter condition is necessary because SRC_EQV is handled specially for
4576 1.1 mrg this case, and if it isn't set, then there will be no equivalence
4577 1.1 mrg for the destination. */
4578 1.1 mrg if (n_sets == 1 && REG_NOTES (insn) != 0
4579 1.1 mrg && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
4580 1.1 mrg {
4581 1.1 mrg
4582 1.1 mrg if (GET_CODE (SET_DEST (sets[0].rtl)) != ZERO_EXTRACT
4583 1.1 mrg && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4584 1.1 mrg || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4585 1.1 mrg src_eqv = copy_rtx (XEXP (tem, 0));
4586 1.1 mrg /* If DEST is of the form ZERO_EXTACT, as in:
4587 1.1 mrg (set (zero_extract:SI (reg:SI 119)
4588 1.1 mrg (const_int 16 [0x10])
4589 1.1 mrg (const_int 16 [0x10]))
4590 1.1 mrg (const_int 51154 [0xc7d2]))
4591 1.1 mrg REG_EQUAL note will specify the value of register (reg:SI 119) at this
4592 1.1 mrg point. Note that this is different from SRC_EQV. We can however
4593 1.1 mrg calculate SRC_EQV with the position and width of ZERO_EXTRACT. */
4594 1.1 mrg else if (GET_CODE (SET_DEST (sets[0].rtl)) == ZERO_EXTRACT
4595 1.1 mrg && CONST_INT_P (XEXP (tem, 0))
4596 1.1 mrg && CONST_INT_P (XEXP (SET_DEST (sets[0].rtl), 1))
4597 1.1 mrg && CONST_INT_P (XEXP (SET_DEST (sets[0].rtl), 2)))
4598 1.1 mrg {
4599 1.1 mrg rtx dest_reg = XEXP (SET_DEST (sets[0].rtl), 0);
4600 1.1 mrg /* This is the mode of XEXP (tem, 0) as well. */
4601 1.1 mrg scalar_int_mode dest_mode
4602 1.1 mrg = as_a <scalar_int_mode> (GET_MODE (dest_reg));
4603 1.1 mrg rtx width = XEXP (SET_DEST (sets[0].rtl), 1);
4604 1.1 mrg rtx pos = XEXP (SET_DEST (sets[0].rtl), 2);
4605 1.1 mrg HOST_WIDE_INT val = INTVAL (XEXP (tem, 0));
4606 1.1 mrg HOST_WIDE_INT mask;
4607 1.1 mrg unsigned int shift;
4608 1.1 mrg if (BITS_BIG_ENDIAN)
4609 1.1 mrg shift = (GET_MODE_PRECISION (dest_mode)
4610 1.1 mrg - INTVAL (pos) - INTVAL (width));
4611 1.1 mrg else
4612 1.1 mrg shift = INTVAL (pos);
4613 1.1 mrg if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
4614 1.1 mrg mask = HOST_WIDE_INT_M1;
4615 1.1 mrg else
4616 1.1 mrg mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
4617 1.1 mrg val = (val >> shift) & mask;
4618 1.1 mrg src_eqv = GEN_INT (val);
4619 1.1 mrg }
4620 1.1 mrg }
4621 1.1 mrg
4622 1.1 mrg /* Set sets[i].src_elt to the class each source belongs to.
4623 1.1 mrg Detect assignments from or to volatile things
4624 1.1 mrg and set set[i] to zero so they will be ignored
4625 1.1 mrg in the rest of this function.
4626 1.1 mrg
4627 1.1 mrg Nothing in this loop changes the hash table or the register chains. */
4628 1.1 mrg
4629 1.1 mrg for (i = 0; i < n_sets; i++)
4630 1.1 mrg {
4631 1.1 mrg bool repeat = false;
4632 1.1 mrg bool noop_insn = false;
4633 1.1 mrg rtx src, dest;
4634 1.1 mrg rtx src_folded;
4635 1.1 mrg struct table_elt *elt = 0, *p;
4636 1.1 mrg machine_mode mode;
4637 1.1 mrg rtx src_eqv_here;
4638 1.1 mrg rtx src_const = 0;
4639 1.1 mrg rtx src_related = 0;
4640 1.1 mrg bool src_related_is_const_anchor = false;
4641 1.1 mrg struct table_elt *src_const_elt = 0;
4642 1.1 mrg int src_cost = MAX_COST;
4643 1.1 mrg int src_eqv_cost = MAX_COST;
4644 1.1 mrg int src_folded_cost = MAX_COST;
4645 1.1 mrg int src_related_cost = MAX_COST;
4646 1.1 mrg int src_elt_cost = MAX_COST;
4647 1.1 mrg int src_regcost = MAX_COST;
4648 1.1 mrg int src_eqv_regcost = MAX_COST;
4649 1.1 mrg int src_folded_regcost = MAX_COST;
4650 1.1 mrg int src_related_regcost = MAX_COST;
4651 1.1 mrg int src_elt_regcost = MAX_COST;
4652 1.1 mrg scalar_int_mode int_mode;
4653 1.1 mrg
4654 1.1 mrg dest = SET_DEST (sets[i].rtl);
4655 1.1 mrg src = SET_SRC (sets[i].rtl);
4656 1.1 mrg
4657 1.1 mrg /* If SRC is a constant that has no machine mode,
4658 1.1 mrg hash it with the destination's machine mode.
4659 1.1 mrg This way we can keep different modes separate. */
4660 1.1 mrg
4661 1.1 mrg mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4662 1.1 mrg sets[i].mode = mode;
4663 1.1 mrg
4664 1.1 mrg if (src_eqv)
4665 1.1 mrg {
4666 1.1 mrg machine_mode eqvmode = mode;
4667 1.1 mrg if (GET_CODE (dest) == STRICT_LOW_PART)
4668 1.1 mrg eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4669 1.1 mrg do_not_record = 0;
4670 1.1 mrg hash_arg_in_memory = 0;
4671 1.1 mrg src_eqv_hash = HASH (src_eqv, eqvmode);
4672 1.1 mrg
4673 1.1 mrg /* Find the equivalence class for the equivalent expression. */
4674 1.1 mrg
4675 1.1 mrg if (!do_not_record)
4676 1.1 mrg src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4677 1.1 mrg
4678 1.1 mrg src_eqv_volatile = do_not_record;
4679 1.1 mrg src_eqv_in_memory = hash_arg_in_memory;
4680 1.1 mrg }
4681 1.1 mrg
4682 1.1 mrg /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4683 1.1 mrg value of the INNER register, not the destination. So it is not
4684 1.1 mrg a valid substitution for the source. But save it for later. */
4685 1.1 mrg if (GET_CODE (dest) == STRICT_LOW_PART)
4686 1.1 mrg src_eqv_here = 0;
4687 1.1 mrg else
4688 1.1 mrg src_eqv_here = src_eqv;
4689 1.1 mrg
4690 1.1 mrg /* Simplify and foldable subexpressions in SRC. Then get the fully-
4691 1.1 mrg simplified result, which may not necessarily be valid. */
4692 1.1 mrg src_folded = fold_rtx (src, NULL);
4693 1.1 mrg
4694 1.1 mrg #if 0
4695 1.1 mrg /* ??? This caused bad code to be generated for the m68k port with -O2.
4696 1.1 mrg Suppose src is (CONST_INT -1), and that after truncation src_folded
4697 1.1 mrg is (CONST_INT 3). Suppose src_folded is then used for src_const.
4698 1.1 mrg At the end we will add src and src_const to the same equivalence
4699 1.1 mrg class. We now have 3 and -1 on the same equivalence class. This
4700 1.1 mrg causes later instructions to be mis-optimized. */
4701 1.1 mrg /* If storing a constant in a bitfield, pre-truncate the constant
4702 1.1 mrg so we will be able to record it later. */
4703 1.1 mrg if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4704 1.1 mrg {
4705 1.1 mrg rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4706 1.1 mrg
4707 1.1 mrg if (CONST_INT_P (src)
4708 1.1 mrg && CONST_INT_P (width)
4709 1.1 mrg && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4710 1.1 mrg && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4711 1.1 mrg src_folded
4712 1.1 mrg = GEN_INT (INTVAL (src) & ((HOST_WIDE_INT_1
4713 1.1 mrg << INTVAL (width)) - 1));
4714 1.1 mrg }
4715 1.1 mrg #endif
4716 1.1 mrg
4717 1.1 mrg /* Compute SRC's hash code, and also notice if it
4718 1.1 mrg should not be recorded at all. In that case,
4719 1.1 mrg prevent any further processing of this assignment.
4720 1.1 mrg
4721 1.1 mrg We set DO_NOT_RECORD if the destination has a REG_UNUSED note.
4722 1.1 mrg This avoids getting the source register into the tables, where it
4723 1.1 mrg may be invalidated later (via REG_QTY), then trigger an ICE upon
4724 1.1 mrg re-insertion.
4725 1.1 mrg
4726 1.1 mrg This is only a problem in multi-set insns. If it were a single
4727 1.1 mrg set the dead copy would have been removed. If the RHS were anything
4728 1.1 mrg but a simple REG, then we won't call insert_regs and thus there's
4729 1.1 mrg no potential for triggering the ICE. */
4730 1.1 mrg do_not_record = (REG_P (dest)
4731 1.1 mrg && REG_P (src)
4732 1.1 mrg && find_reg_note (insn, REG_UNUSED, dest));
4733 1.1 mrg hash_arg_in_memory = 0;
4734 1.1 mrg
4735 1.1 mrg sets[i].src = src;
4736 1.1 mrg sets[i].src_hash = HASH (src, mode);
4737 1.1 mrg sets[i].src_volatile = do_not_record;
4738 1.1 mrg sets[i].src_in_memory = hash_arg_in_memory;
4739 1.1 mrg
4740 1.1 mrg /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4741 1.1 mrg a pseudo, do not record SRC. Using SRC as a replacement for
4742 1.1 mrg anything else will be incorrect in that situation. Note that
4743 1.1 mrg this usually occurs only for stack slots, in which case all the
4744 1.1 mrg RTL would be referring to SRC, so we don't lose any optimization
4745 1.1 mrg opportunities by not having SRC in the hash table. */
4746 1.1 mrg
4747 1.1 mrg if (MEM_P (src)
4748 1.1 mrg && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4749 1.1 mrg && REG_P (dest)
4750 1.1 mrg && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4751 1.1 mrg sets[i].src_volatile = 1;
4752 1.1 mrg
4753 1.1 mrg else if (GET_CODE (src) == ASM_OPERANDS
4754 1.1 mrg && GET_CODE (x) == PARALLEL)
4755 1.1 mrg {
4756 1.1 mrg /* Do not record result of a non-volatile inline asm with
4757 1.1 mrg more than one result. */
4758 1.1 mrg if (n_sets > 1)
4759 1.1 mrg sets[i].src_volatile = 1;
4760 1.1 mrg
4761 1.1 mrg int j, lim = XVECLEN (x, 0);
4762 1.1 mrg for (j = 0; j < lim; j++)
4763 1.1 mrg {
4764 1.1 mrg rtx y = XVECEXP (x, 0, j);
4765 1.1 mrg /* And do not record result of a non-volatile inline asm
4766 1.1 mrg with "memory" clobber. */
4767 1.1 mrg if (GET_CODE (y) == CLOBBER && MEM_P (XEXP (y, 0)))
4768 1.1 mrg {
4769 1.1 mrg sets[i].src_volatile = 1;
4770 1.1 mrg break;
4771 1.1 mrg }
4772 1.1 mrg }
4773 1.1 mrg }
4774 1.1 mrg
4775 1.1 mrg #if 0
4776 1.1 mrg /* It is no longer clear why we used to do this, but it doesn't
4777 1.1 mrg appear to still be needed. So let's try without it since this
4778 1.1 mrg code hurts cse'ing widened ops. */
4779 1.1 mrg /* If source is a paradoxical subreg (such as QI treated as an SI),
4780 1.1 mrg treat it as volatile. It may do the work of an SI in one context
4781 1.1 mrg where the extra bits are not being used, but cannot replace an SI
4782 1.1 mrg in general. */
4783 1.1 mrg if (paradoxical_subreg_p (src))
4784 1.1 mrg sets[i].src_volatile = 1;
4785 1.1 mrg #endif
4786 1.1 mrg
4787 1.1 mrg /* Locate all possible equivalent forms for SRC. Try to replace
4788 1.1 mrg SRC in the insn with each cheaper equivalent.
4789 1.1 mrg
4790 1.1 mrg We have the following types of equivalents: SRC itself, a folded
4791 1.1 mrg version, a value given in a REG_EQUAL note, or a value related
4792 1.1 mrg to a constant.
4793 1.1 mrg
4794 1.1 mrg Each of these equivalents may be part of an additional class
4795 1.1 mrg of equivalents (if more than one is in the table, they must be in
4796 1.1 mrg the same class; we check for this).
4797 1.1 mrg
4798 1.1 mrg If the source is volatile, we don't do any table lookups.
4799 1.1 mrg
4800 1.1 mrg We note any constant equivalent for possible later use in a
4801 1.1 mrg REG_NOTE. */
4802 1.1 mrg
4803 1.1 mrg if (!sets[i].src_volatile)
4804 1.1 mrg elt = lookup (src, sets[i].src_hash, mode);
4805 1.1 mrg
4806 1.1 mrg sets[i].src_elt = elt;
4807 1.1 mrg
4808 1.1 mrg if (elt && src_eqv_here && src_eqv_elt)
4809 1.1 mrg {
4810 1.1 mrg if (elt->first_same_value != src_eqv_elt->first_same_value)
4811 1.1 mrg {
4812 1.1 mrg /* The REG_EQUAL is indicating that two formerly distinct
4813 1.1 mrg classes are now equivalent. So merge them. */
4814 1.1 mrg merge_equiv_classes (elt, src_eqv_elt);
4815 1.1 mrg src_eqv_hash = HASH (src_eqv, elt->mode);
4816 1.1 mrg src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4817 1.1 mrg }
4818 1.1 mrg
4819 1.1 mrg src_eqv_here = 0;
4820 1.1 mrg }
4821 1.1 mrg
4822 1.1 mrg else if (src_eqv_elt)
4823 1.1 mrg elt = src_eqv_elt;
4824 1.1 mrg
4825 1.1 mrg /* Try to find a constant somewhere and record it in `src_const'.
4826 1.1 mrg Record its table element, if any, in `src_const_elt'. Look in
4827 1.1 mrg any known equivalences first. (If the constant is not in the
4828 1.1 mrg table, also set `sets[i].src_const_hash'). */
4829 1.1 mrg if (elt)
4830 1.1 mrg for (p = elt->first_same_value; p; p = p->next_same_value)
4831 1.1 mrg if (p->is_const)
4832 1.1 mrg {
4833 1.1 mrg src_const = p->exp;
4834 1.1 mrg src_const_elt = elt;
4835 1.1 mrg break;
4836 1.1 mrg }
4837 1.1 mrg
4838 1.1 mrg if (src_const == 0
4839 1.1 mrg && (CONSTANT_P (src_folded)
4840 1.1 mrg /* Consider (minus (label_ref L1) (label_ref L2)) as
4841 1.1 mrg "constant" here so we will record it. This allows us
4842 1.1 mrg to fold switch statements when an ADDR_DIFF_VEC is used. */
4843 1.1 mrg || (GET_CODE (src_folded) == MINUS
4844 1.1 mrg && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4845 1.1 mrg && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4846 1.1 mrg src_const = src_folded, src_const_elt = elt;
4847 1.1 mrg else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4848 1.1 mrg src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4849 1.1 mrg
4850 1.1 mrg /* If we don't know if the constant is in the table, get its
4851 1.1 mrg hash code and look it up. */
4852 1.1 mrg if (src_const && src_const_elt == 0)
4853 1.1 mrg {
4854 1.1 mrg sets[i].src_const_hash = HASH (src_const, mode);
4855 1.1 mrg src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4856 1.1 mrg }
4857 1.1 mrg
4858 1.1 mrg sets[i].src_const = src_const;
4859 1.1 mrg sets[i].src_const_elt = src_const_elt;
4860 1.1 mrg
4861 1.1 mrg /* If the constant and our source are both in the table, mark them as
4862 1.1 mrg equivalent. Otherwise, if a constant is in the table but the source
4863 1.1 mrg isn't, set ELT to it. */
4864 1.1 mrg if (src_const_elt && elt
4865 1.1 mrg && src_const_elt->first_same_value != elt->first_same_value)
4866 1.1 mrg merge_equiv_classes (elt, src_const_elt);
4867 1.1 mrg else if (src_const_elt && elt == 0)
4868 1.1 mrg elt = src_const_elt;
4869 1.1 mrg
4870 1.1 mrg /* See if there is a register linearly related to a constant
4871 1.1 mrg equivalent of SRC. */
4872 1.1 mrg if (src_const
4873 1.1 mrg && (GET_CODE (src_const) == CONST
4874 1.1 mrg || (src_const_elt && src_const_elt->related_value != 0)))
4875 1.1 mrg {
4876 1.1 mrg src_related = use_related_value (src_const, src_const_elt);
4877 1.1 mrg if (src_related)
4878 1.1 mrg {
4879 1.1 mrg struct table_elt *src_related_elt
4880 1.1 mrg = lookup (src_related, HASH (src_related, mode), mode);
4881 1.1 mrg if (src_related_elt && elt)
4882 1.1 mrg {
4883 1.1 mrg if (elt->first_same_value
4884 1.1 mrg != src_related_elt->first_same_value)
4885 1.1 mrg /* This can occur when we previously saw a CONST
4886 1.1 mrg involving a SYMBOL_REF and then see the SYMBOL_REF
4887 1.1 mrg twice. Merge the involved classes. */
4888 1.1 mrg merge_equiv_classes (elt, src_related_elt);
4889 1.1 mrg
4890 1.1 mrg src_related = 0;
4891 1.1 mrg src_related_elt = 0;
4892 1.1 mrg }
4893 1.1 mrg else if (src_related_elt && elt == 0)
4894 1.1 mrg elt = src_related_elt;
4895 1.1 mrg }
4896 1.1 mrg }
4897 1.1 mrg
4898 1.1 mrg /* See if we have a CONST_INT that is already in a register in a
4899 1.1 mrg wider mode. */
4900 1.1 mrg
4901 1.1 mrg if (src_const && src_related == 0 && CONST_INT_P (src_const)
4902 1.1 mrg && is_int_mode (mode, &int_mode)
4903 1.1 mrg && GET_MODE_PRECISION (int_mode) < BITS_PER_WORD)
4904 1.1 mrg {
4905 1.1 mrg opt_scalar_int_mode wider_mode_iter;
4906 1.1 mrg FOR_EACH_WIDER_MODE (wider_mode_iter, int_mode)
4907 1.1 mrg {
4908 1.1 mrg scalar_int_mode wider_mode = wider_mode_iter.require ();
4909 1.1 mrg if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
4910 1.1 mrg break;
4911 1.1 mrg
4912 1.1 mrg struct table_elt *const_elt
4913 1.1 mrg = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4914 1.1 mrg
4915 1.1 mrg if (const_elt == 0)
4916 1.1 mrg continue;
4917 1.1 mrg
4918 1.1 mrg for (const_elt = const_elt->first_same_value;
4919 1.1 mrg const_elt; const_elt = const_elt->next_same_value)
4920 1.1 mrg if (REG_P (const_elt->exp))
4921 1.1 mrg {
4922 1.1 mrg src_related = gen_lowpart (int_mode, const_elt->exp);
4923 1.1 mrg break;
4924 1.1 mrg }
4925 1.1 mrg
4926 1.1 mrg if (src_related != 0)
4927 1.1 mrg break;
4928 1.1 mrg }
4929 1.1 mrg }
4930 1.1 mrg
4931 1.1 mrg /* Another possibility is that we have an AND with a constant in
4932 1.1 mrg a mode narrower than a word. If so, it might have been generated
4933 1.1 mrg as part of an "if" which would narrow the AND. If we already
4934 1.1 mrg have done the AND in a wider mode, we can use a SUBREG of that
4935 1.1 mrg value. */
4936 1.1 mrg
4937 1.1 mrg if (flag_expensive_optimizations && ! src_related
4938 1.1 mrg && is_a <scalar_int_mode> (mode, &int_mode)
4939 1.1 mrg && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
4940 1.1 mrg && GET_MODE_SIZE (int_mode) < UNITS_PER_WORD)
4941 1.1 mrg {
4942 1.1 mrg opt_scalar_int_mode tmode_iter;
4943 1.1 mrg rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4944 1.1 mrg
4945 1.1 mrg FOR_EACH_WIDER_MODE (tmode_iter, int_mode)
4946 1.1 mrg {
4947 1.1 mrg scalar_int_mode tmode = tmode_iter.require ();
4948 1.1 mrg if (GET_MODE_SIZE (tmode) > UNITS_PER_WORD)
4949 1.1 mrg break;
4950 1.1 mrg
4951 1.1 mrg rtx inner = gen_lowpart (tmode, XEXP (src, 0));
4952 1.1 mrg struct table_elt *larger_elt;
4953 1.1 mrg
4954 1.1 mrg if (inner)
4955 1.1 mrg {
4956 1.1 mrg PUT_MODE (new_and, tmode);
4957 1.1 mrg XEXP (new_and, 0) = inner;
4958 1.1 mrg larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4959 1.1 mrg if (larger_elt == 0)
4960 1.1 mrg continue;
4961 1.1 mrg
4962 1.1 mrg for (larger_elt = larger_elt->first_same_value;
4963 1.1 mrg larger_elt; larger_elt = larger_elt->next_same_value)
4964 1.1 mrg if (REG_P (larger_elt->exp))
4965 1.1 mrg {
4966 1.1 mrg src_related
4967 1.1 mrg = gen_lowpart (int_mode, larger_elt->exp);
4968 1.1 mrg break;
4969 1.1 mrg }
4970 1.1 mrg
4971 1.1 mrg if (src_related)
4972 1.1 mrg break;
4973 1.1 mrg }
4974 1.1 mrg }
4975 1.1 mrg }
4976 1.1 mrg
4977 1.1 mrg /* See if a MEM has already been loaded with a widening operation;
4978 1.1 mrg if it has, we can use a subreg of that. Many CISC machines
4979 1.1 mrg also have such operations, but this is only likely to be
4980 1.1 mrg beneficial on these machines. */
4981 1.1 mrg
4982 1.1 mrg rtx_code extend_op;
4983 1.1 mrg if (flag_expensive_optimizations && src_related == 0
4984 1.1 mrg && MEM_P (src) && ! do_not_record
4985 1.1 mrg && is_a <scalar_int_mode> (mode, &int_mode)
4986 1.1 mrg && (extend_op = load_extend_op (int_mode)) != UNKNOWN)
4987 1.1 mrg {
4988 1.1 mrg struct rtx_def memory_extend_buf;
4989 1.1 mrg rtx memory_extend_rtx = &memory_extend_buf;
4990 1.1 mrg
4991 1.1 mrg /* Set what we are trying to extend and the operation it might
4992 1.1 mrg have been extended with. */
4993 1.1 mrg memset (memory_extend_rtx, 0, sizeof (*memory_extend_rtx));
4994 1.1 mrg PUT_CODE (memory_extend_rtx, extend_op);
4995 1.1 mrg XEXP (memory_extend_rtx, 0) = src;
4996 1.1 mrg
4997 1.1 mrg opt_scalar_int_mode tmode_iter;
4998 1.1 mrg FOR_EACH_WIDER_MODE (tmode_iter, int_mode)
4999 1.1 mrg {
5000 1.1 mrg struct table_elt *larger_elt;
5001 1.1 mrg
5002 1.1 mrg scalar_int_mode tmode = tmode_iter.require ();
5003 1.1 mrg if (GET_MODE_SIZE (tmode) > UNITS_PER_WORD)
5004 1.1 mrg break;
5005 1.1 mrg
5006 1.1 mrg PUT_MODE (memory_extend_rtx, tmode);
5007 1.1 mrg larger_elt = lookup (memory_extend_rtx,
5008 1.1 mrg HASH (memory_extend_rtx, tmode), tmode);
5009 1.1 mrg if (larger_elt == 0)
5010 1.1 mrg continue;
5011 1.1 mrg
5012 1.1 mrg for (larger_elt = larger_elt->first_same_value;
5013 1.1 mrg larger_elt; larger_elt = larger_elt->next_same_value)
5014 1.1 mrg if (REG_P (larger_elt->exp))
5015 1.1 mrg {
5016 1.1 mrg src_related = gen_lowpart (int_mode, larger_elt->exp);
5017 1.1 mrg break;
5018 1.1 mrg }
5019 1.1 mrg
5020 1.1 mrg if (src_related)
5021 1.1 mrg break;
5022 1.1 mrg }
5023 1.1 mrg }
5024 1.1 mrg
5025 1.1 mrg /* Try to express the constant using a register+offset expression
5026 1.1 mrg derived from a constant anchor. */
5027 1.1 mrg
5028 1.1 mrg if (targetm.const_anchor
5029 1.1 mrg && !src_related
5030 1.1 mrg && src_const
5031 1.1 mrg && GET_CODE (src_const) == CONST_INT)
5032 1.1 mrg {
5033 1.1 mrg src_related = try_const_anchors (src_const, mode);
5034 1.1 mrg src_related_is_const_anchor = src_related != NULL_RTX;
5035 1.1 mrg }
5036 1.1 mrg
5037 1.1 mrg /* Try to re-materialize a vec_dup with an existing constant. */
5038 1.1 mrg rtx src_elt;
5039 1.1 mrg if ((!src_eqv_here || CONSTANT_P (src_eqv_here))
5040 1.1 mrg && const_vec_duplicate_p (src, &src_elt))
5041 1.1 mrg {
5042 1.1 mrg machine_mode const_mode = GET_MODE_INNER (GET_MODE (src));
5043 1.1 mrg struct table_elt *related_elt
5044 1.1 mrg = lookup (src_elt, HASH (src_elt, const_mode), const_mode);
5045 1.1 mrg if (related_elt)
5046 1.1 mrg {
5047 1.1 mrg for (related_elt = related_elt->first_same_value;
5048 1.1 mrg related_elt; related_elt = related_elt->next_same_value)
5049 1.1 mrg if (REG_P (related_elt->exp))
5050 1.1 mrg {
5051 1.1 mrg /* We don't need to compare costs with an existing (constant)
5052 1.1 mrg src_eqv_here, since any such src_eqv_here should already be
5053 1.1 mrg available in src_const. */
5054 1.1 mrg src_eqv_here
5055 1.1 mrg = gen_rtx_VEC_DUPLICATE (GET_MODE (src),
5056 1.1 mrg related_elt->exp);
5057 1.1 mrg break;
5058 1.1 mrg }
5059 1.1 mrg }
5060 1.1 mrg }
5061 1.1 mrg
5062 1.1 mrg if (src == src_folded)
5063 1.1 mrg src_folded = 0;
5064 1.1 mrg
5065 1.1 mrg /* At this point, ELT, if nonzero, points to a class of expressions
5066 1.1 mrg equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5067 1.1 mrg and SRC_RELATED, if nonzero, each contain additional equivalent
5068 1.1 mrg expressions. Prune these latter expressions by deleting expressions
5069 1.1 mrg already in the equivalence class.
5070 1.1 mrg
5071 1.1 mrg Check for an equivalent identical to the destination. If found,
5072 1.1 mrg this is the preferred equivalent since it will likely lead to
5073 1.1 mrg elimination of the insn. Indicate this by placing it in
5074 1.1 mrg `src_related'. */
5075 1.1 mrg
5076 1.1 mrg if (elt)
5077 1.1 mrg elt = elt->first_same_value;
5078 1.1 mrg for (p = elt; p; p = p->next_same_value)
5079 1.1 mrg {
5080 1.1 mrg enum rtx_code code = GET_CODE (p->exp);
5081 1.1 mrg
5082 1.1 mrg /* If the expression is not valid, ignore it. Then we do not
5083 1.1 mrg have to check for validity below. In most cases, we can use
5084 1.1 mrg `rtx_equal_p', since canonicalization has already been done. */
5085 1.1 mrg if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5086 1.1 mrg continue;
5087 1.1 mrg
5088 1.1 mrg /* Also skip paradoxical subregs, unless that's what we're
5089 1.1 mrg looking for. */
5090 1.1 mrg if (paradoxical_subreg_p (p->exp)
5091 1.1 mrg && ! (src != 0
5092 1.1 mrg && GET_CODE (src) == SUBREG
5093 1.1 mrg && GET_MODE (src) == GET_MODE (p->exp)
5094 1.1 mrg && partial_subreg_p (GET_MODE (SUBREG_REG (src)),
5095 1.1 mrg GET_MODE (SUBREG_REG (p->exp)))))
5096 1.1 mrg continue;
5097 1.1 mrg
5098 1.1 mrg if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5099 1.1 mrg src = 0;
5100 1.1 mrg else if (src_folded && GET_CODE (src_folded) == code
5101 1.1 mrg && rtx_equal_p (src_folded, p->exp))
5102 1.1 mrg src_folded = 0;
5103 1.1 mrg else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5104 1.1 mrg && rtx_equal_p (src_eqv_here, p->exp))
5105 1.1 mrg src_eqv_here = 0;
5106 1.1 mrg else if (src_related && GET_CODE (src_related) == code
5107 1.1 mrg && rtx_equal_p (src_related, p->exp))
5108 1.1 mrg src_related = 0;
5109 1.1 mrg
5110 1.1 mrg /* This is the same as the destination of the insns, we want
5111 1.1 mrg to prefer it. Copy it to src_related. The code below will
5112 1.1 mrg then give it a negative cost. */
5113 1.1 mrg if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5114 1.1 mrg src_related = p->exp;
5115 1.1 mrg }
5116 1.1 mrg
5117 1.1 mrg /* Find the cheapest valid equivalent, trying all the available
5118 1.1 mrg possibilities. Prefer items not in the hash table to ones
5119 1.1 mrg that are when they are equal cost. Note that we can never
5120 1.1 mrg worsen an insn as the current contents will also succeed.
5121 1.1 mrg If we find an equivalent identical to the destination, use it as best,
5122 1.1 mrg since this insn will probably be eliminated in that case. */
5123 1.1 mrg if (src)
5124 1.1 mrg {
5125 1.1 mrg if (rtx_equal_p (src, dest))
5126 1.1 mrg src_cost = src_regcost = -1;
5127 1.1 mrg else
5128 1.1 mrg {
5129 1.1 mrg src_cost = COST (src, mode);
5130 1.1 mrg src_regcost = approx_reg_cost (src);
5131 1.1 mrg }
5132 1.1 mrg }
5133 1.1 mrg
5134 1.1 mrg if (src_eqv_here)
5135 1.1 mrg {
5136 1.1 mrg if (rtx_equal_p (src_eqv_here, dest))
5137 1.1 mrg src_eqv_cost = src_eqv_regcost = -1;
5138 1.1 mrg else
5139 1.1 mrg {
5140 1.1 mrg src_eqv_cost = COST (src_eqv_here, mode);
5141 1.1 mrg src_eqv_regcost = approx_reg_cost (src_eqv_here);
5142 1.1 mrg }
5143 1.1 mrg }
5144 1.1 mrg
5145 1.1 mrg if (src_folded)
5146 1.1 mrg {
5147 1.1 mrg if (rtx_equal_p (src_folded, dest))
5148 1.1 mrg src_folded_cost = src_folded_regcost = -1;
5149 1.1 mrg else
5150 1.1 mrg {
5151 1.1 mrg src_folded_cost = COST (src_folded, mode);
5152 1.1 mrg src_folded_regcost = approx_reg_cost (src_folded);
5153 1.1 mrg }
5154 1.1 mrg }
5155 1.1 mrg
5156 1.1 mrg if (src_related)
5157 1.1 mrg {
5158 1.1 mrg if (rtx_equal_p (src_related, dest))
5159 1.1 mrg src_related_cost = src_related_regcost = -1;
5160 1.1 mrg else
5161 1.1 mrg {
5162 1.1 mrg src_related_cost = COST (src_related, mode);
5163 1.1 mrg src_related_regcost = approx_reg_cost (src_related);
5164 1.1 mrg
5165 1.1 mrg /* If a const-anchor is used to synthesize a constant that
5166 1.1 mrg normally requires multiple instructions then slightly prefer
5167 1.1 mrg it over the original sequence. These instructions are likely
5168 1.1 mrg to become redundant now. We can't compare against the cost
5169 1.1 mrg of src_eqv_here because, on MIPS for example, multi-insn
5170 1.1 mrg constants have zero cost; they are assumed to be hoisted from
5171 1.1 mrg loops. */
5172 1.1 mrg if (src_related_is_const_anchor
5173 1.1 mrg && src_related_cost == src_cost
5174 1.1 mrg && src_eqv_here)
5175 1.1 mrg src_related_cost--;
5176 1.1 mrg }
5177 1.1 mrg }
5178 1.1 mrg
5179 1.1 mrg /* If this was an indirect jump insn, a known label will really be
5180 1.1 mrg cheaper even though it looks more expensive. */
5181 1.1 mrg if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5182 1.1 mrg src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5183 1.1 mrg
5184 1.1 mrg /* Terminate loop when replacement made. This must terminate since
5185 1.1 mrg the current contents will be tested and will always be valid. */
5186 1.1 mrg while (1)
5187 1.1 mrg {
5188 1.1 mrg rtx trial;
5189 1.1 mrg
5190 1.1 mrg /* Skip invalid entries. */
5191 1.1 mrg while (elt && !REG_P (elt->exp)
5192 1.1 mrg && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5193 1.1 mrg elt = elt->next_same_value;
5194 1.1 mrg
5195 1.1 mrg /* A paradoxical subreg would be bad here: it'll be the right
5196 1.1 mrg size, but later may be adjusted so that the upper bits aren't
5197 1.1 mrg what we want. So reject it. */
5198 1.1 mrg if (elt != 0
5199 1.1 mrg && paradoxical_subreg_p (elt->exp)
5200 1.1 mrg /* It is okay, though, if the rtx we're trying to match
5201 1.1 mrg will ignore any of the bits we can't predict. */
5202 1.1 mrg && ! (src != 0
5203 1.1 mrg && GET_CODE (src) == SUBREG
5204 1.1 mrg && GET_MODE (src) == GET_MODE (elt->exp)
5205 1.1 mrg && partial_subreg_p (GET_MODE (SUBREG_REG (src)),
5206 1.1 mrg GET_MODE (SUBREG_REG (elt->exp)))))
5207 1.1 mrg {
5208 1.1 mrg elt = elt->next_same_value;
5209 1.1 mrg continue;
5210 1.1 mrg }
5211 1.1 mrg
5212 1.1 mrg if (elt)
5213 1.1 mrg {
5214 1.1 mrg src_elt_cost = elt->cost;
5215 1.1 mrg src_elt_regcost = elt->regcost;
5216 1.1 mrg }
5217 1.1 mrg
5218 1.1 mrg /* Find cheapest and skip it for the next time. For items
5219 1.1 mrg of equal cost, use this order:
5220 1.1 mrg src_folded, src, src_eqv, src_related and hash table entry. */
5221 1.1 mrg if (src_folded
5222 1.1 mrg && preferable (src_folded_cost, src_folded_regcost,
5223 1.1 mrg src_cost, src_regcost) <= 0
5224 1.1 mrg && preferable (src_folded_cost, src_folded_regcost,
5225 1.1 mrg src_eqv_cost, src_eqv_regcost) <= 0
5226 1.1 mrg && preferable (src_folded_cost, src_folded_regcost,
5227 1.1 mrg src_related_cost, src_related_regcost) <= 0
5228 1.1 mrg && preferable (src_folded_cost, src_folded_regcost,
5229 1.1 mrg src_elt_cost, src_elt_regcost) <= 0)
5230 1.1 mrg trial = src_folded, src_folded_cost = MAX_COST;
5231 1.1 mrg else if (src
5232 1.1 mrg && preferable (src_cost, src_regcost,
5233 1.1 mrg src_eqv_cost, src_eqv_regcost) <= 0
5234 1.1 mrg && preferable (src_cost, src_regcost,
5235 1.1 mrg src_related_cost, src_related_regcost) <= 0
5236 1.1 mrg && preferable (src_cost, src_regcost,
5237 1.1 mrg src_elt_cost, src_elt_regcost) <= 0)
5238 1.1 mrg trial = src, src_cost = MAX_COST;
5239 1.1 mrg else if (src_eqv_here
5240 1.1 mrg && preferable (src_eqv_cost, src_eqv_regcost,
5241 1.1 mrg src_related_cost, src_related_regcost) <= 0
5242 1.1 mrg && preferable (src_eqv_cost, src_eqv_regcost,
5243 1.1 mrg src_elt_cost, src_elt_regcost) <= 0)
5244 1.1 mrg trial = src_eqv_here, src_eqv_cost = MAX_COST;
5245 1.1 mrg else if (src_related
5246 1.1 mrg && preferable (src_related_cost, src_related_regcost,
5247 1.1 mrg src_elt_cost, src_elt_regcost) <= 0)
5248 1.1 mrg trial = src_related, src_related_cost = MAX_COST;
5249 1.1 mrg else
5250 1.1 mrg {
5251 1.1 mrg trial = elt->exp;
5252 1.1 mrg elt = elt->next_same_value;
5253 1.1 mrg src_elt_cost = MAX_COST;
5254 1.1 mrg }
5255 1.1 mrg
5256 1.1 mrg /* Try to optimize
5257 1.1 mrg (set (reg:M N) (const_int A))
5258 1.1 mrg (set (reg:M2 O) (const_int B))
5259 1.1 mrg (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5260 1.1 mrg (reg:M2 O)). */
5261 1.1 mrg if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5262 1.1 mrg && CONST_INT_P (trial)
5263 1.1 mrg && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
5264 1.1 mrg && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
5265 1.1 mrg && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
5266 1.1 mrg && (known_ge
5267 1.1 mrg (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl))),
5268 1.1 mrg INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))))
5269 1.1 mrg && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
5270 1.1 mrg + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
5271 1.1 mrg <= HOST_BITS_PER_WIDE_INT))
5272 1.1 mrg {
5273 1.1 mrg rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
5274 1.1 mrg rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5275 1.1 mrg rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
5276 1.1 mrg unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
5277 1.1 mrg struct table_elt *dest_elt
5278 1.1 mrg = lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
5279 1.1 mrg rtx dest_cst = NULL;
5280 1.1 mrg
5281 1.1 mrg if (dest_elt)
5282 1.1 mrg for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5283 1.1 mrg if (p->is_const && CONST_INT_P (p->exp))
5284 1.1 mrg {
5285 1.1 mrg dest_cst = p->exp;
5286 1.1 mrg break;
5287 1.1 mrg }
5288 1.1 mrg if (dest_cst)
5289 1.1 mrg {
5290 1.1 mrg HOST_WIDE_INT val = INTVAL (dest_cst);
5291 1.1 mrg HOST_WIDE_INT mask;
5292 1.1 mrg unsigned int shift;
5293 1.1 mrg /* This is the mode of DEST_CST as well. */
5294 1.1 mrg scalar_int_mode dest_mode
5295 1.1 mrg = as_a <scalar_int_mode> (GET_MODE (dest_reg));
5296 1.1 mrg if (BITS_BIG_ENDIAN)
5297 1.1 mrg shift = GET_MODE_PRECISION (dest_mode)
5298 1.1 mrg - INTVAL (pos) - INTVAL (width);
5299 1.1 mrg else
5300 1.1 mrg shift = INTVAL (pos);
5301 1.1 mrg if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5302 1.1 mrg mask = HOST_WIDE_INT_M1;
5303 1.1 mrg else
5304 1.1 mrg mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
5305 1.1 mrg val &= ~(mask << shift);
5306 1.1 mrg val |= (INTVAL (trial) & mask) << shift;
5307 1.1 mrg val = trunc_int_for_mode (val, dest_mode);
5308 1.1 mrg validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
5309 1.1 mrg dest_reg, 1);
5310 1.1 mrg validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5311 1.1 mrg GEN_INT (val), 1);
5312 1.1 mrg if (apply_change_group ())
5313 1.1 mrg {
5314 1.1 mrg rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5315 1.1 mrg if (note)
5316 1.1 mrg {
5317 1.1 mrg remove_note (insn, note);
5318 1.1 mrg df_notes_rescan (insn);
5319 1.1 mrg }
5320 1.1 mrg src_eqv = NULL_RTX;
5321 1.1 mrg src_eqv_elt = NULL;
5322 1.1 mrg src_eqv_volatile = 0;
5323 1.1 mrg src_eqv_in_memory = 0;
5324 1.1 mrg src_eqv_hash = 0;
5325 1.1 mrg repeat = true;
5326 1.1 mrg break;
5327 1.1 mrg }
5328 1.1 mrg }
5329 1.1 mrg }
5330 1.1 mrg
5331 1.1 mrg /* We don't normally have an insn matching (set (pc) (pc)), so
5332 1.1 mrg check for this separately here. We will delete such an
5333 1.1 mrg insn below.
5334 1.1 mrg
5335 1.1 mrg For other cases such as a table jump or conditional jump
5336 1.1 mrg where we know the ultimate target, go ahead and replace the
5337 1.1 mrg operand. While that may not make a valid insn, we will
5338 1.1 mrg reemit the jump below (and also insert any necessary
5339 1.1 mrg barriers). */
5340 1.1 mrg if (n_sets == 1 && dest == pc_rtx
5341 1.1 mrg && (trial == pc_rtx
5342 1.1 mrg || (GET_CODE (trial) == LABEL_REF
5343 1.1 mrg && ! condjump_p (insn))))
5344 1.1 mrg {
5345 1.1 mrg /* Don't substitute non-local labels, this confuses CFG. */
5346 1.1 mrg if (GET_CODE (trial) == LABEL_REF
5347 1.1 mrg && LABEL_REF_NONLOCAL_P (trial))
5348 1.1 mrg continue;
5349 1.1 mrg
5350 1.1 mrg SET_SRC (sets[i].rtl) = trial;
5351 1.1 mrg cse_jumps_altered = true;
5352 1.1 mrg break;
5353 1.1 mrg }
5354 1.1 mrg
5355 1.1 mrg /* Similarly, lots of targets don't allow no-op
5356 1.1 mrg (set (mem x) (mem x)) moves. Even (set (reg x) (reg x))
5357 1.1 mrg might be impossible for certain registers (like CC registers). */
5358 1.1 mrg else if (n_sets == 1
5359 1.1 mrg && !CALL_P (insn)
5360 1.1 mrg && (MEM_P (trial) || REG_P (trial))
5361 1.1 mrg && rtx_equal_p (trial, dest)
5362 1.1 mrg && !side_effects_p (dest)
5363 1.1 mrg && (cfun->can_delete_dead_exceptions
5364 1.1 mrg || insn_nothrow_p (insn))
5365 1.1 mrg /* We can only remove the later store if the earlier aliases
5366 1.1 mrg at least all accesses the later one. */
5367 1.1 mrg && (!MEM_P (trial)
5368 1.1 mrg || ((MEM_ALIAS_SET (dest) == MEM_ALIAS_SET (trial)
5369 1.1 mrg || alias_set_subset_of (MEM_ALIAS_SET (dest),
5370 1.1 mrg MEM_ALIAS_SET (trial)))
5371 1.1 mrg && (!MEM_EXPR (trial)
5372 1.1 mrg || refs_same_for_tbaa_p (MEM_EXPR (trial),
5373 1.1 mrg MEM_EXPR (dest))))))
5374 1.1 mrg {
5375 1.1 mrg SET_SRC (sets[i].rtl) = trial;
5376 1.1 mrg noop_insn = true;
5377 1.1 mrg break;
5378 1.1 mrg }
5379 1.1 mrg
5380 1.1 mrg /* Reject certain invalid forms of CONST that we create. */
5381 1.1 mrg else if (CONSTANT_P (trial)
5382 1.1 mrg && GET_CODE (trial) == CONST
5383 1.1 mrg /* Reject cases that will cause decode_rtx_const to
5384 1.1 mrg die. On the alpha when simplifying a switch, we
5385 1.1 mrg get (const (truncate (minus (label_ref)
5386 1.1 mrg (label_ref)))). */
5387 1.1 mrg && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5388 1.1 mrg /* Likewise on IA-64, except without the
5389 1.1 mrg truncate. */
5390 1.1 mrg || (GET_CODE (XEXP (trial, 0)) == MINUS
5391 1.1 mrg && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5392 1.1 mrg && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5393 1.1 mrg /* Do nothing for this case. */
5394 1.1 mrg ;
5395 1.1 mrg
5396 1.1 mrg /* Do not replace anything with a MEM, except the replacement
5397 1.1 mrg is a no-op. This allows this loop to terminate. */
5398 1.1 mrg else if (MEM_P (trial) && !rtx_equal_p (trial, SET_SRC(sets[i].rtl)))
5399 1.1 mrg /* Do nothing for this case. */
5400 1.1 mrg ;
5401 1.1 mrg
5402 1.1 mrg /* Look for a substitution that makes a valid insn. */
5403 1.1 mrg else if (validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5404 1.1 mrg trial, 0))
5405 1.1 mrg {
5406 1.1 mrg rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
5407 1.1 mrg
5408 1.1 mrg /* The result of apply_change_group can be ignored; see
5409 1.1 mrg canon_reg. */
5410 1.1 mrg
5411 1.1 mrg validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
5412 1.1 mrg apply_change_group ();
5413 1.1 mrg
5414 1.1 mrg break;
5415 1.1 mrg }
5416 1.1 mrg
5417 1.1 mrg /* If the current function uses a constant pool and this is a
5418 1.1 mrg constant, try making a pool entry. Put it in src_folded
5419 1.1 mrg unless we already have done this since that is where it
5420 1.1 mrg likely came from. */
5421 1.1 mrg
5422 1.1 mrg else if (crtl->uses_const_pool
5423 1.1 mrg && CONSTANT_P (trial)
5424 1.1 mrg && !CONST_INT_P (trial)
5425 1.1 mrg && (src_folded == 0 || !MEM_P (src_folded))
5426 1.1 mrg && GET_MODE_CLASS (mode) != MODE_CC
5427 1.1 mrg && mode != VOIDmode)
5428 1.1 mrg {
5429 1.1 mrg src_folded = force_const_mem (mode, trial);
5430 1.1 mrg if (src_folded)
5431 1.1 mrg {
5432 1.1 mrg src_folded_cost = COST (src_folded, mode);
5433 1.1 mrg src_folded_regcost = approx_reg_cost (src_folded);
5434 1.1 mrg }
5435 1.1 mrg }
5436 1.1 mrg }
5437 1.1 mrg
5438 1.1 mrg /* If we changed the insn too much, handle this set from scratch. */
5439 1.1 mrg if (repeat)
5440 1.1 mrg {
5441 1.1 mrg i--;
5442 1.1 mrg continue;
5443 1.1 mrg }
5444 1.1 mrg
5445 1.1 mrg src = SET_SRC (sets[i].rtl);
5446 1.1 mrg
5447 1.1 mrg /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5448 1.1 mrg However, there is an important exception: If both are registers
5449 1.1 mrg that are not the head of their equivalence class, replace SET_SRC
5450 1.1 mrg with the head of the class. If we do not do this, we will have
5451 1.1 mrg both registers live over a portion of the basic block. This way,
5452 1.1 mrg their lifetimes will likely abut instead of overlapping. */
5453 1.1 mrg if (REG_P (dest)
5454 1.1 mrg && REGNO_QTY_VALID_P (REGNO (dest)))
5455 1.1 mrg {
5456 1.1 mrg int dest_q = REG_QTY (REGNO (dest));
5457 1.1 mrg struct qty_table_elem *dest_ent = &qty_table[dest_q];
5458 1.1 mrg
5459 1.1 mrg if (dest_ent->mode == GET_MODE (dest)
5460 1.1 mrg && dest_ent->first_reg != REGNO (dest)
5461 1.1 mrg && REG_P (src) && REGNO (src) == REGNO (dest)
5462 1.1 mrg /* Don't do this if the original insn had a hard reg as
5463 1.1 mrg SET_SRC or SET_DEST. */
5464 1.1 mrg && (!REG_P (sets[i].src)
5465 1.1 mrg || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5466 1.1 mrg && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5467 1.1 mrg /* We can't call canon_reg here because it won't do anything if
5468 1.1 mrg SRC is a hard register. */
5469 1.1 mrg {
5470 1.1 mrg int src_q = REG_QTY (REGNO (src));
5471 1.1 mrg struct qty_table_elem *src_ent = &qty_table[src_q];
5472 1.1 mrg int first = src_ent->first_reg;
5473 1.1 mrg rtx new_src
5474 1.1 mrg = (first >= FIRST_PSEUDO_REGISTER
5475 1.1 mrg ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5476 1.1 mrg
5477 1.1 mrg /* We must use validate-change even for this, because this
5478 1.1 mrg might be a special no-op instruction, suitable only to
5479 1.1 mrg tag notes onto. */
5480 1.1 mrg if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5481 1.1 mrg {
5482 1.1 mrg src = new_src;
5483 1.1 mrg /* If we had a constant that is cheaper than what we are now
5484 1.1 mrg setting SRC to, use that constant. We ignored it when we
5485 1.1 mrg thought we could make this into a no-op. */
5486 1.1 mrg if (src_const && COST (src_const, mode) < COST (src, mode)
5487 1.1 mrg && validate_change (insn, &SET_SRC (sets[i].rtl),
5488 1.1 mrg src_const, 0))
5489 1.1 mrg src = src_const;
5490 1.1 mrg }
5491 1.1 mrg }
5492 1.1 mrg }
5493 1.1 mrg
5494 1.1 mrg /* If we made a change, recompute SRC values. */
5495 1.1 mrg if (src != sets[i].src)
5496 1.1 mrg {
5497 1.1 mrg do_not_record = 0;
5498 1.1 mrg hash_arg_in_memory = 0;
5499 1.1 mrg sets[i].src = src;
5500 1.1 mrg sets[i].src_hash = HASH (src, mode);
5501 1.1 mrg sets[i].src_volatile = do_not_record;
5502 1.1 mrg sets[i].src_in_memory = hash_arg_in_memory;
5503 1.1 mrg sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5504 1.1 mrg }
5505 1.1 mrg
5506 1.1 mrg /* If this is a single SET, we are setting a register, and we have an
5507 1.1 mrg equivalent constant, we want to add a REG_EQUAL note if the constant
5508 1.1 mrg is different from the source. We don't want to do it for a constant
5509 1.1 mrg pseudo since verifying that this pseudo hasn't been eliminated is a
5510 1.1 mrg pain; moreover such a note won't help anything.
5511 1.1 mrg
5512 1.1 mrg Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5513 1.1 mrg which can be created for a reference to a compile time computable
5514 1.1 mrg entry in a jump table. */
5515 1.1 mrg if (n_sets == 1
5516 1.1 mrg && REG_P (dest)
5517 1.1 mrg && src_const
5518 1.1 mrg && !REG_P (src_const)
5519 1.1 mrg && !(GET_CODE (src_const) == SUBREG
5520 1.1 mrg && REG_P (SUBREG_REG (src_const)))
5521 1.1 mrg && !(GET_CODE (src_const) == CONST
5522 1.1 mrg && GET_CODE (XEXP (src_const, 0)) == MINUS
5523 1.1 mrg && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5524 1.1 mrg && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF)
5525 1.1 mrg && !rtx_equal_p (src, src_const))
5526 1.1 mrg {
5527 1.1 mrg /* Make sure that the rtx is not shared. */
5528 1.1 mrg src_const = copy_rtx (src_const);
5529 1.1 mrg
5530 1.1 mrg /* Record the actual constant value in a REG_EQUAL note,
5531 1.1 mrg making a new one if one does not already exist. */
5532 1.1 mrg set_unique_reg_note (insn, REG_EQUAL, src_const);
5533 1.1 mrg df_notes_rescan (insn);
5534 1.1 mrg }
5535 1.1 mrg
5536 1.1 mrg /* Now deal with the destination. */
5537 1.1 mrg do_not_record = 0;
5538 1.1 mrg
5539 1.1 mrg /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5540 1.1 mrg while (GET_CODE (dest) == SUBREG
5541 1.1 mrg || GET_CODE (dest) == ZERO_EXTRACT
5542 1.1 mrg || GET_CODE (dest) == STRICT_LOW_PART)
5543 1.1 mrg dest = XEXP (dest, 0);
5544 1.1 mrg
5545 1.1 mrg sets[i].inner_dest = dest;
5546 1.1 mrg
5547 1.1 mrg if (MEM_P (dest))
5548 1.1 mrg {
5549 1.1 mrg #ifdef PUSH_ROUNDING
5550 1.1 mrg /* Stack pushes invalidate the stack pointer. */
5551 1.1 mrg rtx addr = XEXP (dest, 0);
5552 1.1 mrg if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5553 1.1 mrg && XEXP (addr, 0) == stack_pointer_rtx)
5554 1.1 mrg invalidate (stack_pointer_rtx, VOIDmode);
5555 1.1 mrg #endif
5556 1.1 mrg dest = fold_rtx (dest, insn);
5557 1.1 mrg }
5558 1.1 mrg
5559 1.1 mrg /* Compute the hash code of the destination now,
5560 1.1 mrg before the effects of this instruction are recorded,
5561 1.1 mrg since the register values used in the address computation
5562 1.1 mrg are those before this instruction. */
5563 1.1 mrg sets[i].dest_hash = HASH (dest, mode);
5564 1.1 mrg
5565 1.1 mrg /* Don't enter a bit-field in the hash table
5566 1.1 mrg because the value in it after the store
5567 1.1 mrg may not equal what was stored, due to truncation. */
5568 1.1 mrg
5569 1.1 mrg if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5570 1.1 mrg {
5571 1.1 mrg rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5572 1.1 mrg
5573 1.1 mrg if (src_const != 0 && CONST_INT_P (src_const)
5574 1.1 mrg && CONST_INT_P (width)
5575 1.1 mrg && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5576 1.1 mrg && ! (INTVAL (src_const)
5577 1.1 mrg & (HOST_WIDE_INT_M1U << INTVAL (width))))
5578 1.1 mrg /* Exception: if the value is constant,
5579 1.1 mrg and it won't be truncated, record it. */
5580 1.1 mrg ;
5581 1.1 mrg else
5582 1.1 mrg {
5583 1.1 mrg /* This is chosen so that the destination will be invalidated
5584 1.1 mrg but no new value will be recorded.
5585 1.1 mrg We must invalidate because sometimes constant
5586 1.1 mrg values can be recorded for bitfields. */
5587 1.1 mrg sets[i].src_elt = 0;
5588 1.1 mrg sets[i].src_volatile = 1;
5589 1.1 mrg src_eqv = 0;
5590 1.1 mrg src_eqv_elt = 0;
5591 1.1 mrg }
5592 1.1 mrg }
5593 1.1 mrg
5594 1.1 mrg /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5595 1.1 mrg the insn. */
5596 1.1 mrg else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5597 1.1 mrg {
5598 1.1 mrg /* One less use of the label this insn used to jump to. */
5599 1.1 mrg cse_cfg_altered |= delete_insn_and_edges (insn);
5600 1.1 mrg cse_jumps_altered = true;
5601 1.1 mrg /* No more processing for this set. */
5602 1.1 mrg sets[i].rtl = 0;
5603 1.1 mrg }
5604 1.1 mrg
5605 1.1 mrg /* Similarly for no-op moves. */
5606 1.1 mrg else if (noop_insn)
5607 1.1 mrg {
5608 1.1 mrg if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
5609 1.1 mrg cse_cfg_altered = true;
5610 1.1 mrg cse_cfg_altered |= delete_insn_and_edges (insn);
5611 1.1 mrg /* No more processing for this set. */
5612 1.1 mrg sets[i].rtl = 0;
5613 1.1 mrg }
5614 1.1 mrg
5615 1.1 mrg /* If this SET is now setting PC to a label, we know it used to
5616 1.1 mrg be a conditional or computed branch. */
5617 1.1 mrg else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5618 1.1 mrg && !LABEL_REF_NONLOCAL_P (src))
5619 1.1 mrg {
5620 1.1 mrg /* We reemit the jump in as many cases as possible just in
5621 1.1 mrg case the form of an unconditional jump is significantly
5622 1.1 mrg different than a computed jump or conditional jump.
5623 1.1 mrg
5624 1.1 mrg If this insn has multiple sets, then reemitting the
5625 1.1 mrg jump is nontrivial. So instead we just force rerecognition
5626 1.1 mrg and hope for the best. */
5627 1.1 mrg if (n_sets == 1)
5628 1.1 mrg {
5629 1.1 mrg rtx_jump_insn *new_rtx;
5630 1.1 mrg rtx note;
5631 1.1 mrg
5632 1.1 mrg rtx_insn *seq = targetm.gen_jump (XEXP (src, 0));
5633 1.1 mrg new_rtx = emit_jump_insn_before (seq, insn);
5634 1.1 mrg JUMP_LABEL (new_rtx) = XEXP (src, 0);
5635 1.1 mrg LABEL_NUSES (XEXP (src, 0))++;
5636 1.1 mrg
5637 1.1 mrg /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5638 1.1 mrg note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5639 1.1 mrg if (note)
5640 1.1 mrg {
5641 1.1 mrg XEXP (note, 1) = NULL_RTX;
5642 1.1 mrg REG_NOTES (new_rtx) = note;
5643 1.1 mrg }
5644 1.1 mrg
5645 1.1 mrg cse_cfg_altered |= delete_insn_and_edges (insn);
5646 1.1 mrg insn = new_rtx;
5647 1.1 mrg }
5648 1.1 mrg else
5649 1.1 mrg INSN_CODE (insn) = -1;
5650 1.1 mrg
5651 1.1 mrg /* Do not bother deleting any unreachable code, let jump do it. */
5652 1.1 mrg cse_jumps_altered = true;
5653 1.1 mrg sets[i].rtl = 0;
5654 1.1 mrg }
5655 1.1 mrg
5656 1.1 mrg /* If destination is volatile, invalidate it and then do no further
5657 1.1 mrg processing for this assignment. */
5658 1.1 mrg
5659 1.1 mrg else if (do_not_record)
5660 1.1 mrg {
5661 1.1 mrg invalidate_dest (dest);
5662 1.1 mrg sets[i].rtl = 0;
5663 1.1 mrg }
5664 1.1 mrg
5665 1.1 mrg if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5666 1.1 mrg {
5667 1.1 mrg do_not_record = 0;
5668 1.1 mrg sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5669 1.1 mrg if (do_not_record)
5670 1.1 mrg {
5671 1.1 mrg invalidate_dest (SET_DEST (sets[i].rtl));
5672 1.1 mrg sets[i].rtl = 0;
5673 1.1 mrg }
5674 1.1 mrg }
5675 1.1 mrg }
5676 1.1 mrg
5677 1.1 mrg /* Now enter all non-volatile source expressions in the hash table
5678 1.1 mrg if they are not already present.
5679 1.1 mrg Record their equivalence classes in src_elt.
5680 1.1 mrg This way we can insert the corresponding destinations into
5681 1.1 mrg the same classes even if the actual sources are no longer in them
5682 1.1 mrg (having been invalidated). */
5683 1.1 mrg
5684 1.1 mrg if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5685 1.1 mrg && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5686 1.1 mrg {
5687 1.1 mrg struct table_elt *elt;
5688 1.1 mrg struct table_elt *classp = sets[0].src_elt;
5689 1.1 mrg rtx dest = SET_DEST (sets[0].rtl);
5690 1.1 mrg machine_mode eqvmode = GET_MODE (dest);
5691 1.1 mrg
5692 1.1 mrg if (GET_CODE (dest) == STRICT_LOW_PART)
5693 1.1 mrg {
5694 1.1 mrg eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5695 1.1 mrg classp = 0;
5696 1.1 mrg }
5697 1.1 mrg if (insert_regs (src_eqv, classp, 0))
5698 1.1 mrg {
5699 1.1 mrg rehash_using_reg (src_eqv);
5700 1.1 mrg src_eqv_hash = HASH (src_eqv, eqvmode);
5701 1.1 mrg }
5702 1.1 mrg elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5703 1.1 mrg elt->in_memory = src_eqv_in_memory;
5704 1.1 mrg src_eqv_elt = elt;
5705 1.1 mrg
5706 1.1 mrg /* Check to see if src_eqv_elt is the same as a set source which
5707 1.1 mrg does not yet have an elt, and if so set the elt of the set source
5708 1.1 mrg to src_eqv_elt. */
5709 1.1 mrg for (i = 0; i < n_sets; i++)
5710 1.1 mrg if (sets[i].rtl && sets[i].src_elt == 0
5711 1.1 mrg && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5712 1.1 mrg sets[i].src_elt = src_eqv_elt;
5713 1.1 mrg }
5714 1.1 mrg
5715 1.1 mrg for (i = 0; i < n_sets; i++)
5716 1.1 mrg if (sets[i].rtl && ! sets[i].src_volatile
5717 1.1 mrg && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5718 1.1 mrg {
5719 1.1 mrg if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5720 1.1 mrg {
5721 1.1 mrg /* REG_EQUAL in setting a STRICT_LOW_PART
5722 1.1 mrg gives an equivalent for the entire destination register,
5723 1.1 mrg not just for the subreg being stored in now.
5724 1.1 mrg This is a more interesting equivalence, so we arrange later
5725 1.1 mrg to treat the entire reg as the destination. */
5726 1.1 mrg sets[i].src_elt = src_eqv_elt;
5727 1.1 mrg sets[i].src_hash = src_eqv_hash;
5728 1.1 mrg }
5729 1.1 mrg else
5730 1.1 mrg {
5731 1.1 mrg /* Insert source and constant equivalent into hash table, if not
5732 1.1 mrg already present. */
5733 1.1 mrg struct table_elt *classp = src_eqv_elt;
5734 1.1 mrg rtx src = sets[i].src;
5735 1.1 mrg rtx dest = SET_DEST (sets[i].rtl);
5736 1.1 mrg machine_mode mode
5737 1.1 mrg = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5738 1.1 mrg
5739 1.1 mrg /* It's possible that we have a source value known to be
5740 1.1 mrg constant but don't have a REG_EQUAL note on the insn.
5741 1.1 mrg Lack of a note will mean src_eqv_elt will be NULL. This
5742 1.1 mrg can happen where we've generated a SUBREG to access a
5743 1.1 mrg CONST_INT that is already in a register in a wider mode.
5744 1.1 mrg Ensure that the source expression is put in the proper
5745 1.1 mrg constant class. */
5746 1.1 mrg if (!classp)
5747 1.1 mrg classp = sets[i].src_const_elt;
5748 1.1 mrg
5749 1.1 mrg if (sets[i].src_elt == 0)
5750 1.1 mrg {
5751 1.1 mrg struct table_elt *elt;
5752 1.1 mrg
5753 1.1 mrg /* Note that these insert_regs calls cannot remove
5754 1.1 mrg any of the src_elt's, because they would have failed to
5755 1.1 mrg match if not still valid. */
5756 1.1 mrg if (insert_regs (src, classp, 0))
5757 1.1 mrg {
5758 1.1 mrg rehash_using_reg (src);
5759 1.1 mrg sets[i].src_hash = HASH (src, mode);
5760 1.1 mrg }
5761 1.1 mrg elt = insert (src, classp, sets[i].src_hash, mode);
5762 1.1 mrg elt->in_memory = sets[i].src_in_memory;
5763 1.1 mrg /* If inline asm has any clobbers, ensure we only reuse
5764 1.1 mrg existing inline asms and never try to put the ASM_OPERANDS
5765 1.1 mrg into an insn that isn't inline asm. */
5766 1.1 mrg if (GET_CODE (src) == ASM_OPERANDS
5767 1.1 mrg && GET_CODE (x) == PARALLEL)
5768 1.1 mrg elt->cost = MAX_COST;
5769 1.1 mrg sets[i].src_elt = classp = elt;
5770 1.1 mrg }
5771 1.1 mrg if (sets[i].src_const && sets[i].src_const_elt == 0
5772 1.1 mrg && src != sets[i].src_const
5773 1.1 mrg && ! rtx_equal_p (sets[i].src_const, src))
5774 1.1 mrg sets[i].src_elt = insert (sets[i].src_const, classp,
5775 1.1 mrg sets[i].src_const_hash, mode);
5776 1.1 mrg }
5777 1.1 mrg }
5778 1.1 mrg else if (sets[i].src_elt == 0)
5779 1.1 mrg /* If we did not insert the source into the hash table (e.g., it was
5780 1.1 mrg volatile), note the equivalence class for the REG_EQUAL value, if any,
5781 1.1 mrg so that the destination goes into that class. */
5782 1.1 mrg sets[i].src_elt = src_eqv_elt;
5783 1.1 mrg
5784 1.1 mrg /* Record destination addresses in the hash table. This allows us to
5785 1.1 mrg check if they are invalidated by other sets. */
5786 1.1 mrg for (i = 0; i < n_sets; i++)
5787 1.1 mrg {
5788 1.1 mrg if (sets[i].rtl)
5789 1.1 mrg {
5790 1.1 mrg rtx x = sets[i].inner_dest;
5791 1.1 mrg struct table_elt *elt;
5792 1.1 mrg machine_mode mode;
5793 1.1 mrg unsigned hash;
5794 1.1 mrg
5795 1.1 mrg if (MEM_P (x))
5796 1.1 mrg {
5797 1.1 mrg x = XEXP (x, 0);
5798 1.1 mrg mode = GET_MODE (x);
5799 1.1 mrg hash = HASH (x, mode);
5800 1.1 mrg elt = lookup (x, hash, mode);
5801 1.1 mrg if (!elt)
5802 1.1 mrg {
5803 1.1 mrg if (insert_regs (x, NULL, 0))
5804 1.1 mrg {
5805 1.1 mrg rtx dest = SET_DEST (sets[i].rtl);
5806 1.1 mrg
5807 1.1 mrg rehash_using_reg (x);
5808 1.1 mrg hash = HASH (x, mode);
5809 1.1 mrg sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5810 1.1 mrg }
5811 1.1 mrg elt = insert (x, NULL, hash, mode);
5812 1.1 mrg }
5813 1.1 mrg
5814 1.1 mrg sets[i].dest_addr_elt = elt;
5815 1.1 mrg }
5816 1.1 mrg else
5817 1.1 mrg sets[i].dest_addr_elt = NULL;
5818 1.1 mrg }
5819 1.1 mrg }
5820 1.1 mrg
5821 1.1 mrg invalidate_from_clobbers (insn);
5822 1.1 mrg
5823 1.1 mrg /* Some registers are invalidated by subroutine calls. Memory is
5824 1.1 mrg invalidated by non-constant calls. */
5825 1.1 mrg
5826 1.1 mrg if (CALL_P (insn))
5827 1.1 mrg {
5828 1.1 mrg if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5829 1.1 mrg invalidate_memory ();
5830 1.1 mrg else
5831 1.1 mrg /* For const/pure calls, invalidate any argument slots, because
5832 1.1 mrg those are owned by the callee. */
5833 1.1 mrg for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5834 1.1 mrg if (GET_CODE (XEXP (tem, 0)) == USE
5835 1.1 mrg && MEM_P (XEXP (XEXP (tem, 0), 0)))
5836 1.1 mrg invalidate (XEXP (XEXP (tem, 0), 0), VOIDmode);
5837 1.1 mrg invalidate_for_call (insn);
5838 1.1 mrg }
5839 1.1 mrg
5840 1.1 mrg /* Now invalidate everything set by this instruction.
5841 1.1 mrg If a SUBREG or other funny destination is being set,
5842 1.1 mrg sets[i].rtl is still nonzero, so here we invalidate the reg
5843 1.1 mrg a part of which is being set. */
5844 1.1 mrg
5845 1.1 mrg for (i = 0; i < n_sets; i++)
5846 1.1 mrg if (sets[i].rtl)
5847 1.1 mrg {
5848 1.1 mrg /* We can't use the inner dest, because the mode associated with
5849 1.1 mrg a ZERO_EXTRACT is significant. */
5850 1.1 mrg rtx dest = SET_DEST (sets[i].rtl);
5851 1.1 mrg
5852 1.1 mrg /* Needed for registers to remove the register from its
5853 1.1 mrg previous quantity's chain.
5854 1.1 mrg Needed for memory if this is a nonvarying address, unless
5855 1.1 mrg we have just done an invalidate_memory that covers even those. */
5856 1.1 mrg if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5857 1.1 mrg invalidate (dest, VOIDmode);
5858 1.1 mrg else if (MEM_P (dest))
5859 1.1 mrg invalidate (dest, VOIDmode);
5860 1.1 mrg else if (GET_CODE (dest) == STRICT_LOW_PART
5861 1.1 mrg || GET_CODE (dest) == ZERO_EXTRACT)
5862 1.1 mrg invalidate (XEXP (dest, 0), GET_MODE (dest));
5863 1.1 mrg }
5864 1.1 mrg
5865 1.1 mrg /* Don't cse over a call to setjmp; on some machines (eg VAX)
5866 1.1 mrg the regs restored by the longjmp come from a later time
5867 1.1 mrg than the setjmp. */
5868 1.1 mrg if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5869 1.1 mrg {
5870 1.1 mrg flush_hash_table ();
5871 1.1 mrg goto done;
5872 1.1 mrg }
5873 1.1 mrg
5874 1.1 mrg /* Make sure registers mentioned in destinations
5875 1.1 mrg are safe for use in an expression to be inserted.
5876 1.1 mrg This removes from the hash table
5877 1.1 mrg any invalid entry that refers to one of these registers.
5878 1.1 mrg
5879 1.1 mrg We don't care about the return value from mention_regs because
5880 1.1 mrg we are going to hash the SET_DEST values unconditionally. */
5881 1.1 mrg
5882 1.1 mrg for (i = 0; i < n_sets; i++)
5883 1.1 mrg {
5884 1.1 mrg if (sets[i].rtl)
5885 1.1 mrg {
5886 1.1 mrg rtx x = SET_DEST (sets[i].rtl);
5887 1.1 mrg
5888 1.1 mrg if (!REG_P (x))
5889 1.1 mrg mention_regs (x);
5890 1.1 mrg else
5891 1.1 mrg {
5892 1.1 mrg /* We used to rely on all references to a register becoming
5893 1.1 mrg inaccessible when a register changes to a new quantity,
5894 1.1 mrg since that changes the hash code. However, that is not
5895 1.1 mrg safe, since after HASH_SIZE new quantities we get a
5896 1.1 mrg hash 'collision' of a register with its own invalid
5897 1.1 mrg entries. And since SUBREGs have been changed not to
5898 1.1 mrg change their hash code with the hash code of the register,
5899 1.1 mrg it wouldn't work any longer at all. So we have to check
5900 1.1 mrg for any invalid references lying around now.
5901 1.1 mrg This code is similar to the REG case in mention_regs,
5902 1.1 mrg but it knows that reg_tick has been incremented, and
5903 1.1 mrg it leaves reg_in_table as -1 . */
5904 1.1 mrg unsigned int regno = REGNO (x);
5905 1.1 mrg unsigned int endregno = END_REGNO (x);
5906 1.1 mrg unsigned int i;
5907 1.1 mrg
5908 1.1 mrg for (i = regno; i < endregno; i++)
5909 1.1 mrg {
5910 1.1 mrg if (REG_IN_TABLE (i) >= 0)
5911 1.1 mrg {
5912 1.1 mrg remove_invalid_refs (i);
5913 1.1 mrg REG_IN_TABLE (i) = -1;
5914 1.1 mrg }
5915 1.1 mrg }
5916 1.1 mrg }
5917 1.1 mrg }
5918 1.1 mrg }
5919 1.1 mrg
5920 1.1 mrg /* We may have just removed some of the src_elt's from the hash table.
5921 1.1 mrg So replace each one with the current head of the same class.
5922 1.1 mrg Also check if destination addresses have been removed. */
5923 1.1 mrg
5924 1.1 mrg for (i = 0; i < n_sets; i++)
5925 1.1 mrg if (sets[i].rtl)
5926 1.1 mrg {
5927 1.1 mrg if (sets[i].dest_addr_elt
5928 1.1 mrg && sets[i].dest_addr_elt->first_same_value == 0)
5929 1.1 mrg {
5930 1.1 mrg /* The elt was removed, which means this destination is not
5931 1.1 mrg valid after this instruction. */
5932 1.1 mrg sets[i].rtl = NULL_RTX;
5933 1.1 mrg }
5934 1.1 mrg else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5935 1.1 mrg /* If elt was removed, find current head of same class,
5936 1.1 mrg or 0 if nothing remains of that class. */
5937 1.1 mrg {
5938 1.1 mrg struct table_elt *elt = sets[i].src_elt;
5939 1.1 mrg
5940 1.1 mrg while (elt && elt->prev_same_value)
5941 1.1 mrg elt = elt->prev_same_value;
5942 1.1 mrg
5943 1.1 mrg while (elt && elt->first_same_value == 0)
5944 1.1 mrg elt = elt->next_same_value;
5945 1.1 mrg sets[i].src_elt = elt ? elt->first_same_value : 0;
5946 1.1 mrg }
5947 1.1 mrg }
5948 1.1 mrg
5949 1.1 mrg /* Now insert the destinations into their equivalence classes. */
5950 1.1 mrg
5951 1.1 mrg for (i = 0; i < n_sets; i++)
5952 1.1 mrg if (sets[i].rtl)
5953 1.1 mrg {
5954 1.1 mrg rtx dest = SET_DEST (sets[i].rtl);
5955 1.1 mrg struct table_elt *elt;
5956 1.1 mrg
5957 1.1 mrg /* Don't record value if we are not supposed to risk allocating
5958 1.1 mrg floating-point values in registers that might be wider than
5959 1.1 mrg memory. */
5960 1.1 mrg if ((flag_float_store
5961 1.1 mrg && MEM_P (dest)
5962 1.1 mrg && FLOAT_MODE_P (GET_MODE (dest)))
5963 1.1 mrg /* Don't record BLKmode values, because we don't know the
5964 1.1 mrg size of it, and can't be sure that other BLKmode values
5965 1.1 mrg have the same or smaller size. */
5966 1.1 mrg || GET_MODE (dest) == BLKmode
5967 1.1 mrg /* If we didn't put a REG_EQUAL value or a source into the hash
5968 1.1 mrg table, there is no point is recording DEST. */
5969 1.1 mrg || sets[i].src_elt == 0)
5970 1.1 mrg continue;
5971 1.1 mrg
5972 1.1 mrg /* STRICT_LOW_PART isn't part of the value BEING set,
5973 1.1 mrg and neither is the SUBREG inside it.
5974 1.1 mrg Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5975 1.1 mrg if (GET_CODE (dest) == STRICT_LOW_PART)
5976 1.1 mrg dest = SUBREG_REG (XEXP (dest, 0));
5977 1.1 mrg
5978 1.1 mrg if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5979 1.1 mrg /* Registers must also be inserted into chains for quantities. */
5980 1.1 mrg if (insert_regs (dest, sets[i].src_elt, 1))
5981 1.1 mrg {
5982 1.1 mrg /* If `insert_regs' changes something, the hash code must be
5983 1.1 mrg recalculated. */
5984 1.1 mrg rehash_using_reg (dest);
5985 1.1 mrg sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5986 1.1 mrg }
5987 1.1 mrg
5988 1.1 mrg /* If DEST is a paradoxical SUBREG, don't record DEST since the bits
5989 1.1 mrg outside the mode of GET_MODE (SUBREG_REG (dest)) are undefined. */
5990 1.1 mrg if (paradoxical_subreg_p (dest))
5991 1.1 mrg continue;
5992 1.1 mrg
5993 1.1 mrg elt = insert (dest, sets[i].src_elt,
5994 1.1 mrg sets[i].dest_hash, GET_MODE (dest));
5995 1.1 mrg
5996 1.1 mrg /* If this is a constant, insert the constant anchors with the
5997 1.1 mrg equivalent register-offset expressions using register DEST. */
5998 1.1 mrg if (targetm.const_anchor
5999 1.1 mrg && REG_P (dest)
6000 1.1 mrg && SCALAR_INT_MODE_P (GET_MODE (dest))
6001 1.1 mrg && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
6002 1.1 mrg insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
6003 1.1 mrg
6004 1.1 mrg elt->in_memory = (MEM_P (sets[i].inner_dest)
6005 1.1 mrg && !MEM_READONLY_P (sets[i].inner_dest));
6006 1.1 mrg
6007 1.1 mrg /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6008 1.1 mrg narrower than M2, and both M1 and M2 are the same number of words,
6009 1.1 mrg we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6010 1.1 mrg make that equivalence as well.
6011 1.1 mrg
6012 1.1 mrg However, BAR may have equivalences for which gen_lowpart
6013 1.1 mrg will produce a simpler value than gen_lowpart applied to
6014 1.1 mrg BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6015 1.1 mrg BAR's equivalences. If we don't get a simplified form, make
6016 1.1 mrg the SUBREG. It will not be used in an equivalence, but will
6017 1.1 mrg cause two similar assignments to be detected.
6018 1.1 mrg
6019 1.1 mrg Note the loop below will find SUBREG_REG (DEST) since we have
6020 1.1 mrg already entered SRC and DEST of the SET in the table. */
6021 1.1 mrg
6022 1.1 mrg if (GET_CODE (dest) == SUBREG
6023 1.1 mrg && (known_equal_after_align_down
6024 1.1 mrg (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1,
6025 1.1 mrg GET_MODE_SIZE (GET_MODE (dest)) - 1,
6026 1.1 mrg UNITS_PER_WORD))
6027 1.1 mrg && !partial_subreg_p (dest)
6028 1.1 mrg && sets[i].src_elt != 0)
6029 1.1 mrg {
6030 1.1 mrg machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6031 1.1 mrg struct table_elt *elt, *classp = 0;
6032 1.1 mrg
6033 1.1 mrg for (elt = sets[i].src_elt->first_same_value; elt;
6034 1.1 mrg elt = elt->next_same_value)
6035 1.1 mrg {
6036 1.1 mrg rtx new_src = 0;
6037 1.1 mrg unsigned src_hash;
6038 1.1 mrg struct table_elt *src_elt;
6039 1.1 mrg
6040 1.1 mrg /* Ignore invalid entries. */
6041 1.1 mrg if (!REG_P (elt->exp)
6042 1.1 mrg && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6043 1.1 mrg continue;
6044 1.1 mrg
6045 1.1 mrg /* We may have already been playing subreg games. If the
6046 1.1 mrg mode is already correct for the destination, use it. */
6047 1.1 mrg if (GET_MODE (elt->exp) == new_mode)
6048 1.1 mrg new_src = elt->exp;
6049 1.1 mrg else
6050 1.1 mrg {
6051 1.1 mrg poly_uint64 byte
6052 1.1 mrg = subreg_lowpart_offset (new_mode, GET_MODE (dest));
6053 1.1 mrg new_src = simplify_gen_subreg (new_mode, elt->exp,
6054 1.1 mrg GET_MODE (dest), byte);
6055 1.1 mrg }
6056 1.1 mrg
6057 1.1 mrg /* The call to simplify_gen_subreg fails if the value
6058 1.1 mrg is VOIDmode, yet we can't do any simplification, e.g.
6059 1.1 mrg for EXPR_LISTs denoting function call results.
6060 1.1 mrg It is invalid to construct a SUBREG with a VOIDmode
6061 1.1 mrg SUBREG_REG, hence a zero new_src means we can't do
6062 1.1 mrg this substitution. */
6063 1.1 mrg if (! new_src)
6064 1.1 mrg continue;
6065 1.1 mrg
6066 1.1 mrg src_hash = HASH (new_src, new_mode);
6067 1.1 mrg src_elt = lookup (new_src, src_hash, new_mode);
6068 1.1 mrg
6069 1.1 mrg /* Put the new source in the hash table is if isn't
6070 1.1 mrg already. */
6071 1.1 mrg if (src_elt == 0)
6072 1.1 mrg {
6073 1.1 mrg if (insert_regs (new_src, classp, 0))
6074 1.1 mrg {
6075 1.1 mrg rehash_using_reg (new_src);
6076 1.1 mrg src_hash = HASH (new_src, new_mode);
6077 1.1 mrg }
6078 1.1 mrg src_elt = insert (new_src, classp, src_hash, new_mode);
6079 1.1 mrg src_elt->in_memory = elt->in_memory;
6080 1.1 mrg if (GET_CODE (new_src) == ASM_OPERANDS
6081 1.1 mrg && elt->cost == MAX_COST)
6082 1.1 mrg src_elt->cost = MAX_COST;
6083 1.1 mrg }
6084 1.1 mrg else if (classp && classp != src_elt->first_same_value)
6085 1.1 mrg /* Show that two things that we've seen before are
6086 1.1 mrg actually the same. */
6087 1.1 mrg merge_equiv_classes (src_elt, classp);
6088 1.1 mrg
6089 1.1 mrg classp = src_elt->first_same_value;
6090 1.1 mrg /* Ignore invalid entries. */
6091 1.1 mrg while (classp
6092 1.1 mrg && !REG_P (classp->exp)
6093 1.1 mrg && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6094 1.1 mrg classp = classp->next_same_value;
6095 1.1 mrg }
6096 1.1 mrg }
6097 1.1 mrg }
6098 1.1 mrg
6099 1.1 mrg /* Special handling for (set REG0 REG1) where REG0 is the
6100 1.1 mrg "cheapest", cheaper than REG1. After cse, REG1 will probably not
6101 1.1 mrg be used in the sequel, so (if easily done) change this insn to
6102 1.1 mrg (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6103 1.1 mrg that computed their value. Then REG1 will become a dead store
6104 1.1 mrg and won't cloud the situation for later optimizations.
6105 1.1 mrg
6106 1.1 mrg Do not make this change if REG1 is a hard register, because it will
6107 1.1 mrg then be used in the sequel and we may be changing a two-operand insn
6108 1.1 mrg into a three-operand insn.
6109 1.1 mrg
6110 1.1 mrg Also do not do this if we are operating on a copy of INSN. */
6111 1.1 mrg
6112 1.1 mrg if (n_sets == 1 && sets[0].rtl)
6113 1.1 mrg try_back_substitute_reg (sets[0].rtl, insn);
6114 1.1 mrg
6115 1.1 mrg done:;
6116 1.1 mrg }
6117 1.1 mrg
6118 1.1 mrg /* Remove from the hash table all expressions that reference memory. */
6120 1.1 mrg
6121 1.1 mrg static void
6122 1.1 mrg invalidate_memory (void)
6123 1.1 mrg {
6124 1.1 mrg int i;
6125 1.1 mrg struct table_elt *p, *next;
6126 1.1 mrg
6127 1.1 mrg for (i = 0; i < HASH_SIZE; i++)
6128 1.1 mrg for (p = table[i]; p; p = next)
6129 1.1 mrg {
6130 1.1 mrg next = p->next_same_hash;
6131 1.1 mrg if (p->in_memory)
6132 1.1 mrg remove_from_table (p, i);
6133 1.1 mrg }
6134 1.1 mrg }
6135 1.1 mrg
6136 1.1 mrg /* Perform invalidation on the basis of everything about INSN,
6137 1.1 mrg except for invalidating the actual places that are SET in it.
6138 1.1 mrg This includes the places CLOBBERed, and anything that might
6139 1.1 mrg alias with something that is SET or CLOBBERed. */
6140 1.1 mrg
6141 1.1 mrg static void
6142 1.1 mrg invalidate_from_clobbers (rtx_insn *insn)
6143 1.1 mrg {
6144 1.1 mrg rtx x = PATTERN (insn);
6145 1.1 mrg
6146 1.1 mrg if (GET_CODE (x) == CLOBBER)
6147 1.1 mrg {
6148 1.1 mrg rtx ref = XEXP (x, 0);
6149 1.1 mrg if (ref)
6150 1.1 mrg {
6151 1.1 mrg if (REG_P (ref) || GET_CODE (ref) == SUBREG
6152 1.1 mrg || MEM_P (ref))
6153 1.1 mrg invalidate (ref, VOIDmode);
6154 1.1 mrg else if (GET_CODE (ref) == STRICT_LOW_PART
6155 1.1 mrg || GET_CODE (ref) == ZERO_EXTRACT)
6156 1.1 mrg invalidate (XEXP (ref, 0), GET_MODE (ref));
6157 1.1 mrg }
6158 1.1 mrg }
6159 1.1 mrg else if (GET_CODE (x) == PARALLEL)
6160 1.1 mrg {
6161 1.1 mrg int i;
6162 1.1 mrg for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6163 1.1 mrg {
6164 1.1 mrg rtx y = XVECEXP (x, 0, i);
6165 1.1 mrg if (GET_CODE (y) == CLOBBER)
6166 1.1 mrg {
6167 1.1 mrg rtx ref = XEXP (y, 0);
6168 1.1 mrg if (REG_P (ref) || GET_CODE (ref) == SUBREG
6169 1.1 mrg || MEM_P (ref))
6170 1.1 mrg invalidate (ref, VOIDmode);
6171 1.1 mrg else if (GET_CODE (ref) == STRICT_LOW_PART
6172 1.1 mrg || GET_CODE (ref) == ZERO_EXTRACT)
6173 1.1 mrg invalidate (XEXP (ref, 0), GET_MODE (ref));
6174 1.1 mrg }
6175 1.1 mrg }
6176 1.1 mrg }
6177 1.1 mrg }
6178 1.1 mrg
6179 1.1 mrg /* Perform invalidation on the basis of everything about INSN.
6181 1.1 mrg This includes the places CLOBBERed, and anything that might
6182 1.1 mrg alias with something that is SET or CLOBBERed. */
6183 1.1 mrg
6184 1.1 mrg static void
6185 1.1 mrg invalidate_from_sets_and_clobbers (rtx_insn *insn)
6186 1.1 mrg {
6187 1.1 mrg rtx tem;
6188 1.1 mrg rtx x = PATTERN (insn);
6189 1.1 mrg
6190 1.1 mrg if (CALL_P (insn))
6191 1.1 mrg {
6192 1.1 mrg for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6193 1.1 mrg {
6194 1.1 mrg rtx temx = XEXP (tem, 0);
6195 1.1 mrg if (GET_CODE (temx) == CLOBBER)
6196 1.1 mrg invalidate (SET_DEST (temx), VOIDmode);
6197 1.1 mrg }
6198 1.1 mrg }
6199 1.1 mrg
6200 1.1 mrg /* Ensure we invalidate the destination register of a CALL insn.
6201 1.1 mrg This is necessary for machines where this register is a fixed_reg,
6202 1.1 mrg because no other code would invalidate it. */
6203 1.1 mrg if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
6204 1.1 mrg invalidate (SET_DEST (x), VOIDmode);
6205 1.1 mrg
6206 1.1 mrg else if (GET_CODE (x) == PARALLEL)
6207 1.1 mrg {
6208 1.1 mrg int i;
6209 1.1 mrg
6210 1.1 mrg for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6211 1.1 mrg {
6212 1.1 mrg rtx y = XVECEXP (x, 0, i);
6213 1.1 mrg if (GET_CODE (y) == CLOBBER)
6214 1.1 mrg {
6215 1.1 mrg rtx clobbered = XEXP (y, 0);
6216 1.1 mrg
6217 1.1 mrg if (REG_P (clobbered)
6218 1.1 mrg || GET_CODE (clobbered) == SUBREG)
6219 1.1 mrg invalidate (clobbered, VOIDmode);
6220 1.1 mrg else if (GET_CODE (clobbered) == STRICT_LOW_PART
6221 1.1 mrg || GET_CODE (clobbered) == ZERO_EXTRACT)
6222 1.1 mrg invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6223 1.1 mrg }
6224 1.1 mrg else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
6225 1.1 mrg invalidate (SET_DEST (y), VOIDmode);
6226 1.1 mrg }
6227 1.1 mrg }
6228 1.1 mrg }
6229 1.1 mrg
6230 1.1 mrg static rtx cse_process_note (rtx);
6232 1.1 mrg
6233 1.1 mrg /* A simplify_replace_fn_rtx callback for cse_process_note. Process X,
6234 1.1 mrg part of the REG_NOTES of an insn. Replace any registers with either
6235 1.1 mrg an equivalent constant or the canonical form of the register.
6236 1.1 mrg Only replace addresses if the containing MEM remains valid.
6237 1.1 mrg
6238 1.1 mrg Return the replacement for X, or null if it should be simplified
6239 1.1 mrg recursively. */
6240 1.1 mrg
6241 1.1 mrg static rtx
6242 1.1 mrg cse_process_note_1 (rtx x, const_rtx, void *)
6243 1.1 mrg {
6244 1.1 mrg if (MEM_P (x))
6245 1.1 mrg {
6246 1.1 mrg validate_change (x, &XEXP (x, 0), cse_process_note (XEXP (x, 0)), false);
6247 1.1 mrg return x;
6248 1.1 mrg }
6249 1.1 mrg
6250 1.1 mrg if (REG_P (x))
6251 1.1 mrg {
6252 1.1 mrg int i = REG_QTY (REGNO (x));
6253 1.1 mrg
6254 1.1 mrg /* Return a constant or a constant register. */
6255 1.1 mrg if (REGNO_QTY_VALID_P (REGNO (x)))
6256 1.1 mrg {
6257 1.1 mrg struct qty_table_elem *ent = &qty_table[i];
6258 1.1 mrg
6259 1.1 mrg if (ent->const_rtx != NULL_RTX
6260 1.1 mrg && (CONSTANT_P (ent->const_rtx)
6261 1.1 mrg || REG_P (ent->const_rtx)))
6262 1.1 mrg {
6263 1.1 mrg rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
6264 1.1 mrg if (new_rtx)
6265 1.1 mrg return copy_rtx (new_rtx);
6266 1.1 mrg }
6267 1.1 mrg }
6268 1.1 mrg
6269 1.1 mrg /* Otherwise, canonicalize this register. */
6270 1.1 mrg return canon_reg (x, NULL);
6271 1.1 mrg }
6272 1.1 mrg
6273 1.1 mrg return NULL_RTX;
6274 1.1 mrg }
6275 1.1 mrg
6276 1.1 mrg /* Process X, part of the REG_NOTES of an insn. Replace any registers in it
6277 1.1 mrg with either an equivalent constant or the canonical form of the register.
6278 1.1 mrg Only replace addresses if the containing MEM remains valid. */
6279 1.1 mrg
6280 1.1 mrg static rtx
6281 1.1 mrg cse_process_note (rtx x)
6282 1.1 mrg {
6283 1.1 mrg return simplify_replace_fn_rtx (x, NULL_RTX, cse_process_note_1, NULL);
6284 1.1 mrg }
6285 1.1 mrg
6286 1.1 mrg
6287 1.1 mrg /* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
6289 1.1 mrg
6290 1.1 mrg DATA is a pointer to a struct cse_basic_block_data, that is used to
6291 1.1 mrg describe the path.
6292 1.1 mrg It is filled with a queue of basic blocks, starting with FIRST_BB
6293 1.1 mrg and following a trace through the CFG.
6294 1.1 mrg
6295 1.1 mrg If all paths starting at FIRST_BB have been followed, or no new path
6296 1.1 mrg starting at FIRST_BB can be constructed, this function returns FALSE.
6297 1.1 mrg Otherwise, DATA->path is filled and the function returns TRUE indicating
6298 1.1 mrg that a path to follow was found.
6299 1.1 mrg
6300 1.1 mrg If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6301 1.1 mrg block in the path will be FIRST_BB. */
6302 1.1 mrg
static bool
cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
	       int follow_jumps)
{
  basic_block bb;
  edge e;
  int path_size;

  /* FIRST_BB is always considered visited, whether or not we end up
     keeping a path that starts there.  */
  bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);

  /* See if there is a previous path.  */
  path_size = data->path_size;

  /* There is a previous path.  Make sure it started with FIRST_BB.  */
  if (path_size)
    gcc_assert (data->path[0].bb == first_bb);

  /* There was only one basic block in the last path.  Clear the path and
     return, so that paths starting at another basic block can be tried.  */
  if (path_size == 1)
    {
      path_size = 0;
      goto done;
    }

  /* If the path was empty from the beginning, construct a new path.  */
  if (path_size == 0)
    data->path[path_size++].bb = first_bb;
  else
    {
      /* Otherwise, path_size must be equal to or greater than 2, because
	 a previous path exists that is at least two basic blocks long.

	 Update the previous branch path, if any.  If the last branch was
	 previously along the branch edge, take the fallthrough edge now.  */
      while (path_size >= 2)
	{
	  basic_block last_bb_in_path, previous_bb_in_path;
	  edge e;

	  --path_size;
	  last_bb_in_path = data->path[path_size].bb;
	  previous_bb_in_path = data->path[path_size - 1].bb;

	  /* If we previously followed a path along the branch edge, try
	     the fallthru edge now.  */
	  if (EDGE_COUNT (previous_bb_in_path->succs) == 2
	      && any_condjump_p (BB_END (previous_bb_in_path))
	      && (e = find_edge (previous_bb_in_path, last_bb_in_path))
	      && e == BRANCH_EDGE (previous_bb_in_path))
	    {
	      bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_pred_p (bb)
		  /* We used to assert here that we would only see blocks
		     that we have not visited yet.  But we may end up
		     visiting basic blocks twice if the CFG has changed
		     in this run of cse_main, because when the CFG changes
		     the topological sort of the CFG also changes.  A basic
		     blocks that previously had more than two predecessors
		     may now have a single predecessor, and become part of
		     a path that starts at another basic block.

		     We still want to visit each basic block only once, so
		     halt the path here if we have already visited BB.  */
		  && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
		{
		  /* The fallthru alternative is usable; it becomes the
		     new last block of the path, and extension continues
		     from it below.  */
		  bitmap_set_bit (cse_visited_basic_blocks, bb->index);
		  data->path[path_size++].bb = bb;
		  break;
		}
	    }

	  /* No usable alternative at this level; drop the last block
	     from the path and back up one more level.  */
	  data->path[path_size].bb = NULL;
	}

      /* If only one block remains in the path, bail.  */
      if (path_size == 1)
	{
	  path_size = 0;
	  goto done;
	}
    }

  /* Extend the path if possible.  */
  if (follow_jumps)
    {
      bb = data->path[path_size - 1].bb;
      while (bb && path_size < param_max_cse_path_length)
	{
	  if (single_succ_p (bb))
	    e = single_succ_edge (bb);
	  else if (EDGE_COUNT (bb->succs) == 2
		   && any_condjump_p (BB_END (bb)))
	    {
	      /* First try to follow the branch.  If that doesn't lead
		 to a useful path, follow the fallthru edge.  */
	      e = BRANCH_EDGE (bb);
	      if (!single_pred_p (e->dest))
		e = FALLTHRU_EDGE (bb);
	    }
	  else
	    e = NULL;

	  if (e
	      && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
	      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && single_pred_p (e->dest)
	      /* Avoid visiting basic blocks twice.  The large comment
		 above explains why this can happen.  */
	      && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
	    {
	      basic_block bb2 = e->dest;
	      bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
	      data->path[path_size++].bb = bb2;
	      bb = bb2;
	    }
	  else
	    bb = NULL;
	}
    }

 done:
  /* Record the (possibly empty) path; a zero size tells the caller
     that no further paths start at FIRST_BB.  */
  data->path_size = path_size;
  return path_size != 0;
}
6429 1.1 mrg
6430 1.1 mrg /* Dump the path in DATA to file F. NSETS is the number of sets
6432 1.1 mrg in the path. */
6433 1.1 mrg
6434 1.1 mrg static void
6435 1.1 mrg cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6436 1.1 mrg {
6437 1.1 mrg int path_entry;
6438 1.1 mrg
6439 1.1 mrg fprintf (f, ";; Following path with %d sets: ", nsets);
6440 1.1 mrg for (path_entry = 0; path_entry < data->path_size; path_entry++)
6441 1.1 mrg fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6442 1.1 mrg fputc ('\n', f);
6443 1.1 mrg fflush (f);
6444 1.1 mrg }
6445 1.1 mrg
6446 1.1 mrg
6447 1.1 mrg /* Return true if BB has exception handling successor edges. */
6449 1.1 mrg
6450 1.1 mrg static bool
6451 1.1 mrg have_eh_succ_edges (basic_block bb)
6452 1.1 mrg {
6453 1.1 mrg edge e;
6454 1.1 mrg edge_iterator ei;
6455 1.1 mrg
6456 1.1 mrg FOR_EACH_EDGE (e, ei, bb->succs)
6457 1.1 mrg if (e->flags & EDGE_EH)
6458 1.1 mrg return true;
6459 1.1 mrg
6460 1.1 mrg return false;
6461 1.1 mrg }
6462 1.1 mrg
6463 1.1 mrg
/* Scan to the end of the path described by DATA.  Record in DATA->nsets
   an estimate of the total number of SETs of all insns in the path.  */
6467 1.1 mrg
6468 1.1 mrg static void
6469 1.1 mrg cse_prescan_path (struct cse_basic_block_data *data)
6470 1.1 mrg {
6471 1.1 mrg int nsets = 0;
6472 1.1 mrg int path_size = data->path_size;
6473 1.1 mrg int path_entry;
6474 1.1 mrg
6475 1.1 mrg /* Scan to end of each basic block in the path. */
6476 1.1 mrg for (path_entry = 0; path_entry < path_size; path_entry++)
6477 1.1 mrg {
6478 1.1 mrg basic_block bb;
6479 1.1 mrg rtx_insn *insn;
6480 1.1 mrg
6481 1.1 mrg bb = data->path[path_entry].bb;
6482 1.1 mrg
6483 1.1 mrg FOR_BB_INSNS (bb, insn)
6484 1.1 mrg {
6485 1.1 mrg if (!INSN_P (insn))
6486 1.1 mrg continue;
6487 1.1 mrg
6488 1.1 mrg /* A PARALLEL can have lots of SETs in it,
6489 1.1 mrg especially if it is really an ASM_OPERANDS. */
6490 1.1 mrg if (GET_CODE (PATTERN (insn)) == PARALLEL)
6491 1.1 mrg nsets += XVECLEN (PATTERN (insn), 0);
6492 1.1 mrg else
6493 1.1 mrg nsets += 1;
6494 1.1 mrg }
6495 1.1 mrg }
6496 1.1 mrg
6497 1.1 mrg data->nsets = nsets;
6498 1.1 mrg }
6499 1.1 mrg
6500 1.1 mrg /* Return true if the pattern of INSN uses a LABEL_REF for which
6502 1.1 mrg there isn't a REG_LABEL_OPERAND note. */
6503 1.1 mrg
static bool
check_for_label_ref (rtx_insn *insn)
{
  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
     note for it, we must rerun jump since it needs to place the note.  If
     this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
     don't do this since no REG_LABEL_OPERAND will be added.  */
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
    {
      const_rtx x = *iter;
      if (GET_CODE (x) == LABEL_REF
	  /* Nonlocal label references are excluded.  */
	  && !LABEL_REF_NONLOCAL_P (x)
	  /* Labels that are simply jump targets of INSN don't need a
	     REG_LABEL_OPERAND note.  */
	  && (!JUMP_P (insn)
	      || !label_is_jump_target_p (label_ref_label (x), insn))
	  && LABEL_P (label_ref_label (x))
	  /* A zero UID indicates the CODE_LABEL is not in the insn
	     chain (see comment above).  */
	  && INSN_UID (label_ref_label (x)) != 0
	  && !find_reg_note (insn, REG_LABEL_OPERAND, label_ref_label (x)))
	return true;
    }
  return false;
}
6526 1.1 mrg
6527 1.1 mrg /* Process a single extended basic block described by EBB_DATA. */
6528 1.1 mrg
static void
cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
{
  int path_size = ebb_data->path_size;
  int path_entry;
  int num_insns = 0;

  /* Allocate the space needed by qty_table.  */
  qty_table = XNEWVEC (struct qty_table_elem, max_qty);

  new_basic_block ();
  /* Liveness at the boundaries of the whole extended basic block, taken
     from the first and last blocks on the path.  */
  cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
  cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx_insn *insn;

      bb = ebb_data->path[path_entry].bb;

      /* Invalidate recorded information for eh regs if there is an EH
	 edge pointing to that bb.  */
      if (bb_has_eh_pred (bb))
	{
	  df_ref def;

	  FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
	    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	      invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
	}

      optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
      FOR_BB_INSNS (bb, insn)
	{
	  /* If we have processed 1,000 insns, flush the hash table to
	     avoid extreme quadratic behavior.  We must not include NOTEs
	     in the count since there may be more of them when generating
	     debugging information.  If we clear the table at different
	     times, code generated with -g -O might be different than code
	     generated with -O but not -g.

	     FIXME: This is a real kludge and needs to be done some other
	     way.  */
	  if (NONDEBUG_INSN_P (insn)
	      && num_insns++ > param_max_cse_insns)
	    {
	      flush_hash_table ();
	      num_insns = 0;
	    }

	  if (INSN_P (insn))
	    {
	      /* Process notes first so we have all notes in canonical forms
		 when looking for duplicate operations.  */
	      bool changed = false;
	      for (rtx note = REG_NOTES (insn); note; note = XEXP (note, 1))
		if (REG_NOTE_KIND (note) == REG_EQUAL)
		  {
		    rtx newval = cse_process_note (XEXP (note, 0));
		    if (newval != XEXP (note, 0))
		      {
			XEXP (note, 0) = newval;
			changed = true;
		      }
		  }
	      /* Tell the dataflow framework the notes were modified.  */
	      if (changed)
		df_notes_rescan (insn);

	      cse_insn (insn);

	      /* If we haven't already found an insn where we added a LABEL_REF,
		 check this one.  */
	      if (INSN_P (insn) && !recorded_label_ref
		  && check_for_label_ref (insn))
		recorded_label_ref = true;
	    }
	}

      /* With non-call exceptions, we are not always able to update
	 the CFG properly inside cse_insn.  So clean up possibly
	 redundant EH edges here.  */
      if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
	cse_cfg_altered |= purge_dead_edges (bb);

      /* If we changed a conditional jump, we may have terminated
	 the path we are following.  Check that by verifying that
	 the edge we would take still exists.  If the edge does
	 not exist anymore, purge the remainder of the path.
	 Note that this will cause us to return to the caller.  */
      if (path_entry < path_size - 1)
	{
	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
	  if (!find_edge (bb, next_bb))
	    {
	      do
		{
		  path_size--;

		  /* If we truncate the path, we must also reset the
		     visited bit on the remaining blocks in the path,
		     or we will never visit them at all.  */
		  bitmap_clear_bit (cse_visited_basic_blocks,
				    ebb_data->path[path_size].bb->index);
		  ebb_data->path[path_size].bb = NULL;
		}
	      while (path_size - 1 != path_entry);
	      ebb_data->path_size = path_size;
	    }
	}

      /* If this is a conditional jump insn, record any known
	 equivalences due to the condition being tested.  */
      insn = BB_END (bb);
      if (path_entry < path_size - 1
	  && EDGE_COUNT (bb->succs) == 2
	  && JUMP_P (insn)
	  && single_set (insn)
	  && any_condjump_p (insn))
	{
	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
	  record_jump_equiv (insn, taken);
	}
    }

  /* The max_qty estimate made by the caller must have been large
     enough for everything cse_insn recorded.  */
  gcc_assert (next_qty <= max_qty);

  free (qty_table);
}
6658 1.1 mrg
6659 1.1 mrg
6660 1.1 mrg /* Perform cse on the instructions of a function.
6662 1.1 mrg F is the first instruction.
6663 1.1 mrg NREGS is one plus the highest pseudo-reg number used in the instruction.
6664 1.1 mrg
6665 1.1 mrg Return 2 if jump optimizations should be redone due to simplifications
6666 1.1 mrg in conditional jump instructions.
6667 1.1 mrg Return 1 if the CFG should be cleaned up because it has been modified.
6668 1.1 mrg Return 0 otherwise. */
6669 1.1 mrg
static int
cse_main (rtx_insn *f ATTRIBUTE_UNUSED, int nregs)
{
  struct cse_basic_block_data ebb_data;
  basic_block bb;
  int *rc_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
  int i, n_blocks;

  /* CSE doesn't use dominance info but can invalidate it in different ways.
     For simplicity free dominance info here.  */
  free_dominance_info (CDI_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();
  df_set_flags (DF_DEFER_INSN_RESCAN);

  reg_scan (get_insns (), max_reg_num ());
  init_cse_reg_info (nregs);

  /* The path buffer is sized for the longest path we will ever build.  */
  ebb_data.path = XNEWVEC (struct branch_path,
			   param_max_cse_path_length);

  cse_cfg_altered = false;
  cse_jumps_altered = false;
  recorded_label_ref = false;
  ebb_data.path_size = 0;
  ebb_data.nsets = 0;
  rtl_hooks = cse_rtl_hooks;

  init_recog ();
  init_alias_analysis ();

  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);

  /* Set up the table of already visited basic blocks.  */
  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (cse_visited_basic_blocks);

  /* Loop over basic blocks in reverse completion order (RPO),
     excluding the ENTRY and EXIT blocks.  */
  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
  i = 0;
  while (i < n_blocks)
    {
      /* Find the first block in the RPO queue that we have not yet
	 processed before.  */
      do
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, rc_order[i++]);
	}
      while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
	     && i < n_blocks);

      /* Find all paths starting with BB, and process them.  */
      while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
	{
	  /* Pre-scan the path.  */
	  cse_prescan_path (&ebb_data);

	  /* If this basic block has no sets, skip it.  */
	  if (ebb_data.nsets == 0)
	    continue;

	  /* Get a reasonable estimate for the maximum number of qty's
	     needed for this path.  For this, we take the number of sets
	     and multiply that by MAX_RECOG_OPERANDS.  */
	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;

	  /* Dump the path we're about to process.  */
	  if (dump_file)
	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);

	  cse_extended_basic_block (&ebb_data);
	}
    }

  /* Clean up.  */
  end_alias_analysis ();
  free (reg_eqv_table);
  free (ebb_data.path);
  sbitmap_free (cse_visited_basic_blocks);
  free (rc_order);
  rtl_hooks = general_rtl_hooks;

  /* See the function comment for the meaning of the return values.  */
  if (cse_jumps_altered || recorded_label_ref)
    return 2;
  else if (cse_cfg_altered)
    return 1;
  else
    return 0;
}
6762 1.1 mrg
6763 1.1 mrg /* Count the number of times registers are used (not set) in X.
6765 1.1 mrg COUNTS is an array in which we accumulate the count, INCR is how much
6766 1.1 mrg we count each register usage.
6767 1.1 mrg
6768 1.1 mrg Don't count a usage of DEST, which is the SET_DEST of a SET which
6769 1.1 mrg contains X in its SET_SRC. This is because such a SET does not
6770 1.1 mrg modify the liveness of DEST.
6771 1.1 mrg DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
6772 1.1 mrg We must then count uses of a SET_DEST regardless, because the insn can't be
6773 1.1 mrg deleted here. */
6774 1.1 mrg
static void
count_reg_usage (rtx x, int *counts, rtx dest, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      /* Don't count the SET_DEST of the containing SET (see function
	 comment).  */
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case PC:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      /* These contain no register uses.  */
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (!REG_P (SET_DEST (x)))
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
      count_reg_usage (SET_SRC (x), counts,
		       dest ? dest : SET_DEST (x),
		       incr);
      return;

    case DEBUG_INSN:
      /* Uses inside debug insns are not counted here.  */
      return;

    case CALL_INSN:
    case INSN:
    case JUMP_INSN:
      /* We expect dest to be NULL_RTX here.  If the insn may throw,
	 or if it cannot be deleted due to side-effects, mark this fact
	 by setting DEST to pc_rtx.  */
      if ((!cfun->can_delete_dead_exceptions && !insn_nothrow_p (x))
	  || side_effects_p (PATTERN (x)))
	dest = pc_rtx;
      if (code == CALL_INSN)
	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
      count_reg_usage (PATTERN (x), counts, dest, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	  /* This REG_EQUAL note describes the result of a function call.
	     Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, dest, incr);
	}
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);

      /* Continue down the rest of the list.  */
      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    case ASM_OPERANDS:
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
      return;

    case INSN_LIST:
    case INT_LIST:
      gcc_unreachable ();

    default:
      break;
    }

  /* Generic case: recurse into every rtx operand and vector element.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
6889 1.1 mrg
6890 1.1 mrg /* Return true if X is a dead register. */
6892 1.1 mrg
6893 1.1 mrg static inline int
6894 1.1 mrg is_dead_reg (const_rtx x, int *counts)
6895 1.1 mrg {
6896 1.1 mrg return (REG_P (x)
6897 1.1 mrg && REGNO (x) >= FIRST_PSEUDO_REGISTER
6898 1.1 mrg && counts[REGNO (x)] == 0);
6899 1.1 mrg }
6900 1.1 mrg
6901 1.1 mrg /* Return true if set is live. */
6902 1.1 mrg static bool
6903 1.1 mrg set_live_p (rtx set, int *counts)
6904 1.1 mrg {
6905 1.1 mrg if (set_noop_p (set))
6906 1.1 mrg return false;
6907 1.1 mrg
6908 1.1 mrg if (!is_dead_reg (SET_DEST (set), counts)
6909 1.1 mrg || side_effects_p (SET_SRC (set)))
6910 1.1 mrg return true;
6911 1.1 mrg
6912 1.1 mrg return false;
6913 1.1 mrg }
6914 1.1 mrg
6915 1.1 mrg /* Return true if insn is live. */
6916 1.1 mrg
static bool
insn_live_p (rtx_insn *insn, int *counts)
{
  int i;
  /* An insn that may throw must be kept, unless the function is allowed
     to delete dead exceptions.  */
  if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
    return true;
  else if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      /* A PARALLEL is live if any of its SETs is live; non-SET elements
	 other than CLOBBER and USE also keep it alive.  */
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  if (GET_CODE (elt) == SET)
	    {
	      if (set_live_p (elt, counts))
		return true;
	    }
	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	    return true;
	}
      return false;
    }
  else if (DEBUG_INSN_P (insn))
    {
      rtx_insn *next;

      if (DEBUG_MARKER_INSN_P (insn))
	return true;

      /* A debug bind insn is dead if, scanning forward past NOTEs, the
	 next non-debug insn is never reached before another debug bind
	 for the same decl — i.e. this binding is superseded before any
	 real insn or inspection point could observe it.  */
      for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
	if (NOTE_P (next))
	  continue;
	else if (!DEBUG_INSN_P (next))
	  return true;
	/* If we find an inspection point, such as a debug begin stmt,
	   we want to keep the earlier debug insn.  */
	else if (DEBUG_MARKER_INSN_P (next))
	  return true;
	else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
	  return false;

      return true;
    }
  else
    return true;
}
6965 1.1 mrg
6966 1.1 mrg /* Count the number of stores into pseudo. Callback for note_stores. */
6967 1.1 mrg
6968 1.1 mrg static void
6969 1.1 mrg count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
6970 1.1 mrg {
6971 1.1 mrg int *counts = (int *) data;
6972 1.1 mrg if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
6973 1.1 mrg counts[REGNO (x)]++;
6974 1.1 mrg }
6975 1.1 mrg
/* Return true if DEBUG_INSN pattern PAT needs to be reset because some dead
   pseudo doesn't have a replacement.  COUNTS[X] is zero if register X
   is dead and REPLACEMENTS[X] is null if it has no replacement.
   Set *SEEN_REPL to true if we see a dead register that does have
   a replacement.  */
6981 1.1 mrg
6982 1.1 mrg static bool
6983 1.1 mrg is_dead_debug_insn (const_rtx pat, int *counts, rtx *replacements,
6984 1.1 mrg bool *seen_repl)
6985 1.1 mrg {
6986 1.1 mrg subrtx_iterator::array_type array;
6987 1.1 mrg FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
6988 1.1 mrg {
6989 1.1 mrg const_rtx x = *iter;
6990 1.1 mrg if (is_dead_reg (x, counts))
6991 1.1 mrg {
6992 1.1 mrg if (replacements && replacements[REGNO (x)] != NULL_RTX)
6993 1.1 mrg *seen_repl = true;
6994 1.1 mrg else
6995 1.1 mrg return true;
6996 1.1 mrg }
6997 1.1 mrg }
6998 1.1 mrg return false;
6999 1.1 mrg }
7000 1.1 mrg
7001 1.1 mrg /* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
7002 1.1 mrg Callback for simplify_replace_fn_rtx. */
7003 1.1 mrg
7004 1.1 mrg static rtx
7005 1.1 mrg replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
7006 1.1 mrg {
7007 1.1 mrg rtx *replacements = (rtx *) data;
7008 1.1 mrg
7009 1.1 mrg if (REG_P (x)
7010 1.1 mrg && REGNO (x) >= FIRST_PSEUDO_REGISTER
7011 1.1 mrg && replacements[REGNO (x)] != NULL_RTX)
7012 1.1 mrg {
7013 1.1 mrg if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
7014 1.1 mrg return replacements[REGNO (x)];
7015 1.1 mrg return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
7016 1.1 mrg GET_MODE (replacements[REGNO (x)]));
7017 1.1 mrg }
7018 1.1 mrg return NULL_RTX;
7019 1.1 mrg }
7020 1.1 mrg
7021 1.1 mrg /* Scan all the insns and delete any that are dead; i.e., they store a register
7022 1.1 mrg that is never used or they copy a register to itself.
7023 1.1 mrg
7024 1.1 mrg This is used to remove insns made obviously dead by cse, loop or other
7025 1.1 mrg optimizations. It improves the heuristics in loop since it won't try to
7026 1.1 mrg move dead invariants out of loops or make givs for dead quantities. The
7027 1.1 mrg remaining passes of the compilation are also sped up. */
7028 1.1 mrg
int
delete_trivially_dead_insns (rtx_insn *insns, int nreg)
{
  int *counts;
  rtx_insn *insn, *prev;
  rtx *replacements = NULL;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      counts = XCNEWVEC (int, nreg * 3);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (DEBUG_BIND_INSN_P (insn))
	  count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
			   NULL_RTX, 1);
	else if (INSN_P (insn))
	  {
	    count_reg_usage (insn, counts, NULL_RTX, 1);
	    note_stores (insn, count_stores, counts + nreg * 2);
	  }
      /* If there can be debug insns, COUNTS are 3 consecutive arrays.
	 First one counts how many times each pseudo is used outside
	 of debug insns, second counts how many times each pseudo is
	 used in debug insns and third counts how many times a pseudo
	 is stored.  */
    }
  else
    {
      counts = XCNEWVEC (int, nreg);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  count_reg_usage (insn, counts, NULL_RTX, 1);
      /* If no debug insns can be present, COUNTS is just an array
	 which counts how many times each pseudo is used.  */
    }
  /* Pseudo PIC register should be considered as used due to possible
     new usages generated.  */
  if (!reload_completed
      && pic_offset_table_rtx
      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (pic_offset_table_rtx)]++;
  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.   We must not skip that insn or we may end
     up deleting code that is not really dead.

     If some otherwise unused register is only used in DEBUG_INSNs,
     try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
     the setter.  Then go through DEBUG_INSNs and if a DEBUG_EXPR
     has been created for the unused register, replace it with
     the DEBUG_EXPR, otherwise reset the DEBUG_INSN.  */
  for (insn = get_last_insn (); insn; insn = prev)
    {
      int live_insn = 0;

      prev = PREV_INSN (insn);
      if (!INSN_P (insn))
	continue;

      live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn && dbg_cnt (delete_trivial_dead))
	{
	  if (DEBUG_INSN_P (insn))
	    {
	      /* A dead debug bind only contributed to the debug-use
		 counts; undo that contribution.  */
	      if (DEBUG_BIND_INSN_P (insn))
		count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
				 NULL_RTX, -1);
	    }
	  else
	    {
	      rtx set;
	      /* Before deleting a setter whose destination is still used
		 in debug insns, try to preserve the debug info by binding
		 a DEBUG_EXPR to the source value.  */
	      if (MAY_HAVE_DEBUG_BIND_INSNS
		  && (set = single_set (insn)) != NULL_RTX
		  && is_dead_reg (SET_DEST (set), counts)
		  /* Used at least once in some DEBUG_INSN.  */
		  && counts[REGNO (SET_DEST (set)) + nreg] > 0
		  /* And set exactly once.  */
		  && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
		  && !side_effects_p (SET_SRC (set))
		  && asm_noperands (PATTERN (insn)) < 0)
		{
		  rtx dval, bind_var_loc;
		  rtx_insn *bind;

		  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
		  dval = make_debug_expr_from_rtl (SET_DEST (set));

		  /* Emit a debug bind insn before the insn in which
		     reg dies.  */
		  bind_var_loc =
		    gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
					  DEBUG_EXPR_TREE_DECL (dval),
					  SET_SRC (set),
					  VAR_INIT_STATUS_INITIALIZED);
		  count_reg_usage (bind_var_loc, counts + nreg, NULL_RTX, 1);

		  bind = emit_debug_insn_before (bind_var_loc, insn);
		  df_insn_rescan (bind);

		  /* Lazily allocate the replacement map on first use.  */
		  if (replacements == NULL)
		    replacements = XCNEWVEC (rtx, nreg);
		  replacements[REGNO (SET_DEST (set))] = dval;
		}

	      count_reg_usage (insn, counts, NULL_RTX, -1);
	      ndead++;
	    }
	  cse_cfg_altered |= delete_insn_and_edges (insn);
	}
    }

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	if (DEBUG_BIND_INSN_P (insn))
	  {
	    /* If this debug insn references a dead register that wasn't replaced
	       with an DEBUG_EXPR, reset the DEBUG_INSN.  */
	    bool seen_repl = false;
	    if (is_dead_debug_insn (INSN_VAR_LOCATION_LOC (insn),
				    counts, replacements, &seen_repl))
	      {
		INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
		df_insn_rescan (insn);
	      }
	    else if (seen_repl)
	      {
		INSN_VAR_LOCATION_LOC (insn)
		  = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
					     NULL_RTX, replace_dead_reg,
					     replacements);
		df_insn_rescan (insn);
	      }
	  }
      free (replacements);
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
	     ndead);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}
7183 1.1 mrg
7184 1.1 mrg /* If LOC contains references to NEWREG in a different mode, change them
7185 1.1 mrg to use NEWREG instead. */
7186 1.1 mrg
7187 1.1 mrg static void
7188 1.1 mrg cse_change_cc_mode (subrtx_ptr_iterator::array_type &array,
7189 1.1 mrg rtx *loc, rtx_insn *insn, rtx newreg)
7190 1.1 mrg {
7191 1.1 mrg FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
7192 1.1 mrg {
7193 1.1 mrg rtx *loc = *iter;
7194 1.1 mrg rtx x = *loc;
7195 1.1 mrg if (x
7196 1.1 mrg && REG_P (x)
7197 1.1 mrg && REGNO (x) == REGNO (newreg)
7198 1.1 mrg && GET_MODE (x) != GET_MODE (newreg))
7199 1.1 mrg {
7200 1.1 mrg validate_change (insn, loc, newreg, 1);
7201 1.1 mrg iter.skip_subrtxes ();
7202 1.1 mrg }
7203 1.1 mrg }
7204 1.1 mrg }
7205 1.1 mrg
7206 1.1 mrg /* Change the mode of any reference to the register REGNO (NEWREG) to
7207 1.1 mrg GET_MODE (NEWREG) in INSN. */
7208 1.1 mrg
7209 1.1 mrg static void
7210 1.1 mrg cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
7211 1.1 mrg {
7212 1.1 mrg int success;
7213 1.1 mrg
7214 1.1 mrg if (!INSN_P (insn))
7215 1.1 mrg return;
7216 1.1 mrg
7217 1.1 mrg subrtx_ptr_iterator::array_type array;
7218 1.1 mrg cse_change_cc_mode (array, &PATTERN (insn), insn, newreg);
7219 1.1 mrg cse_change_cc_mode (array, ®_NOTES (insn), insn, newreg);
7220 1.1 mrg
7221 1.1 mrg /* If the following assertion was triggered, there is most probably
7222 1.1 mrg something wrong with the cc_modes_compatible back end function.
7223 1.1 mrg CC modes only can be considered compatible if the insn - with the mode
7224 1.1 mrg replaced by any of the compatible modes - can still be recognized. */
7225 1.1 mrg success = apply_change_group ();
7226 1.1 mrg gcc_assert (success);
7227 1.1 mrg }
7228 1.1 mrg
7229 1.1 mrg /* Change the mode of any reference to the register REGNO (NEWREG) to
7230 1.1 mrg GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7231 1.1 mrg any instruction which modifies NEWREG. */
7232 1.1 mrg
7233 1.1 mrg static void
7234 1.1 mrg cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
7235 1.1 mrg {
7236 1.1 mrg rtx_insn *insn;
7237 1.1 mrg
7238 1.1 mrg for (insn = start; insn != end; insn = NEXT_INSN (insn))
7239 1.1 mrg {
7240 1.1 mrg if (! INSN_P (insn))
7241 1.1 mrg continue;
7242 1.1 mrg
7243 1.1 mrg if (reg_set_p (newreg, insn))
7244 1.1 mrg return;
7245 1.1 mrg
7246 1.1 mrg cse_change_cc_mode_insn (insn, newreg);
7247 1.1 mrg }
7248 1.1 mrg }
7249 1.1 mrg
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.
   ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
   but is passed unmodified down to recursive calls in order to prevent
   endless recursion.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static machine_mode
cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
	      bool can_change_mode)
{
  bool found_equiv;
  machine_mode mode;
  unsigned int insn_count;
  edge e;
  /* Up to two matching setters are remembered here so their modes can
     be reconciled after all successors have been examined; further
     matches in the already-chosen mode are deleted immediately.  */
  rtx_insn *insns[2];
  machine_mode modes[2];
  rtx_insn *last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx_insn *insn;
      rtx_insn *end;

      /* Abnormal/EH edges don't transfer the CC value usefully.  */
      if (e->flags & EDGE_COMPLEX)
	continue;

      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* Avoid endless recursion on unreachable blocks.  */
	  || e->dest == orig_bb)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      machine_mode set_mode;
	      machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))

		{
		  /* Same operands compared in a different CC mode: let
		     the target decide whether the modes can be merged.  */
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  machine_mode submode;

	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      cse_cfg_altered |= delete_insn_and_edges (insns[i]);
    }

  return mode;
}
7446 1.1 mrg
7447 1.1 mrg /* If we have a fixed condition code register (or two), walk through
7448 1.1 mrg the instructions and try to eliminate duplicate assignments. */
7449 1.1 mrg
7450 1.1 mrg static void
7451 1.1 mrg cse_condition_code_reg (void)
7452 1.1 mrg {
7453 1.1 mrg unsigned int cc_regno_1;
7454 1.1 mrg unsigned int cc_regno_2;
7455 1.1 mrg rtx cc_reg_1;
7456 1.1 mrg rtx cc_reg_2;
7457 1.1 mrg basic_block bb;
7458 1.1 mrg
7459 1.1 mrg if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7460 1.1 mrg return;
7461 1.1 mrg
7462 1.1 mrg cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7463 1.1 mrg if (cc_regno_2 != INVALID_REGNUM)
7464 1.1 mrg cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7465 1.1 mrg else
7466 1.1 mrg cc_reg_2 = NULL_RTX;
7467 1.1 mrg
7468 1.1 mrg FOR_EACH_BB_FN (bb, cfun)
7469 1.1 mrg {
7470 1.1 mrg rtx_insn *last_insn;
7471 1.1 mrg rtx cc_reg;
7472 1.1 mrg rtx_insn *insn;
7473 1.1 mrg rtx_insn *cc_src_insn;
7474 1.1 mrg rtx cc_src;
7475 1.1 mrg machine_mode mode;
7476 1.1 mrg machine_mode orig_mode;
7477 1.1 mrg
7478 1.1 mrg /* Look for blocks which end with a conditional jump based on a
7479 1.1 mrg condition code register. Then look for the instruction which
7480 1.1 mrg sets the condition code register. Then look through the
7481 1.1 mrg successor blocks for instructions which set the condition
7482 1.1 mrg code register to the same value. There are other possible
7483 1.1 mrg uses of the condition code register, but these are by far the
7484 1.1 mrg most common and the ones which we are most likely to be able
7485 1.1 mrg to optimize. */
7486 1.1 mrg
7487 1.1 mrg last_insn = BB_END (bb);
7488 1.1 mrg if (!JUMP_P (last_insn))
7489 1.1 mrg continue;
7490 1.1 mrg
7491 1.1 mrg if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7492 1.1 mrg cc_reg = cc_reg_1;
7493 1.1 mrg else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7494 1.1 mrg cc_reg = cc_reg_2;
7495 1.1 mrg else
7496 1.1 mrg continue;
7497 1.1 mrg
7498 1.1 mrg cc_src_insn = NULL;
7499 1.1 mrg cc_src = NULL_RTX;
7500 1.1 mrg for (insn = PREV_INSN (last_insn);
7501 1.1 mrg insn && insn != PREV_INSN (BB_HEAD (bb));
7502 1.1 mrg insn = PREV_INSN (insn))
7503 1.1 mrg {
7504 1.1 mrg rtx set;
7505 1.1 mrg
7506 1.1 mrg if (! INSN_P (insn))
7507 1.1 mrg continue;
7508 1.1 mrg set = single_set (insn);
7509 1.1 mrg if (set
7510 1.1 mrg && REG_P (SET_DEST (set))
7511 1.1 mrg && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7512 1.1 mrg {
7513 1.1 mrg cc_src_insn = insn;
7514 1.1 mrg cc_src = SET_SRC (set);
7515 1.1 mrg break;
7516 1.1 mrg }
7517 1.1 mrg else if (reg_set_p (cc_reg, insn))
7518 1.1 mrg break;
7519 1.1 mrg }
7520 1.1 mrg
7521 1.1 mrg if (! cc_src_insn)
7522 1.1 mrg continue;
7523 1.1 mrg
7524 1.1 mrg if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7525 1.1 mrg continue;
7526 1.1 mrg
7527 1.1 mrg /* Now CC_REG is a condition code register used for a
7528 1.1 mrg conditional jump at the end of the block, and CC_SRC, in
7529 1.1 mrg CC_SRC_INSN, is the value to which that condition code
7530 1.1 mrg register is set, and CC_SRC is still meaningful at the end of
7531 1.1 mrg the basic block. */
7532 1.1 mrg
7533 1.1 mrg orig_mode = GET_MODE (cc_src);
7534 1.1 mrg mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
7535 1.1 mrg if (mode != VOIDmode)
7536 1.1 mrg {
7537 1.1 mrg gcc_assert (mode == GET_MODE (cc_src));
7538 1.1 mrg if (mode != orig_mode)
7539 1.1 mrg {
7540 1.1 mrg rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7541 1.1 mrg
7542 1.1 mrg cse_change_cc_mode_insn (cc_src_insn, newreg);
7543 1.1 mrg
7544 1.1 mrg /* Do the same in the following insns that use the
7545 1.1 mrg current value of CC_REG within BB. */
7546 1.1 mrg cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7547 1.1 mrg NEXT_INSN (last_insn),
7548 1.1 mrg newreg);
7549 1.1 mrg }
7550 1.1 mrg }
7551 1.1 mrg }
7552 1.1 mrg }
7553 1.1 mrg
7554 1.1 mrg
7556 1.1 mrg /* Perform common subexpression elimination. Nonzero value from
7557 1.1 mrg `cse_main' means that jumps were simplified and some code may now
7558 1.1 mrg be unreachable, so do jump optimization again. */
7559 1.1 mrg static unsigned int
7560 1.1 mrg rest_of_handle_cse (void)
7561 1.1 mrg {
7562 1.1 mrg int tem;
7563 1.1 mrg
7564 1.1 mrg if (dump_file)
7565 1.1 mrg dump_flow_info (dump_file, dump_flags);
7566 1.1 mrg
7567 1.1 mrg tem = cse_main (get_insns (), max_reg_num ());
7568 1.1 mrg
7569 1.1 mrg /* If we are not running more CSE passes, then we are no longer
7570 1.1 mrg expecting CSE to be run. But always rerun it in a cheap mode. */
7571 1.1 mrg cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7572 1.1 mrg
7573 1.1 mrg if (tem == 2)
7574 1.1 mrg {
7575 1.1 mrg timevar_push (TV_JUMP);
7576 1.1 mrg rebuild_jump_labels (get_insns ());
7577 1.1 mrg cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7578 1.1 mrg timevar_pop (TV_JUMP);
7579 1.1 mrg }
7580 1.1 mrg else if (tem == 1 || optimize > 1)
7581 1.1 mrg cse_cfg_altered |= cleanup_cfg (0);
7582 1.1 mrg
7583 1.1 mrg return 0;
7584 1.1 mrg }
7585 1.1 mrg
7586 1.1 mrg namespace {
7587 1.1 mrg
/* Descriptor for the first CSE pass ("cse1"), run early in the RTL
   pipeline; requests a df finish at the end of the pass.  */

const pass_data pass_data_cse =
{
  RTL_PASS, /* type */
  "cse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};
7600 1.1 mrg
/* Pass object for "cse1".  Gated on any optimization being enabled;
   the work is done by rest_of_handle_cse.  */

class pass_cse : public rtl_opt_pass
{
public:
  pass_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return optimize > 0; }
  virtual unsigned int execute (function *) { return rest_of_handle_cse (); }

}; // class pass_cse
7613 1.1 mrg
7614 1.1 mrg } // anon namespace
7615 1.1 mrg
/* Factory used by the pass manager to instantiate the "cse1" pass.  */

rtl_opt_pass *
make_pass_cse (gcc::context *ctxt)
{
  return new pass_cse (ctxt);
}
7621 1.1 mrg
7622 1.1 mrg
7623 1.1 mrg /* Run second CSE pass after loop optimizations. */
7624 1.1 mrg static unsigned int
7625 1.1 mrg rest_of_handle_cse2 (void)
7626 1.1 mrg {
7627 1.1 mrg int tem;
7628 1.1 mrg
7629 1.1 mrg if (dump_file)
7630 1.1 mrg dump_flow_info (dump_file, dump_flags);
7631 1.1 mrg
7632 1.1 mrg tem = cse_main (get_insns (), max_reg_num ());
7633 1.1 mrg
7634 1.1 mrg /* Run a pass to eliminate duplicated assignments to condition code
7635 1.1 mrg registers. We have to run this after bypass_jumps, because it
7636 1.1 mrg makes it harder for that pass to determine whether a jump can be
7637 1.1 mrg bypassed safely. */
7638 1.1 mrg cse_condition_code_reg ();
7639 1.1 mrg
7640 1.1 mrg delete_trivially_dead_insns (get_insns (), max_reg_num ());
7641 1.1 mrg
7642 1.1 mrg if (tem == 2)
7643 1.1 mrg {
7644 1.1 mrg timevar_push (TV_JUMP);
7645 1.1 mrg rebuild_jump_labels (get_insns ());
7646 1.1 mrg cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7647 1.1 mrg timevar_pop (TV_JUMP);
7648 1.1 mrg }
7649 1.1 mrg else if (tem == 1 || cse_cfg_altered)
7650 1.1 mrg cse_cfg_altered |= cleanup_cfg (0);
7651 1.1 mrg
7652 1.1 mrg cse_not_expected = 1;
7653 1.1 mrg return 0;
7654 1.1 mrg }
7655 1.1 mrg
7656 1.1 mrg
7657 1.1 mrg namespace {
7658 1.1 mrg
/* Descriptor for the second CSE pass ("cse2"), run after loop
   optimizations; requests a df finish at the end of the pass.  */

const pass_data pass_data_cse2 =
{
  RTL_PASS, /* type */
  "cse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};
7671 1.1 mrg
/* Pass object for "cse2".  Gated on -frerun-cse-after-loop; the work
   is done by rest_of_handle_cse2.  */

class pass_cse2 : public rtl_opt_pass
{
public:
  pass_cse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_rerun_cse_after_loop;
    }

  virtual unsigned int execute (function *) { return rest_of_handle_cse2 (); }

}; // class pass_cse2
7688 1.1 mrg
7689 1.1 mrg } // anon namespace
7690 1.1 mrg
/* Factory used by the pass manager to instantiate the "cse2" pass.  */

rtl_opt_pass *
make_pass_cse2 (gcc::context *ctxt)
{
  return new pass_cse2 (ctxt);
}
7696 1.1 mrg
7697 1.1 mrg /* Run second CSE pass after loop optimizations. */
7698 1.1 mrg static unsigned int
7699 1.1 mrg rest_of_handle_cse_after_global_opts (void)
7700 1.1 mrg {
7701 1.1 mrg int save_cfj;
7702 1.1 mrg int tem;
7703 1.1 mrg
7704 1.1 mrg /* We only want to do local CSE, so don't follow jumps. */
7705 1.1 mrg save_cfj = flag_cse_follow_jumps;
7706 1.1 mrg flag_cse_follow_jumps = 0;
7707 1.1 mrg
7708 1.1 mrg rebuild_jump_labels (get_insns ());
7709 1.1 mrg tem = cse_main (get_insns (), max_reg_num ());
7710 1.1 mrg cse_cfg_altered |= purge_all_dead_edges ();
7711 1.1 mrg delete_trivially_dead_insns (get_insns (), max_reg_num ());
7712 1.1 mrg
7713 1.1 mrg cse_not_expected = !flag_rerun_cse_after_loop;
7714 1.1 mrg
7715 1.1 mrg /* If cse altered any jumps, rerun jump opts to clean things up. */
7716 1.1 mrg if (tem == 2)
7717 1.1 mrg {
7718 1.1 mrg timevar_push (TV_JUMP);
7719 1.1 mrg rebuild_jump_labels (get_insns ());
7720 1.1 mrg cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7721 1.1 mrg timevar_pop (TV_JUMP);
7722 1.1 mrg }
7723 1.1 mrg else if (tem == 1 || cse_cfg_altered)
7724 1.1 mrg cse_cfg_altered |= cleanup_cfg (0);
7725 1.1 mrg
7726 1.1 mrg flag_cse_follow_jumps = save_cfj;
7727 1.1 mrg return 0;
7728 1.1 mrg }
7729 1.1 mrg
7730 1.1 mrg namespace {
7731 1.1 mrg
7732 1.1 mrg const pass_data pass_data_cse_after_global_opts =
7733 1.1 mrg {
7734 1.1 mrg RTL_PASS, /* type */
7735 1.1 mrg "cse_local", /* name */
7736 1.1 mrg OPTGROUP_NONE, /* optinfo_flags */
7737 TV_CSE, /* tv_id */
7738 0, /* properties_required */
7739 0, /* properties_provided */
7740 0, /* properties_destroyed */
7741 0, /* todo_flags_start */
7742 TODO_df_finish, /* todo_flags_finish */
7743 };
7744
/* Pass object for "cse_local".  Gated on
   -frerun-cse-after-global-opts; the work is done by
   rest_of_handle_cse_after_global_opts.  */

class pass_cse_after_global_opts : public rtl_opt_pass
{
public:
  pass_cse_after_global_opts (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse_after_global_opts, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_rerun_cse_after_global_opts;
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_cse_after_global_opts ();
    }

}; // class pass_cse_after_global_opts
7764
7765 } // anon namespace
7766
/* Factory used by the pass manager to instantiate the "cse_local"
   pass.  */

rtl_opt_pass *
make_pass_cse_after_global_opts (gcc::context *ctxt)
{
  return new pass_cse_after_global_opts (ctxt);
}
7772