/* Data flow functions for trees.
   Copyright (C) 2001-2022 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo (at) redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 1.1 mrg
21 1.1 mrg #include "config.h"
22 1.1 mrg #include "system.h"
23 1.1 mrg #include "coretypes.h"
24 1.1 mrg #include "backend.h"
25 1.1 mrg #include "rtl.h"
26 1.1 mrg #include "tree.h"
27 1.1 mrg #include "gimple.h"
28 1.1 mrg #include "tree-pass.h"
29 1.1 mrg #include "ssa.h"
30 1.1 mrg #include "tree-pretty-print.h"
31 1.1 mrg #include "fold-const.h"
32 1.1 mrg #include "stor-layout.h"
33 1.1 mrg #include "langhooks.h"
34 1.1 mrg #include "gimple-iterator.h"
35 1.1 mrg #include "gimple-walk.h"
36 1.1 mrg #include "tree-dfa.h"
37 1.1 mrg #include "gimple-range.h"
38 1.1 mrg
39 1.1 mrg /* Build and maintain data flow information for trees. */
40 1.1 mrg
/* Counters used to display DFA and SSA statistics.  Filled in by
   collect_dfa_stats and consumed by dump_dfa_stats.  */
struct dfa_stats_d
{
  long num_defs;		/* Real DEF operands seen.  */
  long num_uses;		/* Real USE operands seen.  */
  long num_phis;		/* PHI nodes seen.  */
  long num_phi_args;		/* Total PHI arguments across all PHIs.  */
  size_t max_num_phi_args;	/* Largest argument count on a single PHI.  */
  long num_vdefs;		/* Virtual definitions (VDEF) seen.  */
  long num_vuses;		/* Virtual uses (VUSE) seen.  */
};
52 1.1 mrg
53 1.1 mrg
54 1.1 mrg /* Local functions. */
55 1.1 mrg static void collect_dfa_stats (struct dfa_stats_d *);
56 1.1 mrg
57 1.1 mrg
58 1.1 mrg /*---------------------------------------------------------------------------
59 1.1 mrg Dataflow analysis (DFA) routines
60 1.1 mrg ---------------------------------------------------------------------------*/
61 1.1 mrg
62 1.1 mrg /* Renumber all of the gimple stmt uids. */
63 1.1 mrg
64 1.1 mrg void
65 1.1 mrg renumber_gimple_stmt_uids (struct function *fun)
66 1.1 mrg {
67 1.1 mrg basic_block bb;
68 1.1 mrg
69 1.1 mrg set_gimple_stmt_max_uid (fun, 0);
70 1.1 mrg FOR_ALL_BB_FN (bb, fun)
71 1.1 mrg {
72 1.1 mrg gimple_stmt_iterator bsi;
73 1.1 mrg for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
74 1.1 mrg {
75 1.1 mrg gimple *stmt = gsi_stmt (bsi);
76 1.1 mrg gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
77 1.1 mrg }
78 1.1 mrg for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
79 1.1 mrg {
80 1.1 mrg gimple *stmt = gsi_stmt (bsi);
81 1.1 mrg gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
82 1.1 mrg }
83 1.1 mrg }
84 1.1 mrg }
85 1.1 mrg
86 1.1 mrg /* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
87 1.1 mrg in BLOCKS, of which there are N_BLOCKS. Also renumbers PHIs. */
88 1.1 mrg
89 1.1 mrg void
90 1.1 mrg renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
91 1.1 mrg {
92 1.1 mrg int i;
93 1.1 mrg
94 1.1 mrg set_gimple_stmt_max_uid (cfun, 0);
95 1.1 mrg for (i = 0; i < n_blocks; i++)
96 1.1 mrg {
97 1.1 mrg basic_block bb = blocks[i];
98 1.1 mrg gimple_stmt_iterator bsi;
99 1.1 mrg for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
100 1.1 mrg {
101 1.1 mrg gimple *stmt = gsi_stmt (bsi);
102 1.1 mrg gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
103 1.1 mrg }
104 1.1 mrg for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
105 1.1 mrg {
106 1.1 mrg gimple *stmt = gsi_stmt (bsi);
107 1.1 mrg gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
108 1.1 mrg }
109 1.1 mrg }
110 1.1 mrg }
111 1.1 mrg
112 1.1 mrg
113 1.1 mrg
114 1.1 mrg /*---------------------------------------------------------------------------
115 1.1 mrg Debugging functions
116 1.1 mrg ---------------------------------------------------------------------------*/
117 1.1 mrg
/* Dump variable VAR and its may-aliases to FILE.  For an SSA_NAME the
   underlying declaration is dumped (plus points-to info for pointers).  */

void
dump_variable (FILE *file, tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      /* For pointer SSA names also dump the points-to solution, then
	 continue with the underlying declaration (may be NULL_TREE for
	 anonymous SSA names).  */
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	dump_points_to_info_for (file, var);
      var = SSA_NAME_VAR (var);
    }

  if (var == NULL_TREE)
    {
      fprintf (file, "<nil>");
      return;
    }

  print_generic_expr (file, var, dump_flags);

  fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
  /* Only mention the points-to UID when it differs from the DECL UID.  */
  if (DECL_PT_UID (var) != DECL_UID (var))
    fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));

  fprintf (file, ", ");
  print_generic_expr (file, TREE_TYPE (var), dump_flags);

  /* Flag the interesting decl properties.  */
  if (TREE_ADDRESSABLE (var))
    fprintf (file, ", is addressable");

  if (is_global_var (var))
    fprintf (file, ", is global");

  if (TREE_THIS_VOLATILE (var))
    fprintf (file, ", is volatile");

  /* cfun may be NULL when dumping outside of a function context.  */
  if (cfun && ssa_default_def (cfun, var))
    {
      fprintf (file, ", default def: ");
      print_generic_expr (file, ssa_default_def (cfun, var), dump_flags);
    }

  if (DECL_INITIAL (var))
    {
      fprintf (file, ", initial: ");
      print_generic_expr (file, DECL_INITIAL (var), dump_flags);
    }

  fprintf (file, "\n");
}
168 1.1 mrg
169 1.1 mrg
/* Dump variable VAR and its may-aliases to stderr.  Convenience wrapper
   around dump_variable for use from a debugger.  */

DEBUG_FUNCTION void
debug_variable (tree var)
{
  dump_variable (stderr, var);
}
177 1.1 mrg
178 1.1 mrg
/* Dump various DFA statistics to FILE.  Collects the statistics for the
   current function (CFUN) and prints a table of operand/PHI counts with
   approximate memory usage.  */

void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  /* Column layouts: header row, data row (count + memory), total row.  */
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu" PRsa (11) "\n";
  const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Each operand is accounted as one pointer; PHIs and their arguments
     use their structure sizes.  These are estimates, not exact usage.  */
  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phis * sizeof (struct gphi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SIZE_AMOUNT (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data",
	   SIZE_AMOUNT (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  /* Guard against division by zero when the function has no PHIs.  */
  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (long) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}
245 1.1 mrg
246 1.1 mrg
247 1.1 mrg /* Dump DFA statistics on stderr. */
248 1.1 mrg
249 1.1 mrg DEBUG_FUNCTION void
250 1.1 mrg debug_dfa_stats (void)
251 1.1 mrg {
252 1.1 mrg dump_dfa_stats (stderr);
253 1.1 mrg }
254 1.1 mrg
255 1.1 mrg
/* Collect DFA statistics and store them in the structure pointed to by
   DFA_STATS_P.  Walks every basic block of CFUN; *DFA_STATS_P is zeroed
   first.  */

static void
collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
{
  basic_block bb;

  gcc_assert (dfa_stats_p);

  memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));

  /* Walk all the statements in the function counting references.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Count PHI nodes and their arguments, tracking the maximum
	 argument count seen on any single PHI.  */
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gphi *phi = si.phi ();
	  dfa_stats_p->num_phis++;
	  dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
	  if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
	    dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
	}

      /* Count real (DEF/USE) and virtual (VDEF/VUSE) operands on the
	 block's ordinary statements; a statement carries at most one
	 VDEF and one VUSE.  */
      for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
	  dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
	  dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
	  dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
	}
    }
}
292 1.1 mrg
293 1.1 mrg
294 1.1 mrg /*---------------------------------------------------------------------------
295 1.1 mrg Miscellaneous helpers
296 1.1 mrg ---------------------------------------------------------------------------*/
297 1.1 mrg
/* Lookup VAR UID in the default_defs hashtable of function FN and return
   the associated SSA name, or NULL_TREE if VAR has no default
   definition.  VAR must be a VAR_DECL, PARM_DECL or RESULT_DECL.  */

tree
ssa_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);

  /* Always NULL_TREE for rtl function dumps.  */
  if (!fn->gimple_df)
    return NULL_TREE;

  /* Build a stack-allocated dummy SSA-name/decl pair carrying only the
     decl UID — that is all the hash table's hash/equality functions
     examine.  */
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return DEFAULT_DEFS (fn)->find_with_hash ((tree)&in, DECL_UID (var));
}
318 1.1 mrg
/* Insert the pair VAR's UID, DEF into the default_defs hashtable
   of function FN.  A NULL DEF removes any existing entry for VAR.
   DEF, when non-NULL, must be an SSA_NAME whose underlying variable
   is VAR; it is marked as a default definition.  */

void
set_ssa_default_def (struct function *fn, tree var, tree def)
{
  /* Dummy key carrying only the decl UID; see ssa_default_def.  */
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  if (!def)
    {
      /* Removal: clear the default-def flag on the old SSA name (if
	 any) and drop the slot from the table.  */
      tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
							  DECL_UID (var),
							  NO_INSERT);
      if (loc)
	{
	  SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false;
	  DEFAULT_DEFS (fn)->clear_slot (loc);
	}
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
						      DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*loc) = false;

  /* Mark DEF as the default definition for VAR.  */
  *loc = def;
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}
357 1.1 mrg
358 1.1 mrg /* Retrieve or create a default definition for VAR. */
359 1.1 mrg
360 1.1 mrg tree
361 1.1 mrg get_or_create_ssa_default_def (struct function *fn, tree var)
362 1.1 mrg {
363 1.1 mrg tree ddef = ssa_default_def (fn, var);
364 1.1 mrg if (ddef == NULL_TREE)
365 1.1 mrg {
366 1.1 mrg ddef = make_ssa_name_fn (fn, var, gimple_build_nop ());
367 1.1 mrg set_ssa_default_def (fn, var, ddef);
368 1.1 mrg }
369 1.1 mrg return ddef;
370 1.1 mrg }
371 1.1 mrg
372 1.1 mrg
/* If EXP is a handled component reference for a structure, return the
   base variable.  The access range is delimited by bit positions *POFFSET and
   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
   and *PMAX_SIZE are equal, the access is non-variable.  If *PREVERSE is
   true, the storage order of the reference is reversed.  */

tree
get_ref_base_and_extent (tree exp, poly_int64_pod *poffset,
			 poly_int64_pod *psize,
			 poly_int64_pod *pmax_size,
			 bool *preverse)
{
  /* Offsets and sizes are accumulated in the wider poly_offset_int
     domain and only converted (with overflow checks) at the end.  */
  poly_offset_int bitsize = -1;
  poly_offset_int maxsize;
  tree size_tree = NULL_TREE;
  poly_offset_int bit_offset = 0;
  bool seen_variable_array_ref = false;

  /* First get the final access size and the storage order from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    {
      size_tree = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
    }
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    bitsize = wi::to_poly_offset (size_tree);

  *preverse = reverse_storage_order_for_component_p (exp);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  /* Operand 2 is the constant bit position of the field.  */
	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset && poly_int_tree_p (this_offset))
	      {
		/* Combine the byte offset with the sub-byte bit offset
		   of the field.  */
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		bit_offset += woffset;

		/* If we had seen a variable array ref already and we just
		   referenced the last field of a struct or a union member
		   then we have to adjust maxsize by the padding at the end
		   of our field.  */
		if (seen_variable_array_ref)
		  {
		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
		    /* Find the next FIELD_DECL, if any, to detect whether
		       FIELD is the last field of STYPE.  */
		    tree next = DECL_CHAIN (field);
		    while (next && TREE_CODE (next) != FIELD_DECL)
		      next = DECL_CHAIN (next);
		    if (!next
			|| TREE_CODE (stype) != RECORD_TYPE)
		      {
			tree fsize = DECL_SIZE_UNIT (field);
			tree ssize = TYPE_SIZE_UNIT (stype);
			if (fsize == NULL
			    || !poly_int_tree_p (fsize)
			    || ssize == NULL
			    || !poly_int_tree_p (ssize))
			  maxsize = -1;
			else if (known_size_p (maxsize))
			  {
			    /* Grow maxsize by the trailing padding of the
			       struct/union beyond FIELD.  */
			    poly_offset_int tem
			      = (wi::to_poly_offset (ssize)
				 - wi::to_poly_offset (fsize));
			    tem <<= LOG2_BITS_PER_UNIT;
			    tem -= woffset;
			    maxsize += tem;
			  }
		      }
		    /* An component ref with an adjacent field up in the
		       structure hierarchy constrains the size of any variable
		       array ref lower in the access hierarchy.  */
		    else
		      seen_variable_array_ref = false;
		  }
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (known_size_p (maxsize)
		    && csize
		    && poly_int_tree_p (csize))
		  maxsize = wi::to_poly_offset (csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (poly_int_tree_p (index)
		&& (low_bound = array_ref_low_bound (exp),
		    poly_int_tree_p (low_bound))
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		/* Sign-extend to sizetype precision to keep negative
		   index - low_bound differences correct.  */
		poly_offset_int woffset
		  = wi::sext (wi::to_poly_offset (index)
			      - wi::to_poly_offset (low_bound),
			      TYPE_PRECISION (sizetype));
		woffset *= wi::to_offset (unit_size);
		woffset <<= LOG2_BITS_PER_UNIT;
		bit_offset += woffset;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (known_size_p (maxsize)
		    && asize
		    && poly_int_tree_p (asize))
		  maxsize = wi::to_poly_offset (asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;

		/* Try to tighten the result using range information on the
		   variable index.  */
		value_range vr;
		range_query *query;
		if (cfun)
		  query = get_range_query (cfun);
		else
		  query = get_global_range_query ();

		if (TREE_CODE (index) == SSA_NAME
		    && (low_bound = array_ref_low_bound (exp),
			poly_int_tree_p (low_bound))
		    && (unit_size = array_ref_element_size (exp),
			TREE_CODE (unit_size) == INTEGER_CST)
		    && query->range_of_expr (vr, index)
		    && vr.kind () == VR_RANGE)
		  {
		    wide_int min = vr.lower_bound ();
		    wide_int max = vr.upper_bound ();
		    poly_offset_int lbound = wi::to_poly_offset (low_bound);
		    /* Try to constrain maxsize with range information.  */
		    offset_int omax
		      = offset_int::from (max, TYPE_SIGN (TREE_TYPE (index)));
		    if (known_lt (lbound, omax))
		      {
			poly_offset_int rmaxsize;
			rmaxsize = (omax - lbound + 1)
			    * wi::to_offset (unit_size) << LOG2_BITS_PER_UNIT;
			if (!known_size_p (maxsize)
			    || known_lt (rmaxsize, maxsize))
			  {
			    /* If we know an upper bound below the declared
			       one this is no longer variable.  */
			    if (known_size_p (maxsize))
			      seen_variable_array_ref = false;
			    maxsize = rmaxsize;
			  }
		      }
		    /* Try to adjust bit_offset with range information.  */
		    offset_int omin
		      = offset_int::from (min, TYPE_SIGN (TREE_TYPE (index)));
		    if (known_le (lbound, omin))
		      {
			poly_offset_int woffset
			  = wi::sext (omin - lbound,
				      TYPE_PRECISION (sizetype));
			woffset *= wi::to_offset (unit_size);
			woffset <<= LOG2_BITS_PER_UNIT;
			bit_offset += woffset;
			if (known_size_p (maxsize))
			  maxsize -= woffset;
		      }
		  }
	      }
	  }
	  break;

	case REALPART_EXPR:
	  /* The real part lives at offset zero of the complex object.  */
	  break;

	case IMAGPART_EXPR:
	  /* The imaginary part follows the real part, so advance by the
	     access size.  */
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  /* A type pun does not change the position of the access.  */
	  break;

	case TARGET_MEM_REF:
	  /* Via the variable index or index2 we can reach the
	     whole object.  Still hand back the decl here.  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	      && (TMR_INDEX (exp) || TMR_INDEX2 (exp)))
	    {
	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
	      bit_offset = 0;
	      maxsize = -1;
	      goto done;
	    }
	  /* Fallthru.  */
	case MEM_REF:
	  /* We need to deal with variable arrays ending structures such as
	       struct { int length; int a[1]; } x;           x.a[d]
	       struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
	       struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
	       struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
	     where we do not know maxsize for variable index accesses to
	     the array.  The simplest way to conservatively deal with this
	     is to punt in the case that offset + maxsize reaches the
	     base type boundary.  This needs to include possible trailing
	     padding that is there for alignment purposes.  */
	  if (seen_variable_array_ref
	      && known_size_p (maxsize)
	      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
		  || !poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
		  || (maybe_eq
		      (bit_offset + maxsize,
		       wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))))))
	    maxsize = -1;

	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (integer_zerop (TREE_OPERAND (exp, 1)))
		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	      else
		{
		  /* Fold the MEM_REF byte offset into the running bit
		     offset, but only if it does not overflow a
		     HOST_WIDE_INT.  */
		  poly_offset_int off = mem_ref_offset (exp);
		  off <<= LOG2_BITS_PER_UNIT;
		  off += bit_offset;
		  poly_int64 off_hwi;
		  if (off.to_shwi (&off_hwi))
		    {
		      bit_offset = off_hwi;
		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
		    }
		}
	    }
	  goto done;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  /* An unrepresentable or negative size makes the whole result unknown.  */
  if (!bitsize.to_shwi (psize) || maybe_lt (*psize, 0))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  /* ???  Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  if (!bit_offset.to_shwi (poffset))
    {
      *poffset = 0;
      *pmax_size = -1;

      return exp;
    }

  /* In case of a decl or constant base object we can do better.  */

  if (DECL_P (exp))
    {
      if (VAR_P (exp)
	  && ((flag_unconstrained_commons && DECL_COMMON (exp))
	      || (DECL_EXTERNAL (exp) && seen_variable_array_ref)))
	{
	  tree sz_tree = TYPE_SIZE (TREE_TYPE (exp));
	  /* If size is unknown, or we have read to the end, assume there
	     may be more to the structure than we are told.  */
	  if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
	      || (seen_variable_array_ref
		  && (sz_tree == NULL_TREE
		      || !poly_int_tree_p (sz_tree)
		      || maybe_eq (bit_offset + maxsize,
				   wi::to_poly_offset (sz_tree)))))
	    maxsize = -1;
	}
      /* If maxsize is unknown adjust it according to the size of the
         base decl.  */
      else if (!known_size_p (maxsize)
	       && DECL_SIZE (exp)
	       && poly_int_tree_p (DECL_SIZE (exp)))
	maxsize = wi::to_poly_offset (DECL_SIZE (exp)) - bit_offset;
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
         base type constant.  */
      if (!known_size_p (maxsize)
	  && TYPE_SIZE (TREE_TYPE (exp))
	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp))))
	maxsize = (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))
		   - bit_offset);
    }

  if (!maxsize.to_shwi (pmax_size)
      || maybe_lt (*pmax_size, 0)
      || !endpoint_representable_p (*poffset, *pmax_size))
    *pmax_size = -1;

  /* Punt if *POFFSET + *PSIZE overflows in HOST_WIDE_INT, the callers don't
     check for such overflows individually and assume it works.  */
  if (!endpoint_representable_p (*poffset, *psize))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  return exp;
}
739 1.1 mrg
740 1.1 mrg /* Like get_ref_base_and_extent, but for cases in which we only care
741 1.1 mrg about constant-width accesses at constant offsets. Return null
742 1.1 mrg if the access is anything else. */
743 1.1 mrg
744 1.1 mrg tree
745 1.1 mrg get_ref_base_and_extent_hwi (tree exp, HOST_WIDE_INT *poffset,
746 1.1 mrg HOST_WIDE_INT *psize, bool *preverse)
747 1.1 mrg {
748 1.1 mrg poly_int64 offset, size, max_size;
749 1.1 mrg HOST_WIDE_INT const_offset, const_size;
750 1.1 mrg bool reverse;
751 1.1 mrg tree decl = get_ref_base_and_extent (exp, &offset, &size, &max_size,
752 1.1 mrg &reverse);
753 1.1 mrg if (!offset.is_constant (&const_offset)
754 1.1 mrg || !size.is_constant (&const_size)
755 1.1 mrg || const_offset < 0
756 1.1 mrg || !known_size_p (max_size)
757 1.1 mrg || maybe_ne (max_size, const_size))
758 1.1 mrg return NULL_TREE;
759 1.1 mrg
760 1.1 mrg *poffset = const_offset;
761 1.1 mrg *psize = const_size;
762 1.1 mrg *preverse = reverse;
763 1.1 mrg return decl;
764 1.1 mrg }
765 1.1 mrg
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */

tree
get_addr_base_and_unit_offset_1 (tree exp, poly_int64_pod *poffset,
				 tree (*valueize) (tree))
{
  poly_int64 byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  {
	    /* Punt unless the bit-field position is a whole number of
	       bytes.  */
	    poly_int64 this_byte_offset;
	    poly_uint64 this_bit_offset;
	    if (!poly_int_tree_p (TREE_OPERAND (exp, 2), &this_bit_offset)
		|| !multiple_p (this_bit_offset, BITS_PER_UNIT,
				&this_byte_offset))
	      return NULL_TREE;
	    byte_offset += this_byte_offset;
	  }
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    poly_int64 hthis_offset;

	    /* Punt on variable offsets and on fields that do not start
	       on a byte boundary.  */
	    if (!this_offset
		|| !poly_int_tree_p (this_offset, &hthis_offset)
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* Valueization may turn an SSA-name index into a constant.  */
	    if (valueize
		&& TREE_CODE (index) == SSA_NAME)
	      index = (*valueize) (index);
	    if (!poly_int_tree_p (index))
	      return NULL_TREE;
	    low_bound = array_ref_low_bound (exp);
	    if (valueize
		&& TREE_CODE (low_bound) == SSA_NAME)
	      low_bound = (*valueize) (low_bound);
	    if (!poly_int_tree_p (low_bound))
	      return NULL_TREE;
	    unit_size = array_ref_element_size (exp);
	    if (TREE_CODE (unit_size) != INTEGER_CST)
	      return NULL_TREE;

	    /* If the resulting bit-offset is constant, track it.  */
	    poly_offset_int woffset
	      = wi::sext (wi::to_poly_offset (index)
			  - wi::to_poly_offset (low_bound),
			  TYPE_PRECISION (sizetype));
	    woffset *= wi::to_offset (unit_size);
	    byte_offset += woffset.force_shwi ();
	  }
	  break;

	case REALPART_EXPR:
	  /* The real part lives at offset zero.  */
	  break;

	case IMAGPART_EXPR:
	  /* The imaginary part follows the real part.  */
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  /* A type pun does not move the access.  */
	  break;

	case MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (!integer_zerop (TREE_OPERAND (exp, 1)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	case TARGET_MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		/* A variable index makes the offset non-constant.  */
		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		  return NULL_TREE;
		if (!integer_zerop (TMR_OFFSET (exp)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}
909 1.1 mrg
910 1.1 mrg /* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
911 1.1 mrg denotes the starting address of the memory access EXP.
912 1.1 mrg Returns NULL_TREE if the offset is not constant or any component
913 1.1 mrg is not BITS_PER_UNIT-aligned. */
914 1.1 mrg
915 1.1 mrg tree
916 1.1 mrg get_addr_base_and_unit_offset (tree exp, poly_int64_pod *poffset)
917 1.1 mrg {
918 1.1 mrg return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
919 1.1 mrg }
920 1.1 mrg
921 1.1 mrg /* Returns true if STMT references an SSA_NAME that has
922 1.1 mrg SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */
923 1.1 mrg
924 1.1 mrg bool
925 1.1 mrg stmt_references_abnormal_ssa_name (gimple *stmt)
926 1.1 mrg {
927 1.1 mrg ssa_op_iter oi;
928 1.1 mrg use_operand_p use_p;
929 1.1 mrg
930 1.1 mrg FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
931 1.1 mrg {
932 1.1 mrg if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
933 1.1 mrg return true;
934 1.1 mrg }
935 1.1 mrg
936 1.1 mrg return false;
937 1.1 mrg }
938 1.1 mrg
939 1.1 mrg /* If STMT takes any abnormal PHI values as input, replace them with
940 1.1 mrg local copies. */
941 1.1 mrg
942 1.1 mrg void
943 1.1 mrg replace_abnormal_ssa_names (gimple *stmt)
944 1.1 mrg {
945 1.1 mrg ssa_op_iter oi;
946 1.1 mrg use_operand_p use_p;
947 1.1 mrg
948 1.1 mrg FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
949 1.1 mrg {
950 1.1 mrg tree op = USE_FROM_PTR (use_p);
951 1.1 mrg if (TREE_CODE (op) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op))
952 1.1 mrg {
953 1.1 mrg gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
954 1.1 mrg tree new_name = make_ssa_name (TREE_TYPE (op));
955 1.1 mrg gassign *assign = gimple_build_assign (new_name, op);
956 1.1 mrg gsi_insert_before (&gsi, assign, GSI_SAME_STMT);
957 1.1 mrg SET_USE (use_p, new_name);
958 1.1 mrg }
959 1.1 mrg }
960 1.1 mrg }
961 1.1 mrg
962 1.1 mrg /* Pair of tree and a sorting index, for dump_enumerated_decls. */
963 1.1 mrg struct GTY(()) numbered_tree
964 1.1 mrg {
965 1.1 mrg tree t;
966 1.1 mrg int num;
967 1.1 mrg };
968 1.1 mrg
969 1.1 mrg
970 1.1 mrg /* Compare two declarations references by their DECL_UID / sequence number.
971 1.1 mrg Called via qsort. */
972 1.1 mrg
973 1.1 mrg static int
974 1.1 mrg compare_decls_by_uid (const void *pa, const void *pb)
975 1.1 mrg {
976 1.1 mrg const numbered_tree *nt_a = ((const numbered_tree *)pa);
977 1.1 mrg const numbered_tree *nt_b = ((const numbered_tree *)pb);
978 1.1 mrg
979 1.1 mrg if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
980 1.1 mrg return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
981 1.1 mrg return nt_a->num - nt_b->num;
982 1.1 mrg }
983 1.1 mrg
984 1.1 mrg /* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls. */
985 1.1 mrg static tree
986 1.1 mrg dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
987 1.1 mrg {
988 1.1 mrg struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
989 1.1 mrg vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
990 1.1 mrg numbered_tree nt;
991 1.1 mrg
992 1.1 mrg if (!DECL_P (*tp))
993 1.1 mrg return NULL_TREE;
994 1.1 mrg nt.t = *tp;
995 1.1 mrg nt.num = list->length ();
996 1.1 mrg list->safe_push (nt);
997 1.1 mrg *walk_subtrees = 0;
998 1.1 mrg return NULL_TREE;
999 1.1 mrg }
1000 1.1 mrg
1001 1.1 mrg /* Find all the declarations used by the current function, sort them by uid,
1002 1.1 mrg and emit the sorted list. Each declaration is tagged with a sequence
1003 1.1 mrg number indicating when it was found during statement / tree walking,
1004 1.1 mrg so that TDF_NOUID comparisons of anonymous declarations are still
1005 1.1 mrg meaningful. Where a declaration was encountered more than once, we
1006 1.1 mrg emit only the sequence number of the first encounter.
1007 1.1 mrg FILE is the dump file where to output the list and FLAGS is as in
1008 1.1 mrg print_generic_expr. */
1009 1.1 mrg void
1010 1.1 mrg dump_enumerated_decls (FILE *file, dump_flags_t flags)
1011 1.1 mrg {
1012 1.1 mrg if (!cfun->cfg)
1013 1.1 mrg return;
1014 1.1 mrg
1015 1.1 mrg basic_block bb;
1016 1.1 mrg struct walk_stmt_info wi;
1017 1.1 mrg auto_vec<numbered_tree, 40> decl_list;
1018 1.1 mrg
1019 1.1 mrg memset (&wi, '\0', sizeof (wi));
1020 1.1 mrg wi.info = (void *) &decl_list;
1021 1.1 mrg FOR_EACH_BB_FN (bb, cfun)
1022 1.1 mrg {
1023 1.1 mrg gimple_stmt_iterator gsi;
1024 1.1 mrg
1025 1.1 mrg for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1026 1.1 mrg if (!is_gimple_debug (gsi_stmt (gsi)))
1027 1.1 mrg walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
1028 1.1 mrg }
1029 1.1 mrg decl_list.qsort (compare_decls_by_uid);
1030 1.1 mrg if (decl_list.length ())
1031 1.1 mrg {
1032 1.1 mrg unsigned ix;
1033 1.1 mrg numbered_tree *ntp;
1034 1.1 mrg tree last = NULL_TREE;
1035 1.1 mrg
1036 1.1 mrg fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
1037 1.1 mrg current_function_name ());
1038 1.1 mrg FOR_EACH_VEC_ELT (decl_list, ix, ntp)
1039 1.1 mrg {
1040 1.1 mrg if (ntp->t == last)
1041 1.1 mrg continue;
1042 1.1 mrg fprintf (file, "%d: ", ntp->num);
1043 1.1 mrg print_generic_decl (file, ntp->t, flags);
1044 1.1 mrg fprintf (file, "\n");
1045 1.1 mrg last = ntp->t;
1046 1.1 mrg }
1047 1.1 mrg }
1048 1.1 mrg }
1049