gimple-walk.cc revision 1.1.1.1 1 1.1 mrg /* Gimple walk support.
2 1.1 mrg
3 1.1 mrg Copyright (C) 2007-2022 Free Software Foundation, Inc.
4 1.1 mrg Contributed by Aldy Hernandez <aldyh (at) redhat.com>
5 1.1 mrg
6 1.1 mrg This file is part of GCC.
7 1.1 mrg
8 1.1 mrg GCC is free software; you can redistribute it and/or modify it under
9 1.1 mrg the terms of the GNU General Public License as published by the Free
10 1.1 mrg Software Foundation; either version 3, or (at your option) any later
11 1.1 mrg version.
12 1.1 mrg
13 1.1 mrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 1.1 mrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 1.1 mrg FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 1.1 mrg for more details.
17 1.1 mrg
18 1.1 mrg You should have received a copy of the GNU General Public License
19 1.1 mrg along with GCC; see the file COPYING3. If not see
20 1.1 mrg <http://www.gnu.org/licenses/>. */
21 1.1 mrg
22 1.1 mrg #include "config.h"
23 1.1 mrg #include "system.h"
24 1.1 mrg #include "coretypes.h"
25 1.1 mrg #include "backend.h"
26 1.1 mrg #include "tree.h"
27 1.1 mrg #include "gimple.h"
28 1.1 mrg #include "gimple-iterator.h"
29 1.1 mrg #include "gimple-walk.h"
30 1.1 mrg #include "stmt.h"
31 1.1 mrg
32 1.1 mrg /* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
33 1.1 mrg on each one. WI is as in walk_gimple_stmt.
34 1.1 mrg
35 1.1 mrg If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
36 1.1 mrg value is stored in WI->CALLBACK_RESULT. Also, the statement that
37 1.1 mrg produced the value is returned if this statement has not been
38 1.1 mrg removed by a callback (wi->removed_stmt). If the statement has
39 1.1 mrg been removed, NULL is returned.
40 1.1 mrg
41 1.1 mrg Otherwise, all the statements are walked and NULL returned. */
42 1.1 mrg
43 1.1 mrg gimple *
44 1.1 mrg walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
45 1.1 mrg walk_tree_fn callback_op, struct walk_stmt_info *wi)
46 1.1 mrg {
47 1.1 mrg gimple_stmt_iterator gsi;
48 1.1 mrg
49 1.1 mrg for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
50 1.1 mrg {
51 1.1 mrg tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
52 1.1 mrg if (ret)
53 1.1 mrg {
54 1.1 mrg /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
55 1.1 mrg to hold it. */
56 1.1 mrg gcc_assert (wi);
57 1.1 mrg wi->callback_result = ret;
58 1.1 mrg
59 1.1 mrg return wi->removed_stmt ? NULL : gsi_stmt (gsi);
60 1.1 mrg }
61 1.1 mrg
62 1.1 mrg if (!wi->removed_stmt)
63 1.1 mrg gsi_next (&gsi);
64 1.1 mrg }
65 1.1 mrg
66 1.1 mrg if (wi)
67 1.1 mrg wi->callback_result = NULL_TREE;
68 1.1 mrg
69 1.1 mrg return NULL;
70 1.1 mrg }
71 1.1 mrg
72 1.1 mrg
73 1.1 mrg /* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
74 1.1 mrg changed by the callbacks. */
75 1.1 mrg
76 1.1 mrg gimple *
77 1.1 mrg walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
78 1.1 mrg walk_tree_fn callback_op, struct walk_stmt_info *wi)
79 1.1 mrg {
80 1.1 mrg gimple_seq seq2 = seq;
81 1.1 mrg gimple *ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
82 1.1 mrg gcc_assert (seq2 == seq);
83 1.1 mrg return ret;
84 1.1 mrg }
85 1.1 mrg
86 1.1 mrg
87 1.1 mrg /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
88 1.1 mrg
89 1.1 mrg static tree
90 1.1 mrg walk_gimple_asm (gasm *stmt, walk_tree_fn callback_op,
91 1.1 mrg struct walk_stmt_info *wi)
92 1.1 mrg {
93 1.1 mrg tree ret, op;
94 1.1 mrg unsigned noutputs;
95 1.1 mrg const char **oconstraints;
96 1.1 mrg unsigned i, n;
97 1.1 mrg const char *constraint;
98 1.1 mrg bool allows_mem, allows_reg, is_inout;
99 1.1 mrg
100 1.1 mrg noutputs = gimple_asm_noutputs (stmt);
101 1.1 mrg oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
102 1.1 mrg
103 1.1 mrg for (i = 0; i < noutputs; i++)
104 1.1 mrg {
105 1.1 mrg op = gimple_asm_output_op (stmt, i);
106 1.1 mrg constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
107 1.1 mrg oconstraints[i] = constraint;
108 1.1 mrg if (wi)
109 1.1 mrg {
110 1.1 mrg if (parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
111 1.1 mrg &allows_reg, &is_inout))
112 1.1 mrg wi->val_only = (allows_reg || !allows_mem);
113 1.1 mrg }
114 1.1 mrg if (wi)
115 1.1 mrg wi->is_lhs = true;
116 1.1 mrg ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
117 1.1 mrg if (ret)
118 1.1 mrg return ret;
119 1.1 mrg }
120 1.1 mrg
121 1.1 mrg n = gimple_asm_ninputs (stmt);
122 1.1 mrg for (i = 0; i < n; i++)
123 1.1 mrg {
124 1.1 mrg op = gimple_asm_input_op (stmt, i);
125 1.1 mrg constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
126 1.1 mrg
127 1.1 mrg if (wi)
128 1.1 mrg {
129 1.1 mrg if (parse_input_constraint (&constraint, 0, 0, noutputs, 0,
130 1.1 mrg oconstraints, &allows_mem, &allows_reg))
131 1.1 mrg {
132 1.1 mrg wi->val_only = (allows_reg || !allows_mem);
133 1.1 mrg /* Although input "m" is not really a LHS, we need a lvalue. */
134 1.1 mrg wi->is_lhs = !wi->val_only;
135 1.1 mrg }
136 1.1 mrg }
137 1.1 mrg ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
138 1.1 mrg if (ret)
139 1.1 mrg return ret;
140 1.1 mrg }
141 1.1 mrg
142 1.1 mrg if (wi)
143 1.1 mrg {
144 1.1 mrg wi->is_lhs = false;
145 1.1 mrg wi->val_only = true;
146 1.1 mrg }
147 1.1 mrg
148 1.1 mrg n = gimple_asm_nlabels (stmt);
149 1.1 mrg for (i = 0; i < n; i++)
150 1.1 mrg {
151 1.1 mrg op = gimple_asm_label_op (stmt, i);
152 1.1 mrg ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
153 1.1 mrg if (ret)
154 1.1 mrg return ret;
155 1.1 mrg }
156 1.1 mrg
157 1.1 mrg return NULL_TREE;
158 1.1 mrg }
159 1.1 mrg
160 1.1 mrg
161 1.1 mrg /* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
162 1.1 mrg STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
163 1.1 mrg
164 1.1 mrg CALLBACK_OP is called on each operand of STMT via walk_tree.
165 1.1 mrg Additional parameters to walk_tree must be stored in WI. For each operand
166 1.1 mrg OP, walk_tree is called as:
167 1.1 mrg
168 1.1 mrg walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
169 1.1 mrg
170 1.1 mrg If CALLBACK_OP returns non-NULL for an operand, the remaining
171 1.1 mrg operands are not scanned.
172 1.1 mrg
173 1.1 mrg The return value is that returned by the last call to walk_tree, or
174 1.1 mrg NULL_TREE if no CALLBACK_OP is specified. */
175 1.1 mrg
tree
walk_gimple_op (gimple *stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  /* Optional pointer-set used by walk_tree to avoid revisiting trees.  */
  hash_set<tree> *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  /* Record the statement being walked so CALLBACK_OP can inspect it.  */
  if (wi)
    wi->stmt = stmt;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	}

      /* Operand 0 is the LHS, so the RHS operands start at index 1.  */
      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS is of a non-renamable type or is a register variable,
	     we may use a COMPONENT_REF on the LHS.  */
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      /* Restore the default walker state for the next operand walk.  */
      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      /* Walk the static chain, the callee, the arguments, and finally
	 the LHS (as a store destination).  */
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (as_a <gcall *> (stmt)),
		       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (as_a <gcatch *> (stmt)),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      /* Inline asms carry constraint-dependent walker state; handled by
	 the dedicated helper.  */
      ret = walk_gimple_asm (as_a <gasm *> (stmt), callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      {
	gomp_continue *cont_stmt = as_a <gomp_continue *> (stmt);
	ret = walk_tree (gimple_omp_continue_control_def_ptr (cont_stmt),
	  		 callback_op, wi, pset);
	if (ret)
	  return ret;

	ret = walk_tree (gimple_omp_continue_control_use_ptr (cont_stmt),
	  		 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_CRITICAL:
      {
	gomp_critical *omp_stmt = as_a <gomp_critical *> (stmt);
	ret = walk_tree (gimple_omp_critical_name_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_critical_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_ORDERED:
      {
	gomp_ordered *omp_stmt = as_a <gomp_ordered *> (stmt);
	ret = walk_tree (gimple_omp_ordered_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_SCAN:
      {
	gomp_scan *scan_stmt = as_a <gomp_scan *> (stmt);
	ret = walk_tree (gimple_omp_scan_clauses_ptr (scan_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      /* Walk index, initial value, final value and increment of every
	 collapsed loop dimension.  */
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      {
	gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	ret = walk_tree (gimple_omp_parallel_clauses_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_parallel_child_fn_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_parallel_data_arg_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TARGET:
      {
	gomp_target *omp_stmt = as_a <gomp_target *> (stmt);
	ret = walk_tree (gimple_omp_target_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_target_child_fn_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_target_data_arg_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_TEAMS:
      ret = walk_tree (gimple_omp_teams_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      {
	gomp_atomic_load *omp_stmt = as_a <gomp_atomic_load *> (stmt);
	ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      {
	gomp_atomic_store *omp_stmt = as_a <gomp_atomic_store *> (stmt);
	ret = walk_tree (gimple_omp_atomic_store_val_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (stmt);

	/* Walk the three transaction exit labels: normal, uninstrumented
	   and over.  */
	ret = walk_tree (gimple_transaction_label_norm_ptr (txn),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_transaction_label_uninst_ptr (txn),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_transaction_label_over_ptr (txn),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_RETURN:
      ret = walk_tree (gimple_omp_return_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_PHI:
      /* PHIs are not GSS_WITH_OPS so we need to handle them explicitely.  */
      {
	gphi *phi = as_a <gphi *> (stmt);
	if (wi)
	  {
	    wi->val_only = true;
	    wi->is_lhs = true;
	  }
	/* The PHI result is a definition, hence is_lhs; the arguments
	   are uses.  */
	ret = walk_tree (gimple_phi_result_ptr (phi), callback_op, wi, pset);
	if (wi)
	  wi->is_lhs = false;
	if (ret)
	  return ret;
	for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	  {
	    ret = walk_tree (gimple_phi_arg_def_ptr (phi, i),
			     callback_op, wi, pset);
	    if (ret)
	      return ret;
	  }
	break;
      }

    default:
      {
	/* Generic fallback: walk every operand of statements laid out
	   with operand vectors.  */
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}
561 1.1 mrg
562 1.1 mrg
563 1.1 mrg /* Walk the current statement in GSI (optionally using traversal state
564 1.1 mrg stored in WI). If WI is NULL, no state is kept during traversal.
565 1.1 mrg The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
566 1.1 mrg that it has handled all the operands of the statement, its return
567 1.1 mrg value is returned. Otherwise, the return value from CALLBACK_STMT
568 1.1 mrg is discarded and its operands are scanned.
569 1.1 mrg
570 1.1 mrg If CALLBACK_STMT is NULL or it didn't handle the operands,
571 1.1 mrg CALLBACK_OP is called on each operand of the statement via
572 1.1 mrg walk_gimple_op. If walk_gimple_op returns non-NULL for any
573 1.1 mrg operand, the remaining operands are not scanned. In this case, the
574 1.1 mrg return value from CALLBACK_OP is returned.
575 1.1 mrg
576 1.1 mrg In any other case, NULL_TREE is returned. */
577 1.1 mrg
tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple *ret;
  tree tree_ret;
  gimple *stmt = gsi_stmt (*gsi);

  if (wi)
    {
      /* Let callbacks see (and modify through) the iterator.  */
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* A removed statement has no operands or substatements to walk.  */
      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.
     Note: a non-NULL result from walk_gimple_seq_mod implies WI is
     non-NULL (it asserts WI before storing the callback result).  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (as_a <gbind *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (
				   as_a <gcatch *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (eh_else_stmt),
				   callback_stmt, callback_op, wi);
	if (ret)
	  return wi->callback_result;
	ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (eh_else_stmt),
				   callback_stmt, callback_op, wi);
	if (ret)
	  return wi->callback_result;
      }
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
				 wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      /* The pre-body runs before the loop; walk it, then fall through
	 to walk the loop body like the other OMP constructs.  */
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (
				   as_a <gtransaction *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      /* Any statement kind not listed above must not contain nested
	 statement sequences.  */
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
725 1.1 mrg
726 1.1 mrg /* From a tree operand OP return the base of a load or store operation
727 1.1 mrg or NULL_TREE if OP is not a load or a store. */
728 1.1 mrg
729 1.1 mrg static tree
730 1.1 mrg get_base_loadstore (tree op)
731 1.1 mrg {
732 1.1 mrg while (handled_component_p (op))
733 1.1 mrg op = TREE_OPERAND (op, 0);
734 1.1 mrg if (DECL_P (op)
735 1.1 mrg || INDIRECT_REF_P (op)
736 1.1 mrg || TREE_CODE (op) == MEM_REF
737 1.1 mrg || TREE_CODE (op) == TARGET_MEM_REF)
738 1.1 mrg return op;
739 1.1 mrg return NULL_TREE;
740 1.1 mrg }
741 1.1 mrg
742 1.1 mrg
743 1.1 mrg /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
744 1.1 mrg VISIT_ADDR if non-NULL on loads, store and address-taken operands
745 1.1 mrg passing the STMT, the base of the operand, the operand itself containing
746 1.1 mrg the base and DATA to it. The base will be either a decl, an indirect
747 1.1 mrg reference (including TARGET_MEM_REF) or the argument of an address
748 1.1 mrg expression.
749 1.1 mrg Returns the results of these callbacks or'ed. */
750 1.1 mrg
bool
walk_stmt_load_store_addr_ops (gimple *stmt, void *data,
			       walk_stmt_load_store_addr_fn visit_load,
			       walk_stmt_load_store_addr_fn visit_store,
			       walk_stmt_load_store_addr_fn visit_addr)
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      /* Plain assignment: the LHS is a potential store, the RHS a
	 potential load or address-taken operand.  */
      tree lhs, rhs, arg;
      if (visit_store)
	{
	  arg = gimple_assign_lhs (stmt);
	  lhs = get_base_loadstore (arg);
	  if (lhs)
	    ret |= visit_store (stmt, lhs, arg, data);
	}
      arg = gimple_assign_rhs1 (stmt);
      rhs = arg;
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  /* Addresses can appear directly, behind an OBJ_TYPE_REF, or
	     inside CONSTRUCTOR elements.  */
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), arg, data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), arg, data);
	  else if (TREE_CODE (rhs) == CONSTRUCTOR)
	    {
	      unsigned int ix;
	      tree val;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
		if (TREE_CODE (val) == ADDR_EXPR)
		  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), arg, data);
		else if (TREE_CODE (val) == OBJ_TYPE_REF
			 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
		  ret |= visit_addr (stmt,
				     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
						   0), arg, data);
	    }
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, arg, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      /* Multi-operand assignments and conditions: only addresses can
	 appear, scan every operand for ADDR_EXPRs.  */
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	{
	  tree op = gimple_op (stmt, i);
	  if (op == NULL_TREE)
	    ;
	  else if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	  /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
	     tree with two operands.  */
	  else if (i == 1 && COMPARISON_CLASS_P (op))
	    {
	      if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
						       0), op, data);
	      if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
						       0), op, data);
	    }
	}
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      /* Calls: the LHS is a store, arguments are loads or taken
	 addresses, and the static chain may carry an address too.  */
      if (visit_store)
	{
	  tree arg = gimple_call_lhs (call_stmt);
	  if (arg)
	    {
	      tree lhs = get_base_loadstore (arg);
	      if (lhs)
		ret |= visit_store (stmt, lhs, arg, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (call_stmt); ++i)
	  {
	    tree arg = gimple_call_arg (call_stmt, i);
	    if (visit_addr
		&& TREE_CODE (arg) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (arg, 0), arg, data);
	    else if (visit_load)
	      {
		tree rhs = get_base_loadstore (arg);
		if (rhs)
		  ret |= visit_load (stmt, rhs, arg, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (call_stmt)
	  && TREE_CODE (gimple_call_chain (call_stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (call_stmt), 0),
			   gimple_call_chain (call_stmt), data);
      /* With the return-slot optimization the callee writes directly
	 into the LHS object, which implicitly takes its address.  */
      if (visit_addr
	  && gimple_call_return_slot_opt_p (call_stmt)
	  && gimple_call_lhs (call_stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call_stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (call_stmt),
			   gimple_call_lhs (call_stmt), data);
    }
  else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
    {
      /* Inline asm: outputs are stores (and, for "m" constraints,
	 address-taken); inputs are loads or addresses, classified by
	 their constraints.  */
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, TREE_VALUE (link), data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		/* A memory-only output is accessed through its
		   address.  */
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, TREE_VALUE (link), data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, TREE_VALUE (link), data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, TREE_VALUE (link),
					     data);
		      }
		  }
	      }
	  }
    }
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      /* Return value: either an address or a load.  */
      tree op = gimple_return_retval (return_stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	  else if (visit_load)
	    {
	      tree base = get_base_loadstore (op);
	      if (base)
		ret |= visit_load (stmt, base, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      /* PHI arguments can only carry addresses, never loads/stores.  */
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = gimple_phi_arg_def (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_GOTO)
    {
      /* A computed goto may take the address of a label.  */
      tree op = gimple_goto_dest (stmt);
      if (TREE_CODE (op) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
    }

  return ret;
}
957 1.1 mrg
958 1.1 mrg /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
959 1.1 mrg should make a faster clone for this case. */
960 1.1 mrg
961 1.1 mrg bool
962 1.1 mrg walk_stmt_load_store_ops (gimple *stmt, void *data,
963 1.1 mrg walk_stmt_load_store_addr_fn visit_load,
964 1.1 mrg walk_stmt_load_store_addr_fn visit_store)
965 1.1 mrg {
966 1.1 mrg return walk_stmt_load_store_addr_ops (stmt, data,
967 1.1 mrg visit_load, visit_store, NULL);
968 1.1 mrg }
969