/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 1.1 mrg
22 1.1 mrg #include "config.h"
23 1.1 mrg #include "system.h"
24 1.1 mrg #include "coretypes.h"
25 1.1 mrg #include "target.h"
26 1.1 mrg #include "basic-block.h"
27 1.1 mrg #include "cp-tree.h"
28 1.1 mrg #include "gimple.h"
29 1.1 mrg #include "predict.h"
30 1.1 mrg #include "stor-layout.h"
31 1.1 mrg #include "tree-iterator.h"
32 1.1 mrg #include "gimplify.h"
33 1.1 mrg #include "c-family/c-ubsan.h"
34 1.1 mrg #include "stringpool.h"
35 1.1 mrg #include "attribs.h"
36 1.1 mrg #include "asan.h"
37 1.1 mrg #include "gcc-rich-location.h"
38 1.1 mrg #include "memmodel.h"
39 1.1 mrg #include "tm_p.h"
40 1.1 mrg #include "output.h"
41 1.1 mrg #include "file-prefix-map.h"
42 1.1 mrg #include "cgraph.h"
43 1.1 mrg #include "omp-general.h"
44 1.1 mrg #include "opts.h"
45 1.1 mrg
/* Shared state for a cp_fold_r tree walk.  */

struct cp_fold_data
{
  /* Set of trees already visited, so each node is folded only once.  */
  hash_set<tree> pset;
  bool genericize; // called from cp_fold_function?

  cp_fold_data (bool g): genericize (g) {}
};
53 1.1 mrg
54 1.1 mrg /* Forward declarations. */
55 1.1 mrg
56 1.1 mrg static tree cp_genericize_r (tree *, int *, void *);
57 1.1 mrg static tree cp_fold_r (tree *, int *, void *);
58 1.1 mrg static void cp_genericize_tree (tree*, bool);
59 1.1 mrg static tree cp_fold (tree);
60 1.1 mrg
61 1.1 mrg /* Genericize a TRY_BLOCK. */
62 1.1 mrg
63 1.1 mrg static void
64 1.1 mrg genericize_try_block (tree *stmt_p)
65 1.1 mrg {
66 1.1 mrg tree body = TRY_STMTS (*stmt_p);
67 1.1 mrg tree cleanup = TRY_HANDLERS (*stmt_p);
68 1.1 mrg
69 1.1 mrg *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
70 1.1 mrg }
71 1.1 mrg
72 1.1 mrg /* Genericize a HANDLER by converting to a CATCH_EXPR. */
73 1.1 mrg
74 1.1 mrg static void
75 1.1 mrg genericize_catch_block (tree *stmt_p)
76 1.1 mrg {
77 1.1 mrg tree type = HANDLER_TYPE (*stmt_p);
78 1.1 mrg tree body = HANDLER_BODY (*stmt_p);
79 1.1 mrg
80 1.1 mrg /* FIXME should the caught type go in TREE_TYPE? */
81 1.1 mrg *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
82 1.1 mrg }
83 1.1 mrg
84 1.1 mrg /* A terser interface for building a representation of an exception
85 1.1 mrg specification. */
86 1.1 mrg
87 1.1 mrg static tree
88 1.1 mrg build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
89 1.1 mrg {
90 1.1 mrg tree t;
91 1.1 mrg
92 1.1 mrg /* FIXME should the allowed types go in TREE_TYPE? */
93 1.1 mrg t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
94 1.1 mrg append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
95 1.1 mrg
96 1.1 mrg t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
97 1.1 mrg append_to_statement_list (body, &TREE_OPERAND (t, 0));
98 1.1 mrg
99 1.1 mrg return t;
100 1.1 mrg }
101 1.1 mrg
102 1.1 mrg /* Genericize an EH_SPEC_BLOCK by converting it to a
103 1.1 mrg TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
104 1.1 mrg
105 1.1 mrg static void
106 1.1 mrg genericize_eh_spec_block (tree *stmt_p)
107 1.1 mrg {
108 1.1 mrg tree body = EH_SPEC_STMTS (*stmt_p);
109 1.1 mrg tree allowed = EH_SPEC_RAISES (*stmt_p);
110 1.1 mrg tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());
111 1.1 mrg
112 1.1 mrg *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
113 1.1 mrg suppress_warning (*stmt_p);
114 1.1 mrg suppress_warning (TREE_OPERAND (*stmt_p, 1));
115 1.1 mrg }
116 1.1 mrg
117 1.1 mrg /* Return the first non-compound statement in STMT. */
118 1.1 mrg
119 1.1 mrg tree
120 1.1 mrg first_stmt (tree stmt)
121 1.1 mrg {
122 1.1 mrg switch (TREE_CODE (stmt))
123 1.1 mrg {
124 1.1 mrg case STATEMENT_LIST:
125 1.1 mrg if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
126 1.1 mrg return first_stmt (p->stmt);
127 1.1 mrg return void_node;
128 1.1 mrg
129 1.1 mrg case BIND_EXPR:
130 1.1 mrg return first_stmt (BIND_EXPR_BODY (stmt));
131 1.1 mrg
132 1.1 mrg default:
133 1.1 mrg return stmt;
134 1.1 mrg }
135 1.1 mrg }
136 1.1 mrg
/* Genericize an IF_STMT by turning it into a COND_EXPR, in place through
   STMT_P.  Also diagnoses the case where both branches carry the same
   hot/cold branch prediction, and folds away dead arms for consteval /
   constexpr-if and constant conditions.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      /* If both arms start with the same [[likely]]/[[unlikely]]-style
	 PREDICT_EXPR, the annotation is contradictory -- warn.  */
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  /* COND_EXPR requires both arms; substitute empty statements.  */
  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      /* Keep a never-taken COND_EXPR only if then_ can fall through, so
	 that fallthrough-related warnings stay accurate.  */
      if (block_may_fallthru (then_))
	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
		       void_node, else_);
      else
	stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    /* For `if constexpr', the condition is already a constant; keep only
       the selected arm.  */
    stmt = integer_nonzerop (cond) ? then_ : else_;
  /* ??? This optimization doesn't seem to belong here, but removing it
     causes -Wreturn-type regressions (e.g. 107310).  */
  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
198 1.1 mrg
199 1.1 mrg /* Hook into the middle of gimplifying an OMP_FOR node. */
200 1.1 mrg
201 1.1 mrg static enum gimplify_status
202 1.1 mrg cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
203 1.1 mrg {
204 1.1 mrg tree for_stmt = *expr_p;
205 1.1 mrg gimple_seq seq = NULL;
206 1.1 mrg
207 1.1 mrg /* Protect ourselves from recursion. */
208 1.1 mrg if (OMP_FOR_GIMPLIFYING_P (for_stmt))
209 1.1 mrg return GS_UNHANDLED;
210 1.1 mrg OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
211 1.1 mrg
212 1.1 mrg gimplify_and_add (for_stmt, &seq);
213 1.1 mrg gimple_seq_add_seq (pre_p, seq);
214 1.1 mrg
215 1.1 mrg OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
216 1.1 mrg
217 1.1 mrg return GS_ALL_DONE;
218 1.1 mrg }
219 1.1 mrg
220 1.1 mrg /* Gimplify an EXPR_STMT node. */
221 1.1 mrg
222 1.1 mrg static void
223 1.1 mrg gimplify_expr_stmt (tree *stmt_p)
224 1.1 mrg {
225 1.1 mrg tree stmt = EXPR_STMT_EXPR (*stmt_p);
226 1.1 mrg
227 1.1 mrg if (stmt == error_mark_node)
228 1.1 mrg stmt = NULL;
229 1.1 mrg
230 1.1 mrg /* Gimplification of a statement expression will nullify the
231 1.1 mrg statement if all its side effects are moved to *PRE_P and *POST_P.
232 1.1 mrg
233 1.1 mrg In this case we will not want to emit the gimplified statement.
234 1.1 mrg However, we may still want to emit a warning, so we do that before
235 1.1 mrg gimplification. */
236 1.1 mrg if (stmt && warn_unused_value)
237 1.1 mrg {
238 1.1 mrg if (!TREE_SIDE_EFFECTS (stmt))
239 1.1 mrg {
240 1.1 mrg if (!IS_EMPTY_STMT (stmt)
241 1.1 mrg && !VOID_TYPE_P (TREE_TYPE (stmt))
242 1.1 mrg && !warning_suppressed_p (stmt, OPT_Wunused_value))
243 1.1 mrg warning (OPT_Wunused_value, "statement with no effect");
244 1.1 mrg }
245 1.1 mrg else
246 1.1 mrg warn_if_unused_value (stmt, input_location);
247 1.1 mrg }
248 1.1 mrg
249 1.1 mrg if (stmt == NULL_TREE)
250 1.1 mrg stmt = alloc_stmt_list ();
251 1.1 mrg
252 1.1 mrg *stmt_p = stmt;
253 1.1 mrg }
254 1.1 mrg
/* Gimplify initialization from an AGGR_INIT_EXPR.  Rewrites *EXPR_P
   (an INIT_EXPR) so that the initializer constructs directly into the
   target object where possible.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
	if (target_expr_needs_replace (from))
	  {
	    /* If this was changed by cp_genericize_target_expr, we need to
	       walk into it to replace uses of the slot.  */
	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
	    *expr_p = init;
	    return;
	  }
	else
	  from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      /* SUB is either the final operand of the COMPOUND_EXPR chain or
	 the current node itself.  */
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      /* Advance down the COMPOUND_EXPR chain; stop once SUB was the
	 node itself.  */
      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }

}
315 1.1 mrg
316 1.1 mrg /* Gimplify a MUST_NOT_THROW_EXPR. */
317 1.1 mrg
318 1.1 mrg static enum gimplify_status
319 1.1 mrg gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
320 1.1 mrg {
321 1.1 mrg tree stmt = *expr_p;
322 1.1 mrg tree temp = voidify_wrapper_expr (stmt, NULL);
323 1.1 mrg tree body = TREE_OPERAND (stmt, 0);
324 1.1 mrg gimple_seq try_ = NULL;
325 1.1 mrg gimple_seq catch_ = NULL;
326 1.1 mrg gimple *mnt;
327 1.1 mrg
328 1.1 mrg gimplify_and_add (body, &try_);
329 1.1 mrg mnt = gimple_build_eh_must_not_throw (terminate_fn);
330 1.1 mrg gimple_seq_add_stmt_without_update (&catch_, mnt);
331 1.1 mrg mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
332 1.1 mrg
333 1.1 mrg gimple_seq_add_stmt_without_update (pre_p, mnt);
334 1.1 mrg if (temp)
335 1.1 mrg {
336 1.1 mrg *expr_p = temp;
337 1.1 mrg return GS_OK;
338 1.1 mrg }
339 1.1 mrg
340 1.1 mrg *expr_p = NULL;
341 1.1 mrg return GS_ALL_DONE;
342 1.1 mrg }
343 1.1 mrg
344 1.1 mrg /* Return TRUE if an operand (OP) of a given TYPE being copied is
345 1.1 mrg really just an empty class copy.
346 1.1 mrg
347 1.1 mrg Check that the operand has a simple form so that TARGET_EXPRs and
348 1.1 mrg non-empty CONSTRUCTORs get reduced properly, and we leave the
349 1.1 mrg return slot optimization alone because it isn't a copy. */
350 1.1 mrg
351 1.1 mrg bool
352 1.1 mrg simple_empty_class_p (tree type, tree op, tree_code code)
353 1.1 mrg {
354 1.1 mrg if (TREE_CODE (op) == COMPOUND_EXPR)
355 1.1 mrg return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
356 1.1 mrg if (SIMPLE_TARGET_EXPR_P (op)
357 1.1 mrg && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
358 1.1 mrg /* The TARGET_EXPR is itself a simple copy, look through it. */
359 1.1 mrg return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);
360 1.1 mrg
361 1.1 mrg if (TREE_CODE (op) == PARM_DECL
362 1.1 mrg && TREE_ADDRESSABLE (TREE_TYPE (op)))
363 1.1 mrg {
364 1.1 mrg tree fn = DECL_CONTEXT (op);
365 1.1 mrg if (DECL_THUNK_P (fn)
366 1.1 mrg || lambda_static_thunk_p (fn))
367 1.1 mrg /* In a thunk, we pass through invisible reference parms, so this isn't
368 1.1 mrg actually a copy. */
369 1.1 mrg return false;
370 1.1 mrg }
371 1.1 mrg
372 1.1 mrg return
373 1.1 mrg (TREE_CODE (op) == EMPTY_CLASS_EXPR
374 1.1 mrg || code == MODIFY_EXPR
375 1.1 mrg || is_gimple_lvalue (op)
376 1.1 mrg || INDIRECT_REF_P (op)
377 1.1 mrg || (TREE_CODE (op) == CONSTRUCTOR
378 1.1 mrg && CONSTRUCTOR_NELTS (op) == 0)
379 1.1 mrg || (TREE_CODE (op) == CALL_EXPR
380 1.1 mrg && !CALL_EXPR_RETURN_SLOT_OPT (op)))
381 1.1 mrg && !TREE_CLOBBER_P (op)
382 1.1 mrg && is_really_empty_class (type, /*ignore_vptr*/true);
383 1.1 mrg }
384 1.1 mrg
385 1.1 mrg /* Returns true if evaluating E as an lvalue has side-effects;
386 1.1 mrg specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
387 1.1 mrg have side-effects until there is a read or write through it. */
388 1.1 mrg
389 1.1 mrg static bool
390 1.1 mrg lvalue_has_side_effects (tree e)
391 1.1 mrg {
392 1.1 mrg if (!TREE_SIDE_EFFECTS (e))
393 1.1 mrg return false;
394 1.1 mrg while (handled_component_p (e))
395 1.1 mrg {
396 1.1 mrg if (TREE_CODE (e) == ARRAY_REF
397 1.1 mrg && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
398 1.1 mrg return true;
399 1.1 mrg e = TREE_OPERAND (e, 0);
400 1.1 mrg }
401 1.1 mrg if (DECL_P (e))
402 1.1 mrg /* Just naming a variable has no side-effects. */
403 1.1 mrg return false;
404 1.1 mrg else if (INDIRECT_REF_P (e))
405 1.1 mrg /* Similarly, indirection has no side-effects. */
406 1.1 mrg return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
407 1.1 mrg else
408 1.1 mrg /* For anything else, trust TREE_SIDE_EFFECTS. */
409 1.1 mrg return TREE_SIDE_EFFECTS (e);
410 1.1 mrg }
411 1.1 mrg
412 1.1 mrg /* Gimplify *EXPR_P as rvalue into an expression that can't be modified
413 1.1 mrg by expressions with side-effects in other operands. */
414 1.1 mrg
415 1.1 mrg static enum gimplify_status
416 1.1 mrg gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
417 1.1 mrg bool (*gimple_test_f) (tree))
418 1.1 mrg {
419 1.1 mrg enum gimplify_status t
420 1.1 mrg = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
421 1.1 mrg if (t == GS_ERROR)
422 1.1 mrg return GS_ERROR;
423 1.1 mrg else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
424 1.1 mrg *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
425 1.1 mrg return t;
426 1.1 mrg }
427 1.1 mrg
/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set, make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
		 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
	 that can mean we don't copy the argument and some following
	 argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
	return GS_ERROR;
      else if (ordered
	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
	       && is_gimple_variable (*arg_p)
	       && TREE_CODE (*arg_p) != SSA_NAME
	       /* No need to force references into register, references
		  can't be modified.  */
	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
	       /* And this can't be modified either.  */
	       && *arg_p != current_class_ptr)
	/* Copy the value into a temporary so a later side-effecting
	   argument can't change it.  */
	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }

}
468 1.1 mrg
/* Do C++-specific gimplification.  Args are as for gimplify_expr.
   Lowers the C++-only tree codes (AGGR_INIT_EXPR, VEC_INIT_EXPR,
   THROW_EXPR, etc.) and enforces C++17 evaluation-order rules for
   assignments and calls, deferring everything else to
   c_gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  /* For statement codes, temporarily switch to the statement's own
     full-expression setting; restored at the end.  */
  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	*expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
					tf_warning_or_error);

	/* The expansion is still front-end trees; fold and genericize it
	   before handing it back to the gimplifier.  */
	cp_fold_data data (/*genericize*/true);
	cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    /* These statement codes were all lowered during genericization and
       must not reach the gimplifier.  */
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	/* Unary plus is a no-op apart from a possible conversion.  */
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      /* Under C++17 evaluation order, an indirect callee must be
	 evaluated before the arguments.  */
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  int last_side_effects_arg = -1;
	  for (int i = nargs; i > 0; --i)
	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
	      {
		last_side_effects_arg = i;
		break;
	      }
	  /* ORDERED is true only for arguments sequenced before one with
	     side-effects.  */
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
				   i < last_side_effects_arg);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      int nargs = call_expr_nargs (*expr_p);
	      bool side_effects = false;
	      for (int i = 1; i < nargs; ++i)
		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
		  {
		    side_effects = true;
		    break;
		  }
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
				   side_effects);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  /* Fold the front-end magic builtins that must not survive to
	     the middle end.  */
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
	    switch (DECL_FE_FUNCTION_CODE (decl))
	      {
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		/* By gimplification time we are no longer in a constant
		   evaluation context.  */
		*expr_p = boolean_false_node;
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		*expr_p
		  = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		*expr_p
		  = fold_builtin_is_corresponding_member
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		*expr_p
		  = fold_builtin_is_pointer_inverconvertible_with_class
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      default:
		break;
	      }
	}
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
797 1.1 mrg
798 1.1 mrg static inline bool
799 1.1 mrg is_invisiref_parm (const_tree t)
800 1.1 mrg {
801 1.1 mrg return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
802 1.1 mrg && DECL_BY_REFERENCE (t));
803 1.1 mrg }
804 1.1 mrg
805 1.1 mrg /* A stable comparison routine for use with splay trees and DECLs. */
806 1.1 mrg
807 1.1 mrg static int
808 1.1 mrg splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
809 1.1 mrg {
810 1.1 mrg tree a = (tree) xa;
811 1.1 mrg tree b = (tree) xb;
812 1.1 mrg
813 1.1 mrg return DECL_UID (a) - DECL_UID (b);
814 1.1 mrg }
815 1.1 mrg
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  /* True for a parallel region, false for a task region.  */
  bool is_parallel;
  /* True when the region's default data-sharing is `shared'.  */
  bool default_shared;
  /* Enclosing task/parallel region, if any.  */
  struct cp_genericize_omp_taskreg *outer;
  /* Map from DECL to its data-sharing classification in this region.  */
  splay_tree variables;
};
825 1.1 mrg
826 1.1 mrg /* Return true if genericization should try to determine if
827 1.1 mrg DECL is firstprivate or shared within task regions. */
828 1.1 mrg
829 1.1 mrg static bool
830 1.1 mrg omp_var_to_track (tree decl)
831 1.1 mrg {
832 1.1 mrg tree type = TREE_TYPE (decl);
833 1.1 mrg if (is_invisiref_parm (decl))
834 1.1 mrg type = TREE_TYPE (type);
835 1.1 mrg else if (TYPE_REF_P (type))
836 1.1 mrg type = TREE_TYPE (type);
837 1.1 mrg while (TREE_CODE (type) == ARRAY_TYPE)
838 1.1 mrg type = TREE_TYPE (type);
839 1.1 mrg if (type == error_mark_node || !CLASS_TYPE_P (type))
840 1.1 mrg return false;
841 1.1 mrg if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
842 1.1 mrg return false;
843 1.1 mrg if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
844 1.1 mrg return false;
845 1.1 mrg return true;
846 1.1 mrg }
847 1.1 mrg
848 1.1 mrg /* Note DECL use in OpenMP region OMP_CTX during genericization. */
849 1.1 mrg
850 1.1 mrg static void
851 1.1 mrg omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
852 1.1 mrg {
853 1.1 mrg splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
854 1.1 mrg (splay_tree_key) decl);
855 1.1 mrg if (n == NULL)
856 1.1 mrg {
857 1.1 mrg int flags = OMP_CLAUSE_DEFAULT_SHARED;
858 1.1 mrg if (omp_ctx->outer)
859 1.1 mrg omp_cxx_notice_variable (omp_ctx->outer, decl);
860 1.1 mrg if (!omp_ctx->default_shared)
861 1.1 mrg {
862 1.1 mrg struct cp_genericize_omp_taskreg *octx;
863 1.1 mrg
864 1.1 mrg for (octx = omp_ctx->outer; octx; octx = octx->outer)
865 1.1 mrg {
866 1.1 mrg n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
867 1.1 mrg if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
868 1.1 mrg {
869 1.1 mrg flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
870 1.1 mrg break;
871 1.1 mrg }
872 1.1 mrg if (octx->is_parallel)
873 1.1 mrg break;
874 1.1 mrg }
875 1.1 mrg if (octx == NULL
876 1.1 mrg && (TREE_CODE (decl) == PARM_DECL
877 1.1 mrg || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
878 1.1 mrg && DECL_CONTEXT (decl) == current_function_decl)))
879 1.1 mrg flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
880 1.1 mrg if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
881 1.1 mrg {
882 1.1 mrg /* DECL is implicitly determined firstprivate in
883 1.1 mrg the current task construct. Ensure copy ctor and
884 1.1 mrg dtor are instantiated, because during gimplification
885 1.1 mrg it will be already too late. */
886 1.1 mrg tree type = TREE_TYPE (decl);
887 1.1 mrg if (is_invisiref_parm (decl))
888 1.1 mrg type = TREE_TYPE (type);
889 1.1 mrg else if (TYPE_REF_P (type))
890 1.1 mrg type = TREE_TYPE (type);
891 1.1 mrg while (TREE_CODE (type) == ARRAY_TYPE)
892 1.1 mrg type = TREE_TYPE (type);
893 1.1 mrg get_copy_ctor (type, tf_none);
894 1.1 mrg get_dtor (type, tf_none);
895 1.1 mrg }
896 1.1 mrg }
897 1.1 mrg splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
898 1.1 mrg }
899 1.1 mrg }
900 1.1 mrg
901 1.1 mrg /* If we might need to clean up a partially constructed object, break down the
902 1.1 mrg CONSTRUCTOR with split_nonconstant_init. Also expand VEC_INIT_EXPR at this
903 1.1 mrg point. If initializing TO with FROM is non-trivial, overwrite *REPLACE with
904 1.1 mrg the result. */
905 1.1 mrg
906 1.1 mrg static void
907 1.1 mrg cp_genericize_init (tree *replace, tree from, tree to)
908 1.1 mrg {
909 1.1 mrg if (TREE_CODE (from) == VEC_INIT_EXPR)
910 1.1 mrg {
911 1.1 mrg tree init = expand_vec_init_expr (to, from, tf_warning_or_error);
912 1.1 mrg
913 1.1 mrg /* Make cp_gimplify_init_expr call replace_decl. */
914 1.1 mrg *replace = fold_convert (void_type_node, init);
915 1.1 mrg }
916 1.1 mrg else if (flag_exceptions
917 1.1 mrg && TREE_CODE (from) == CONSTRUCTOR
918 1.1 mrg && TREE_SIDE_EFFECTS (from)
919 1.1 mrg && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
920 1.1 mrg {
921 1.1 mrg to = cp_stabilize_reference (to);
922 1.1 mrg replace_placeholders (from, to);
923 1.1 mrg *replace = split_nonconstant_init (to, from);
924 1.1 mrg }
925 1.1 mrg }
926 1.1 mrg
927 1.1 mrg /* For an INIT_EXPR, replace the INIT_EXPR itself. */
928 1.1 mrg
929 1.1 mrg static void
930 1.1 mrg cp_genericize_init_expr (tree *stmt_p)
931 1.1 mrg {
932 1.1 mrg iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
933 1.1 mrg tree to = TREE_OPERAND (*stmt_p, 0);
934 1.1 mrg tree from = TREE_OPERAND (*stmt_p, 1);
935 1.1 mrg if (SIMPLE_TARGET_EXPR_P (from)
936 1.1 mrg /* Return gets confused if we clobber its INIT_EXPR this soon. */
937 1.1 mrg && TREE_CODE (to) != RESULT_DECL)
938 1.1 mrg from = TARGET_EXPR_INITIAL (from);
939 1.1 mrg cp_genericize_init (stmt_p, from, to);
940 1.1 mrg }
941 1.1 mrg
942 1.1 mrg /* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL. We will need to use
943 1.1 mrg replace_decl later when we know what we're initializing. */
944 1.1 mrg
945 1.1 mrg static void
946 1.1 mrg cp_genericize_target_expr (tree *stmt_p)
947 1.1 mrg {
948 1.1 mrg iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
949 1.1 mrg tree slot = TARGET_EXPR_SLOT (*stmt_p);
950 1.1 mrg cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
951 1.1 mrg TARGET_EXPR_INITIAL (*stmt_p), slot);
952 1.1 mrg gcc_assert (!DECL_INITIAL (slot));
953 1.1 mrg }
954 1.1 mrg
955 1.1 mrg /* Genericization context. */
956 1.1 mrg
957 1.1 mrg struct cp_genericize_data
958 1.1 mrg {
959 1.1 mrg hash_set<tree> *p_set;
960 1.1 mrg auto_vec<tree> bind_expr_stack;
961 1.1 mrg struct cp_genericize_omp_taskreg *omp_ctx;
962 1.1 mrg tree try_block;
963 1.1 mrg bool no_sanitize_p;
964 1.1 mrg bool handle_invisiref_parm_p;
965 1.1 mrg };
966 1.1 mrg
967 1.1 mrg /* Perform any pre-gimplification folding of C++ front end trees to
968 1.1 mrg GENERIC.
969 1.1 mrg Note: The folding of non-omp cases is something to move into
970 1.1 mrg the middle-end. As for now we have most foldings only on GENERIC
971 1.1 mrg in fold-const, we need to perform this before transformation to
972 1.1 mrg GIMPLE-form. */
973 1.1 mrg
974 1.1 mrg static tree
975 1.1 mrg cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
976 1.1 mrg {
977 1.1 mrg cp_fold_data *data = (cp_fold_data*)data_;
978 1.1 mrg tree stmt = *stmt_p;
979 1.1 mrg enum tree_code code = TREE_CODE (stmt);
980 1.1 mrg
981 1.1 mrg switch (code)
982 1.1 mrg {
983 1.1 mrg case PTRMEM_CST:
984 1.1 mrg if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
985 1.1 mrg && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
986 1.1 mrg {
987 1.1 mrg if (!data->pset.add (stmt))
988 1.1 mrg error_at (PTRMEM_CST_LOCATION (stmt),
989 1.1 mrg "taking address of an immediate function %qD",
990 1.1 mrg PTRMEM_CST_MEMBER (stmt));
991 1.1 mrg stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
992 1.1 mrg break;
993 1.1 mrg }
994 1.1 mrg break;
995 1.1 mrg
996 1.1 mrg case ADDR_EXPR:
997 1.1 mrg if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
998 1.1 mrg && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
999 1.1 mrg {
1000 1.1 mrg error_at (EXPR_LOCATION (stmt),
1001 1.1 mrg "taking address of an immediate function %qD",
1002 1.1 mrg TREE_OPERAND (stmt, 0));
1003 1.1 mrg stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
1004 1.1 mrg break;
1005 1.1 mrg }
1006 1.1 mrg break;
1007 1.1 mrg
1008 1.1 mrg case CALL_EXPR:
1009 1.1 mrg if (tree fndecl = cp_get_callee_fndecl_nofold (stmt))
1010 1.1 mrg if (DECL_IMMEDIATE_FUNCTION_P (fndecl)
1011 1.1 mrg && source_location_current_p (fndecl))
1012 1.1 mrg *stmt_p = stmt = cxx_constant_value (stmt);
1013 1.1 mrg break;
1014 1.1 mrg
1015 1.1 mrg case VAR_DECL:
1016 1.1 mrg /* In initializers replace anon union artificial VAR_DECLs
1017 1.1 mrg with their DECL_VALUE_EXPRs, as nothing will do it later.
1018 1.1 mrg Ditto for structured bindings. */
1019 1.1 mrg if (!data->genericize
1020 1.1 mrg && DECL_HAS_VALUE_EXPR_P (stmt)
1021 1.1 mrg && (DECL_ANON_UNION_VAR_P (stmt)
1022 1.1 mrg || (DECL_DECOMPOSITION_P (stmt) && DECL_DECOMP_BASE (stmt))))
1023 1.1 mrg {
1024 1.1 mrg *stmt_p = stmt = unshare_expr (DECL_VALUE_EXPR (stmt));
1025 1.1 mrg break;
1026 1.1 mrg }
1027 1.1 mrg break;
1028 1.1 mrg
1029 1.1 mrg default:
1030 1.1 mrg break;
1031 1.1 mrg }
1032 1.1 mrg
1033 1.1 mrg *stmt_p = stmt = cp_fold (*stmt_p);
1034 1.1 mrg
1035 1.1 mrg if (data->pset.add (stmt))
1036 1.1 mrg {
1037 1.1 mrg /* Don't walk subtrees of stmts we've already walked once, otherwise
1038 1.1 mrg we can have exponential complexity with e.g. lots of nested
1039 1.1 mrg SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will return
1040 1.1 mrg always the same tree, which the first time cp_fold_r has been
1041 1.1 mrg called on it had the subtrees walked. */
1042 1.1 mrg *walk_subtrees = 0;
1043 1.1 mrg return NULL;
1044 1.1 mrg }
1045 1.1 mrg
1046 1.1 mrg code = TREE_CODE (stmt);
1047 1.1 mrg switch (code)
1048 1.1 mrg {
1049 1.1 mrg tree x;
1050 1.1 mrg int i, n;
1051 1.1 mrg case OMP_FOR:
1052 1.1 mrg case OMP_SIMD:
1053 1.1 mrg case OMP_DISTRIBUTE:
1054 1.1 mrg case OMP_LOOP:
1055 1.1 mrg case OMP_TASKLOOP:
1056 1.1 mrg case OACC_LOOP:
1057 1.1 mrg cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
1058 1.1 mrg cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
1059 1.1 mrg cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
1060 1.1 mrg x = OMP_FOR_COND (stmt);
1061 1.1 mrg if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
1062 1.1 mrg {
1063 1.1 mrg cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
1064 1.1 mrg cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
1065 1.1 mrg }
1066 1.1 mrg else if (x && TREE_CODE (x) == TREE_VEC)
1067 1.1 mrg {
1068 1.1 mrg n = TREE_VEC_LENGTH (x);
1069 1.1 mrg for (i = 0; i < n; i++)
1070 1.1 mrg {
1071 1.1 mrg tree o = TREE_VEC_ELT (x, i);
1072 1.1 mrg if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
1073 1.1 mrg cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1074 1.1 mrg }
1075 1.1 mrg }
1076 1.1 mrg x = OMP_FOR_INCR (stmt);
1077 1.1 mrg if (x && TREE_CODE (x) == TREE_VEC)
1078 1.1 mrg {
1079 1.1 mrg n = TREE_VEC_LENGTH (x);
1080 1.1 mrg for (i = 0; i < n; i++)
1081 1.1 mrg {
1082 1.1 mrg tree o = TREE_VEC_ELT (x, i);
1083 1.1 mrg if (o && TREE_CODE (o) == MODIFY_EXPR)
1084 1.1 mrg o = TREE_OPERAND (o, 1);
1085 1.1 mrg if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
1086 1.1 mrg || TREE_CODE (o) == POINTER_PLUS_EXPR))
1087 1.1 mrg {
1088 1.1 mrg cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
1089 1.1 mrg cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1090 1.1 mrg }
1091 1.1 mrg }
1092 1.1 mrg }
1093 1.1 mrg cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
1094 1.1 mrg *walk_subtrees = 0;
1095 1.1 mrg return NULL;
1096 1.1 mrg
1097 1.1 mrg case IF_STMT:
1098 1.1 mrg if (IF_STMT_CONSTEVAL_P (stmt))
1099 1.1 mrg {
1100 1.1 mrg /* Don't walk THEN_CLAUSE (stmt) for consteval if. IF_COND is always
1101 1.1 mrg boolean_false_node. */
1102 1.1 mrg cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
1103 1.1 mrg cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
1104 1.1 mrg *walk_subtrees = 0;
1105 1.1 mrg return NULL;
1106 1.1 mrg }
1107 1.1 mrg break;
1108 1.1 mrg
1109 1.1 mrg /* These are only for genericize time; they're here rather than in
1110 1.1 mrg cp_genericize to avoid problems with the invisible reference
1111 1.1 mrg transition. */
1112 1.1 mrg case INIT_EXPR:
1113 1.1 mrg if (data->genericize)
1114 1.1 mrg cp_genericize_init_expr (stmt_p);
1115 1.1 mrg break;
1116 1.1 mrg
1117 1.1 mrg case TARGET_EXPR:
1118 1.1 mrg if (data->genericize)
1119 1.1 mrg cp_genericize_target_expr (stmt_p);
1120 1.1 mrg break;
1121 1.1 mrg
1122 1.1 mrg default:
1123 1.1 mrg break;
1124 1.1 mrg }
1125 1.1 mrg
1126 1.1 mrg return NULL;
1127 1.1 mrg }
1128 1.1 mrg
1129 1.1 mrg /* Fold ALL the trees! FIXME we should be able to remove this, but
1130 1.1 mrg apparently that still causes optimization regressions. */
1131 1.1 mrg
1132 1.1 mrg void
1133 1.1 mrg cp_fold_function (tree fndecl)
1134 1.1 mrg {
1135 1.1 mrg cp_fold_data data (/*genericize*/true);
1136 1.1 mrg cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
1137 1.1 mrg }
1138 1.1 mrg
1139 1.1 mrg /* Turn SPACESHIP_EXPR EXPR into GENERIC. */
1140 1.1 mrg
1141 1.1 mrg static tree genericize_spaceship (tree expr)
1142 1.1 mrg {
1143 1.1 mrg iloc_sentinel s (cp_expr_location (expr));
1144 1.1 mrg tree type = TREE_TYPE (expr);
1145 1.1 mrg tree op0 = TREE_OPERAND (expr, 0);
1146 1.1 mrg tree op1 = TREE_OPERAND (expr, 1);
1147 1.1 mrg return genericize_spaceship (input_location, type, op0, op1);
1148 1.1 mrg }
1149 1.1 mrg
1150 1.1 mrg /* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
1151 1.1 mrg to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
1152 1.1 mrg the middle-end (c++/88256). If EXPR is a DECL, use add_stmt and return
1153 1.1 mrg NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR. */
1154 1.1 mrg
1155 1.1 mrg tree
1156 1.1 mrg predeclare_vla (tree expr)
1157 1.1 mrg {
1158 1.1 mrg tree type = TREE_TYPE (expr);
1159 1.1 mrg if (type == error_mark_node)
1160 1.1 mrg return expr;
1161 1.1 mrg if (is_typedef_decl (expr))
1162 1.1 mrg type = DECL_ORIGINAL_TYPE (expr);
1163 1.1 mrg
1164 1.1 mrg /* We need to strip pointers for gimplify_type_sizes. */
1165 1.1 mrg tree vla = type;
1166 1.1 mrg while (POINTER_TYPE_P (vla))
1167 1.1 mrg {
1168 1.1 mrg if (TYPE_NAME (vla))
1169 1.1 mrg return expr;
1170 1.1 mrg vla = TREE_TYPE (vla);
1171 1.1 mrg }
1172 1.1 mrg if (vla == type || TYPE_NAME (vla)
1173 1.1 mrg || !variably_modified_type_p (vla, NULL_TREE))
1174 1.1 mrg return expr;
1175 1.1 mrg
1176 1.1 mrg tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
1177 1.1 mrg DECL_ARTIFICIAL (decl) = 1;
1178 1.1 mrg TYPE_NAME (vla) = decl;
1179 1.1 mrg tree dexp = build_stmt (input_location, DECL_EXPR, decl);
1180 1.1 mrg if (DECL_P (expr))
1181 1.1 mrg {
1182 1.1 mrg add_stmt (dexp);
1183 1.1 mrg return NULL_TREE;
1184 1.1 mrg }
1185 1.1 mrg else
1186 1.1 mrg {
1187 1.1 mrg expr = build2 (COMPOUND_EXPR, type, dexp, expr);
1188 1.1 mrg return expr;
1189 1.1 mrg }
1190 1.1 mrg }
1191 1.1 mrg
1192 1.1 mrg /* Perform any pre-gimplification lowering of C++ front end trees to
1193 1.1 mrg GENERIC. */
1194 1.1 mrg
1195 1.1 mrg static tree
1196 1.1 mrg cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1197 1.1 mrg {
1198 1.1 mrg tree stmt = *stmt_p;
1199 1.1 mrg struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1200 1.1 mrg hash_set<tree> *p_set = wtd->p_set;
1201 1.1 mrg
1202 1.1 mrg /* If in an OpenMP context, note var uses. */
1203 1.1 mrg if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1204 1.1 mrg && (VAR_P (stmt)
1205 1.1 mrg || TREE_CODE (stmt) == PARM_DECL
1206 1.1 mrg || TREE_CODE (stmt) == RESULT_DECL)
1207 1.1 mrg && omp_var_to_track (stmt))
1208 1.1 mrg omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1209 1.1 mrg
1210 1.1 mrg /* Don't dereference parms in a thunk, pass the references through. */
1211 1.1 mrg if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
1212 1.1 mrg || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1213 1.1 mrg {
1214 1.1 mrg *walk_subtrees = 0;
1215 1.1 mrg return NULL;
1216 1.1 mrg }
1217 1.1 mrg
1218 1.1 mrg /* Dereference invisible reference parms. */
1219 1.1 mrg if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
1220 1.1 mrg {
1221 1.1 mrg *stmt_p = convert_from_reference (stmt);
1222 1.1 mrg p_set->add (*stmt_p);
1223 1.1 mrg *walk_subtrees = 0;
1224 1.1 mrg return NULL;
1225 1.1 mrg }
1226 1.1 mrg
1227 1.1 mrg /* Map block scope extern declarations to visible declarations with the
1228 1.1 mrg same name and type in outer scopes if any. */
1229 1.1 mrg if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
1230 1.1 mrg if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
1231 1.1 mrg {
1232 1.1 mrg if (alias != error_mark_node)
1233 1.1 mrg {
1234 1.1 mrg *stmt_p = alias;
1235 1.1 mrg TREE_USED (alias) |= TREE_USED (stmt);
1236 1.1 mrg }
1237 1.1 mrg *walk_subtrees = 0;
1238 1.1 mrg return NULL;
1239 1.1 mrg }
1240 1.1 mrg
1241 1.1 mrg if (TREE_CODE (stmt) == INTEGER_CST
1242 1.1 mrg && TYPE_REF_P (TREE_TYPE (stmt))
1243 1.1 mrg && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1244 1.1 mrg && !wtd->no_sanitize_p)
1245 1.1 mrg {
1246 1.1 mrg ubsan_maybe_instrument_reference (stmt_p);
1247 1.1 mrg if (*stmt_p != stmt)
1248 1.1 mrg {
1249 1.1 mrg *walk_subtrees = 0;
1250 1.1 mrg return NULL_TREE;
1251 1.1 mrg }
1252 1.1 mrg }
1253 1.1 mrg
1254 1.1 mrg /* Other than invisiref parms, don't walk the same tree twice. */
1255 1.1 mrg if (p_set->contains (stmt))
1256 1.1 mrg {
1257 1.1 mrg *walk_subtrees = 0;
1258 1.1 mrg return NULL_TREE;
1259 1.1 mrg }
1260 1.1 mrg
1261 1.1 mrg switch (TREE_CODE (stmt))
1262 1.1 mrg {
1263 1.1 mrg case ADDR_EXPR:
1264 1.1 mrg if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1265 1.1 mrg {
1266 1.1 mrg /* If in an OpenMP context, note var uses. */
1267 1.1 mrg if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1268 1.1 mrg && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1269 1.1 mrg omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1270 1.1 mrg *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1271 1.1 mrg *walk_subtrees = 0;
1272 1.1 mrg }
1273 1.1 mrg break;
1274 1.1 mrg
1275 1.1 mrg case RETURN_EXPR:
1276 1.1 mrg if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1277 1.1 mrg /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1278 1.1 mrg *walk_subtrees = 0;
1279 1.1 mrg break;
1280 1.1 mrg
1281 1.1 mrg case OMP_CLAUSE:
1282 1.1 mrg switch (OMP_CLAUSE_CODE (stmt))
1283 1.1 mrg {
1284 1.1 mrg case OMP_CLAUSE_LASTPRIVATE:
1285 1.1 mrg /* Don't dereference an invisiref in OpenMP clauses. */
1286 1.1 mrg if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1287 1.1 mrg {
1288 1.1 mrg *walk_subtrees = 0;
1289 1.1 mrg if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1290 1.1 mrg cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1291 1.1 mrg cp_genericize_r, data, NULL);
1292 1.1 mrg }
1293 1.1 mrg break;
1294 1.1 mrg case OMP_CLAUSE_PRIVATE:
1295 1.1 mrg /* Don't dereference an invisiref in OpenMP clauses. */
1296 1.1 mrg if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1297 1.1 mrg *walk_subtrees = 0;
1298 1.1 mrg else if (wtd->omp_ctx != NULL)
1299 1.1 mrg {
1300 1.1 mrg /* Private clause doesn't cause any references to the
1301 1.1 mrg var in outer contexts, avoid calling
1302 1.1 mrg omp_cxx_notice_variable for it. */
1303 1.1 mrg struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1304 1.1 mrg wtd->omp_ctx = NULL;
1305 1.1 mrg cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1306 1.1 mrg data, NULL);
1307 1.1 mrg wtd->omp_ctx = old;
1308 1.1 mrg *walk_subtrees = 0;
1309 1.1 mrg }
1310 1.1 mrg break;
1311 1.1 mrg case OMP_CLAUSE_SHARED:
1312 1.1 mrg case OMP_CLAUSE_FIRSTPRIVATE:
1313 1.1 mrg case OMP_CLAUSE_COPYIN:
1314 1.1 mrg case OMP_CLAUSE_COPYPRIVATE:
1315 1.1 mrg case OMP_CLAUSE_INCLUSIVE:
1316 1.1 mrg case OMP_CLAUSE_EXCLUSIVE:
1317 1.1 mrg /* Don't dereference an invisiref in OpenMP clauses. */
1318 1.1 mrg if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1319 1.1 mrg *walk_subtrees = 0;
1320 1.1 mrg break;
1321 1.1 mrg case OMP_CLAUSE_REDUCTION:
1322 1.1 mrg case OMP_CLAUSE_IN_REDUCTION:
1323 1.1 mrg case OMP_CLAUSE_TASK_REDUCTION:
1324 1.1 mrg /* Don't dereference an invisiref in reduction clause's
1325 1.1 mrg OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1326 1.1 mrg still needs to be genericized. */
1327 1.1 mrg if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1328 1.1 mrg {
1329 1.1 mrg *walk_subtrees = 0;
1330 1.1 mrg if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1331 1.1 mrg cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1332 1.1 mrg cp_genericize_r, data, NULL);
1333 1.1 mrg if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1334 1.1 mrg cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1335 1.1 mrg cp_genericize_r, data, NULL);
1336 1.1 mrg }
1337 1.1 mrg break;
1338 1.1 mrg default:
1339 1.1 mrg break;
1340 1.1 mrg }
1341 1.1 mrg break;
1342 1.1 mrg
1343 1.1 mrg /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1344 1.1 mrg to lower this construct before scanning it, so we need to lower these
1345 1.1 mrg before doing anything else. */
1346 1.1 mrg case CLEANUP_STMT:
1347 1.1 mrg *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1348 1.1 mrg CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1349 1.1 mrg : TRY_FINALLY_EXPR,
1350 1.1 mrg void_type_node,
1351 1.1 mrg CLEANUP_BODY (stmt),
1352 1.1 mrg CLEANUP_EXPR (stmt));
1353 1.1 mrg break;
1354 1.1 mrg
1355 1.1 mrg case IF_STMT:
1356 1.1 mrg genericize_if_stmt (stmt_p);
1357 1.1 mrg /* *stmt_p has changed, tail recurse to handle it again. */
1358 1.1 mrg return cp_genericize_r (stmt_p, walk_subtrees, data);
1359 1.1 mrg
1360 1.1 mrg /* COND_EXPR might have incompatible types in branches if one or both
1361 1.1 mrg arms are bitfields. Fix it up now. */
1362 1.1 mrg case COND_EXPR:
1363 1.1 mrg {
1364 1.1 mrg tree type_left
1365 1.1 mrg = (TREE_OPERAND (stmt, 1)
1366 1.1 mrg ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1367 1.1 mrg : NULL_TREE);
1368 1.1 mrg tree type_right
1369 1.1 mrg = (TREE_OPERAND (stmt, 2)
1370 1.1 mrg ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1371 1.1 mrg : NULL_TREE);
1372 1.1 mrg if (type_left
1373 1.1 mrg && !useless_type_conversion_p (TREE_TYPE (stmt),
1374 1.1 mrg TREE_TYPE (TREE_OPERAND (stmt, 1))))
1375 1.1 mrg {
1376 1.1 mrg TREE_OPERAND (stmt, 1)
1377 1.1 mrg = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1378 1.1 mrg gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1379 1.1 mrg type_left));
1380 1.1 mrg }
1381 1.1 mrg if (type_right
1382 1.1 mrg && !useless_type_conversion_p (TREE_TYPE (stmt),
1383 1.1 mrg TREE_TYPE (TREE_OPERAND (stmt, 2))))
1384 1.1 mrg {
1385 1.1 mrg TREE_OPERAND (stmt, 2)
1386 1.1 mrg = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1387 1.1 mrg gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1388 1.1 mrg type_right));
1389 1.1 mrg }
1390 1.1 mrg }
1391 1.1 mrg break;
1392 1.1 mrg
1393 1.1 mrg case BIND_EXPR:
1394 1.1 mrg if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1395 1.1 mrg {
1396 1.1 mrg tree decl;
1397 1.1 mrg for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1398 1.1 mrg if (VAR_P (decl)
1399 1.1 mrg && !DECL_EXTERNAL (decl)
1400 1.1 mrg && omp_var_to_track (decl))
1401 1.1 mrg {
1402 1.1 mrg splay_tree_node n
1403 1.1 mrg = splay_tree_lookup (wtd->omp_ctx->variables,
1404 1.1 mrg (splay_tree_key) decl);
1405 1.1 mrg if (n == NULL)
1406 1.1 mrg splay_tree_insert (wtd->omp_ctx->variables,
1407 1.1 mrg (splay_tree_key) decl,
1408 1.1 mrg TREE_STATIC (decl)
1409 1.1 mrg ? OMP_CLAUSE_DEFAULT_SHARED
1410 1.1 mrg : OMP_CLAUSE_DEFAULT_PRIVATE);
1411 1.1 mrg }
1412 1.1 mrg }
1413 1.1 mrg if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1414 1.1 mrg {
1415 1.1 mrg /* The point here is to not sanitize static initializers. */
1416 1.1 mrg bool no_sanitize_p = wtd->no_sanitize_p;
1417 1.1 mrg wtd->no_sanitize_p = true;
1418 1.1 mrg for (tree decl = BIND_EXPR_VARS (stmt);
1419 1.1 mrg decl;
1420 1.1 mrg decl = DECL_CHAIN (decl))
1421 1.1 mrg if (VAR_P (decl)
1422 1.1 mrg && TREE_STATIC (decl)
1423 1.1 mrg && DECL_INITIAL (decl))
1424 1.1 mrg cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1425 1.1 mrg wtd->no_sanitize_p = no_sanitize_p;
1426 1.1 mrg }
1427 1.1 mrg wtd->bind_expr_stack.safe_push (stmt);
1428 1.1 mrg cp_walk_tree (&BIND_EXPR_BODY (stmt),
1429 1.1 mrg cp_genericize_r, data, NULL);
1430 1.1 mrg wtd->bind_expr_stack.pop ();
1431 1.1 mrg break;
1432 1.1 mrg
1433 1.1 mrg case USING_STMT:
1434 1.1 mrg {
1435 1.1 mrg tree block = NULL_TREE;
1436 1.1 mrg
1437 1.1 mrg /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1438 1.1 mrg BLOCK, and append an IMPORTED_DECL to its
1439 1.1 mrg BLOCK_VARS chained list. */
1440 1.1 mrg if (wtd->bind_expr_stack.exists ())
1441 1.1 mrg {
1442 1.1 mrg int i;
1443 1.1 mrg for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1444 1.1 mrg if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1445 1.1 mrg break;
1446 1.1 mrg }
1447 1.1 mrg if (block)
1448 1.1 mrg {
1449 1.1 mrg tree decl = TREE_OPERAND (stmt, 0);
1450 1.1 mrg gcc_assert (decl);
1451 1.1 mrg
1452 1.1 mrg if (undeduced_auto_decl (decl))
1453 1.1 mrg /* Omit from the GENERIC, the back-end can't handle it. */;
1454 1.1 mrg else
1455 1.1 mrg {
1456 1.1 mrg tree using_directive = make_node (IMPORTED_DECL);
1457 1.1 mrg TREE_TYPE (using_directive) = void_type_node;
1458 1.1 mrg DECL_CONTEXT (using_directive) = current_function_decl;
1459 1.1 mrg DECL_SOURCE_LOCATION (using_directive)
1460 1.1 mrg = cp_expr_loc_or_input_loc (stmt);
1461 1.1 mrg
1462 1.1 mrg IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1463 1.1 mrg DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1464 1.1 mrg BLOCK_VARS (block) = using_directive;
1465 1.1 mrg }
1466 1.1 mrg }
1467 1.1 mrg /* The USING_STMT won't appear in GENERIC. */
1468 1.1 mrg *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1469 1.1 mrg *walk_subtrees = 0;
1470 1.1 mrg }
1471 1.1 mrg break;
1472 1.1 mrg
1473 1.1 mrg case DECL_EXPR:
1474 1.1 mrg if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1475 1.1 mrg {
1476 1.1 mrg /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1477 1.1 mrg *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1478 1.1 mrg *walk_subtrees = 0;
1479 1.1 mrg }
1480 1.1 mrg else
1481 1.1 mrg {
1482 1.1 mrg tree d = DECL_EXPR_DECL (stmt);
1483 1.1 mrg if (VAR_P (d))
1484 1.1 mrg gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1485 1.1 mrg }
1486 1.1 mrg break;
1487 1.1 mrg
1488 1.1 mrg case OMP_PARALLEL:
1489 1.1 mrg case OMP_TASK:
1490 1.1 mrg case OMP_TASKLOOP:
1491 1.1 mrg {
1492 1.1 mrg struct cp_genericize_omp_taskreg omp_ctx;
1493 1.1 mrg tree c, decl;
1494 1.1 mrg splay_tree_node n;
1495 1.1 mrg
1496 1.1 mrg *walk_subtrees = 0;
1497 1.1 mrg cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1498 1.1 mrg omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1499 1.1 mrg omp_ctx.default_shared = omp_ctx.is_parallel;
1500 1.1 mrg omp_ctx.outer = wtd->omp_ctx;
1501 1.1 mrg omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1502 1.1 mrg wtd->omp_ctx = &omp_ctx;
1503 1.1 mrg for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1504 1.1 mrg switch (OMP_CLAUSE_CODE (c))
1505 1.1 mrg {
1506 1.1 mrg case OMP_CLAUSE_SHARED:
1507 1.1 mrg case OMP_CLAUSE_PRIVATE:
1508 1.1 mrg case OMP_CLAUSE_FIRSTPRIVATE:
1509 1.1 mrg case OMP_CLAUSE_LASTPRIVATE:
1510 1.1 mrg decl = OMP_CLAUSE_DECL (c);
1511 1.1 mrg if (decl == error_mark_node || !omp_var_to_track (decl))
1512 1.1 mrg break;
1513 1.1 mrg n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1514 1.1 mrg if (n != NULL)
1515 1.1 mrg break;
1516 1.1 mrg splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1517 1.1 mrg OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1518 1.1 mrg ? OMP_CLAUSE_DEFAULT_SHARED
1519 1.1 mrg : OMP_CLAUSE_DEFAULT_PRIVATE);
1520 1.1 mrg if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1521 1.1 mrg omp_cxx_notice_variable (omp_ctx.outer, decl);
1522 1.1 mrg break;
1523 1.1 mrg case OMP_CLAUSE_DEFAULT:
1524 1.1 mrg if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1525 1.1 mrg omp_ctx.default_shared = true;
1526 1.1 mrg default:
1527 1.1 mrg break;
1528 1.1 mrg }
1529 1.1 mrg if (TREE_CODE (stmt) == OMP_TASKLOOP)
1530 1.1 mrg c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1531 1.1 mrg cp_genericize_r, cp_walk_subtrees);
1532 1.1 mrg else
1533 1.1 mrg cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1534 1.1 mrg wtd->omp_ctx = omp_ctx.outer;
1535 1.1 mrg splay_tree_delete (omp_ctx.variables);
1536 1.1 mrg }
1537 1.1 mrg break;
1538 1.1 mrg
1539 1.1 mrg case OMP_TARGET:
1540 1.1 mrg cfun->has_omp_target = true;
1541 1.1 mrg break;
1542 1.1 mrg
1543 1.1 mrg case TRY_BLOCK:
1544 1.1 mrg {
1545 1.1 mrg *walk_subtrees = 0;
1546 1.1 mrg tree try_block = wtd->try_block;
1547 1.1 mrg wtd->try_block = stmt;
1548 1.1 mrg cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1549 1.1 mrg wtd->try_block = try_block;
1550 1.1 mrg cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1551 1.1 mrg }
1552 1.1 mrg break;
1553 1.1 mrg
1554 1.1 mrg case MUST_NOT_THROW_EXPR:
1555 1.1 mrg /* MUST_NOT_THROW_COND might be something else with TM. */
1556 1.1 mrg if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1557 1.1 mrg {
1558 1.1 mrg *walk_subtrees = 0;
1559 1.1 mrg tree try_block = wtd->try_block;
1560 1.1 mrg wtd->try_block = stmt;
1561 1.1 mrg cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1562 1.1 mrg wtd->try_block = try_block;
1563 1.1 mrg }
1564 1.1 mrg break;
1565 1.1 mrg
1566 1.1 mrg case THROW_EXPR:
1567 1.1 mrg {
1568 1.1 mrg location_t loc = location_of (stmt);
1569 1.1 mrg if (warning_suppressed_p (stmt /* What warning? */))
1570 1.1 mrg /* Never mind. */;
1571 1.1 mrg else if (wtd->try_block)
1572 1.1 mrg {
1573 1.1 mrg if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
1574 1.1 mrg {
1575 1.1 mrg auto_diagnostic_group d;
1576 1.1 mrg if (warning_at (loc, OPT_Wterminate,
1577 1.1 mrg "%<throw%> will always call %<terminate%>")
1578 1.1 mrg && cxx_dialect >= cxx11
1579 1.1 mrg && DECL_DESTRUCTOR_P (current_function_decl))
1580 1.1 mrg inform (loc, "in C++11 destructors default to %<noexcept%>");
1581 1.1 mrg }
1582 1.1 mrg }
1583 1.1 mrg else
1584 1.1 mrg {
1585 1.1 mrg if (warn_cxx11_compat && cxx_dialect < cxx11
1586 1.1 mrg && DECL_DESTRUCTOR_P (current_function_decl)
1587 1.1 mrg && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1588 1.1 mrg == NULL_TREE)
1589 1.1 mrg && (get_defaulted_eh_spec (current_function_decl)
1590 1.1 mrg == empty_except_spec))
1591 1.1 mrg warning_at (loc, OPT_Wc__11_compat,
1592 1.1 mrg "in C++11 this %<throw%> will call %<terminate%> "
1593 1.1 mrg "because destructors default to %<noexcept%>");
1594 1.1 mrg }
1595 1.1 mrg }
1596 1.1 mrg break;
1597 1.1 mrg
1598 1.1 mrg case CONVERT_EXPR:
1599 1.1 mrg gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1600 1.1 mrg break;
1601 1.1 mrg
1602 1.1 mrg case SPACESHIP_EXPR:
1603 1.1 mrg *stmt_p = genericize_spaceship (*stmt_p);
1604 1.1 mrg break;
1605 1.1 mrg
1606 1.1 mrg case PTRMEM_CST:
1607 1.1 mrg /* By the time we get here we're handing off to the back end, so we don't
1608 1.1 mrg need or want to preserve PTRMEM_CST anymore. */
1609 1.1 mrg *stmt_p = cplus_expand_constant (stmt);
1610 1.1 mrg *walk_subtrees = 0;
1611 1.1 mrg break;
1612 1.1 mrg
1613 1.1 mrg case MEM_REF:
1614 1.1 mrg /* For MEM_REF, make sure not to sanitize the second operand even
1615 1.1 mrg if it has reference type. It is just an offset with a type
1616 1.1 mrg holding other information. There is no other processing we
1617 1.1 mrg need to do for INTEGER_CSTs, so just ignore the second argument
1618 1.1 mrg unconditionally. */
1619 1.1 mrg cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1620 1.1 mrg *walk_subtrees = 0;
1621 1.1 mrg break;
1622 1.1 mrg
1623 1.1 mrg case NOP_EXPR:
1624 1.1 mrg *stmt_p = predeclare_vla (*stmt_p);
1625 1.1 mrg if (!wtd->no_sanitize_p
1626 1.1 mrg && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
1627 1.1 mrg && TYPE_REF_P (TREE_TYPE (stmt)))
1628 1.1 mrg ubsan_maybe_instrument_reference (stmt_p);
1629 1.1 mrg break;
1630 1.1 mrg
1631 1.1 mrg case CALL_EXPR:
1632 1.1 mrg /* Evaluate function concept checks instead of treating them as
1633 1.1 mrg normal functions. */
1634 1.1 mrg if (concept_check_p (stmt))
1635 1.1 mrg {
1636 1.1 mrg *stmt_p = evaluate_concept_check (stmt);
1637 1.1 mrg * walk_subtrees = 0;
1638 1.1 mrg break;
1639 1.1 mrg }
1640 1.1 mrg
1641 1.1 mrg if (!wtd->no_sanitize_p
1642 1.1 mrg && sanitize_flags_p ((SANITIZE_NULL
1643 1.1 mrg | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
1644 1.1 mrg {
1645 1.1 mrg tree fn = CALL_EXPR_FN (stmt);
1646 1.1 mrg if (fn != NULL_TREE
1647 1.1 mrg && !error_operand_p (fn)
1648 1.1 mrg && INDIRECT_TYPE_P (TREE_TYPE (fn))
1649 1.1 mrg && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1650 1.1 mrg {
1651 1.1 mrg bool is_ctor
1652 1.1 mrg = TREE_CODE (fn) == ADDR_EXPR
1653 1.1 mrg && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1654 1.1 mrg && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1655 1.1 mrg if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1656 1.1 mrg ubsan_maybe_instrument_member_call (stmt, is_ctor);
1657 1.1 mrg if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
1658 1.1 mrg cp_ubsan_maybe_instrument_member_call (stmt);
1659 1.1 mrg }
1660 1.1 mrg else if (fn == NULL_TREE
1661 1.1 mrg && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1662 1.1 mrg && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
1663 1.1 mrg && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
1664 1.1 mrg *walk_subtrees = 0;
1665 1.1 mrg }
1666 1.1 mrg /* Fall through. */
1667 1.1 mrg case AGGR_INIT_EXPR:
1668 1.1 mrg /* For calls to a multi-versioned function, overload resolution
1669 1.1 mrg returns the function with the highest target priority, that is,
1670 1.1 mrg the version that will checked for dispatching first. If this
1671 1.1 mrg version is inlinable, a direct call to this version can be made
1672 1.1 mrg otherwise the call should go through the dispatcher. */
1673 1.1 mrg {
1674 1.1 mrg tree fn = cp_get_callee_fndecl_nofold (stmt);
1675 1.1 mrg if (fn && DECL_FUNCTION_VERSIONED (fn)
1676 1.1 mrg && (current_function_decl == NULL
1677 1.1 mrg || !targetm.target_option.can_inline_p (current_function_decl,
1678 1.1 mrg fn)))
1679 1.1 mrg if (tree dis = get_function_version_dispatcher (fn))
1680 1.1 mrg {
1681 1.1 mrg mark_versions_used (dis);
1682 1.1 mrg dis = build_address (dis);
1683 1.1 mrg if (TREE_CODE (stmt) == CALL_EXPR)
1684 1.1 mrg CALL_EXPR_FN (stmt) = dis;
1685 1.1 mrg else
1686 1.1 mrg AGGR_INIT_EXPR_FN (stmt) = dis;
1687 1.1 mrg }
1688 1.1 mrg }
1689 1.1 mrg break;
1690 1.1 mrg
1691 1.1 mrg case TARGET_EXPR:
1692 1.1 mrg if (TARGET_EXPR_INITIAL (stmt)
1693 1.1 mrg && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1694 1.1 mrg && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1695 1.1 mrg TARGET_EXPR_NO_ELIDE (stmt) = 1;
1696 1.1 mrg break;
1697 1.1 mrg
1698 1.1 mrg case TEMPLATE_ID_EXPR:
1699 1.1 mrg gcc_assert (concept_check_p (stmt));
1700 1.1 mrg /* Emit the value of the concept check. */
1701 1.1 mrg *stmt_p = evaluate_concept_check (stmt);
1702 1.1 mrg walk_subtrees = 0;
1703 1.1 mrg break;
1704 1.1 mrg
1705 1.1 mrg case OMP_DISTRIBUTE:
1706 1.1 mrg /* Need to explicitly instantiate copy ctors on class iterators of
1707 1.1 mrg composite distribute parallel for. */
1708 1.1 mrg if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
1709 1.1 mrg {
1710 1.1 mrg tree *data[4] = { NULL, NULL, NULL, NULL };
1711 1.1 mrg tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
1712 1.1 mrg find_combined_omp_for, data, NULL);
1713 1.1 mrg if (inner != NULL_TREE
1714 1.1 mrg && TREE_CODE (inner) == OMP_FOR)
1715 1.1 mrg {
1716 1.1 mrg for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
1717 1.1 mrg if (OMP_FOR_ORIG_DECLS (inner)
1718 1.1 mrg && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1719 1.1 mrg i)) == TREE_LIST
1720 1.1 mrg && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1721 1.1 mrg i)))
1722 1.1 mrg {
1723 1.1 mrg tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
1724 1.1 mrg /* Class iterators aren't allowed on OMP_SIMD, so the only
1725 1.1 mrg case we need to solve is distribute parallel for. */
1726 1.1 mrg gcc_assert (TREE_CODE (inner) == OMP_FOR
1727 1.1 mrg && data[1]);
1728 1.1 mrg tree orig_decl = TREE_PURPOSE (orig);
1729 1.1 mrg tree c, cl = NULL_TREE;
1730 1.1 mrg for (c = OMP_FOR_CLAUSES (inner);
1731 1.1 mrg c; c = OMP_CLAUSE_CHAIN (c))
1732 1.1 mrg if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1733 1.1 mrg || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
1734 1.1 mrg && OMP_CLAUSE_DECL (c) == orig_decl)
1735 1.1 mrg {
1736 1.1 mrg cl = c;
1737 1.1 mrg break;
1738 1.1 mrg }
1739 1.1 mrg if (cl == NULL_TREE)
1740 1.1 mrg {
1741 1.1 mrg for (c = OMP_PARALLEL_CLAUSES (*data[1]);
1742 1.1 mrg c; c = OMP_CLAUSE_CHAIN (c))
1743 1.1 mrg if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1744 1.1 mrg && OMP_CLAUSE_DECL (c) == orig_decl)
1745 1.1 mrg {
1746 1.1 mrg cl = c;
1747 1.1 mrg break;
1748 1.1 mrg }
1749 1.1 mrg }
1750 1.1 mrg if (cl)
1751 1.1 mrg {
1752 1.1 mrg orig_decl = require_complete_type (orig_decl);
1753 1.1 mrg tree inner_type = TREE_TYPE (orig_decl);
1754 1.1 mrg if (orig_decl == error_mark_node)
1755 1.1 mrg continue;
1756 1.1 mrg if (TYPE_REF_P (TREE_TYPE (orig_decl)))
1757 1.1 mrg inner_type = TREE_TYPE (inner_type);
1758 1.1 mrg
1759 1.1 mrg while (TREE_CODE (inner_type) == ARRAY_TYPE)
1760 1.1 mrg inner_type = TREE_TYPE (inner_type);
1761 1.1 mrg get_copy_ctor (inner_type, tf_warning_or_error);
1762 1.1 mrg }
1763 1.1 mrg }
1764 1.1 mrg }
1765 1.1 mrg }
1766 1.1 mrg /* FALLTHRU */
1767 1.1 mrg
1768 1.1 mrg case FOR_STMT:
1769 1.1 mrg case WHILE_STMT:
1770 1.1 mrg case DO_STMT:
1771 1.1 mrg case SWITCH_STMT:
1772 1.1 mrg case CONTINUE_STMT:
1773 1.1 mrg case BREAK_STMT:
1774 1.1 mrg case OMP_FOR:
1775 1.1 mrg case OMP_SIMD:
1776 1.1 mrg case OMP_LOOP:
1777 1.1 mrg case OACC_LOOP:
1778 1.1 mrg case STATEMENT_LIST:
1779 1.1 mrg /* These cases are handled by shared code. */
1780 1.1 mrg c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1781 1.1 mrg cp_genericize_r, cp_walk_subtrees);
1782 1.1 mrg break;
1783 1.1 mrg
1784 1.1 mrg case BIT_CAST_EXPR:
1785 1.1 mrg *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
1786 1.1 mrg TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1787 1.1 mrg break;
1788 1.1 mrg
1789 1.1 mrg default:
1790 1.1 mrg if (IS_TYPE_OR_DECL_P (stmt))
1791 1.1 mrg *walk_subtrees = 0;
1792 1.1 mrg break;
1793 1.1 mrg }
1794 1.1 mrg
1795 1.1 mrg p_set->add (*stmt_p);
1796 1.1 mrg
1797 1.1 mrg return NULL;
1798 1.1 mrg }
1799 1.1 mrg
1800 1.1 mrg /* Lower C++ front end trees to GENERIC in T_P. */
1801 1.1 mrg
1802 1.1 mrg static void
1803 1.1 mrg cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1804 1.1 mrg {
1805 1.1 mrg struct cp_genericize_data wtd;
1806 1.1 mrg
1807 1.1 mrg wtd.p_set = new hash_set<tree>;
1808 1.1 mrg wtd.bind_expr_stack.create (0);
1809 1.1 mrg wtd.omp_ctx = NULL;
1810 1.1 mrg wtd.try_block = NULL_TREE;
1811 1.1 mrg wtd.no_sanitize_p = false;
1812 1.1 mrg wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1813 1.1 mrg cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1814 1.1 mrg delete wtd.p_set;
1815 1.1 mrg if (sanitize_flags_p (SANITIZE_VPTR))
1816 1.1 mrg cp_ubsan_instrument_member_accesses (t_p);
1817 1.1 mrg }
1818 1.1 mrg
1819 1.1 mrg /* If a function that should end with a return in non-void
1820 1.1 mrg function doesn't obviously end with return, add ubsan
1821 1.1 mrg instrumentation code to verify it at runtime. If -fsanitize=return
1822 1.1 mrg is not enabled, instrument __builtin_unreachable. */
1823 1.1 mrg
1824 1.1 mrg static void
1825 1.1 mrg cp_maybe_instrument_return (tree fndecl)
1826 1.1 mrg {
1827 1.1 mrg if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1828 1.1 mrg || DECL_CONSTRUCTOR_P (fndecl)
1829 1.1 mrg || DECL_DESTRUCTOR_P (fndecl)
1830 1.1 mrg || !targetm.warn_func_return (fndecl))
1831 1.1 mrg return;
1832 1.1 mrg
1833 1.1 mrg if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1834 1.1 mrg /* Don't add __builtin_unreachable () if not optimizing, it will not
1835 1.1 mrg improve any optimizations in that case, just break UB code.
1836 1.1 mrg Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
1837 1.1 mrg UBSan covers this with ubsan_instrument_return above where sufficient
1838 1.1 mrg information is provided, while the __builtin_unreachable () below
1839 1.1 mrg if return sanitization is disabled will just result in hard to
1840 1.1 mrg understand runtime error without location. */
1841 1.1 mrg && (!optimize
1842 1.1 mrg || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1843 1.1 mrg return;
1844 1.1 mrg
1845 1.1 mrg tree t = DECL_SAVED_TREE (fndecl);
1846 1.1 mrg while (t)
1847 1.1 mrg {
1848 1.1 mrg switch (TREE_CODE (t))
1849 1.1 mrg {
1850 1.1 mrg case BIND_EXPR:
1851 1.1 mrg t = BIND_EXPR_BODY (t);
1852 1.1 mrg continue;
1853 1.1 mrg case TRY_FINALLY_EXPR:
1854 1.1 mrg case CLEANUP_POINT_EXPR:
1855 1.1 mrg t = TREE_OPERAND (t, 0);
1856 1.1 mrg continue;
1857 1.1 mrg case STATEMENT_LIST:
1858 1.1 mrg {
1859 1.1 mrg tree_stmt_iterator i = tsi_last (t);
1860 1.1 mrg while (!tsi_end_p (i))
1861 1.1 mrg {
1862 1.1 mrg tree p = tsi_stmt (i);
1863 1.1 mrg if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1864 1.1 mrg break;
1865 1.1 mrg tsi_prev (&i);
1866 1.1 mrg }
1867 1.1 mrg if (!tsi_end_p (i))
1868 1.1 mrg {
1869 1.1 mrg t = tsi_stmt (i);
1870 1.1 mrg continue;
1871 1.1 mrg }
1872 1.1 mrg }
1873 1.1 mrg break;
1874 1.1 mrg case RETURN_EXPR:
1875 1.1 mrg return;
1876 1.1 mrg default:
1877 1.1 mrg break;
1878 1.1 mrg }
1879 1.1 mrg break;
1880 1.1 mrg }
1881 1.1 mrg if (t == NULL_TREE)
1882 1.1 mrg return;
1883 1.1 mrg tree *p = &DECL_SAVED_TREE (fndecl);
1884 1.1 mrg if (TREE_CODE (*p) == BIND_EXPR)
1885 1.1 mrg p = &BIND_EXPR_BODY (*p);
1886 1.1 mrg
1887 1.1 mrg location_t loc = DECL_SOURCE_LOCATION (fndecl);
1888 1.1 mrg if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1889 1.1 mrg t = ubsan_instrument_return (loc);
1890 1.1 mrg else
1891 1.1 mrg {
1892 1.1 mrg tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1893 1.1 mrg t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1894 1.1 mrg }
1895 1.1 mrg
1896 1.1 mrg append_to_statement_list (t, p);
1897 1.1 mrg }
1898 1.1 mrg
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	/* Rewrite the parm to its by-reference passing type and mark
	   it so; it is no longer addressable in the GENERIC sense.  */
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  /* The variable now lives behind a reference, so its
		     value expression must read through it.  */
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  bc_state_t save_state;
  save_bc_state (&save_state);

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  /* Possibly append return-checking instrumentation to the body.  */
  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);
  restore_bc_state (&save_state);
}
1966 1.1 mrg
1967 1.1 mrg /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1969 1.1 mrg NULL if there is in fact nothing to do. ARG2 may be null if FN
1970 1.1 mrg actually only takes one argument. */
1971 1.1 mrg
static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the parameter type entries corresponding to ARG1 (and ARG2 if
     present); the remaining entries describe default arguments.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: build an explicit loop that applies FN to each
	 element, using a pointer that steps from the first element to
	 one past the last.  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Peel off all array dimensions to reach the base element.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 = START1 + total size of the array, i.e. one past the end.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* P1 (and P2) are the loop induction pointers.  */
      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label; the back edge is the COND_EXPR jump below.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      /* Emit the per-element call, discarding its value.  */
      t = build_call_a (fn, i, argarray);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
	t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance P1 (and P2) to the next element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Jump back to LAB while P1 has not reached END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call on the addresses of ARG1/ARG2.  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
	t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
2083 1.1 mrg
2084 1.1 mrg /* Return code to initialize DECL with its default constructor, or
2085 1.1 mrg NULL if there's nothing to do. */
2086 1.1 mrg
2087 1.1 mrg tree
2088 1.1 mrg cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2089 1.1 mrg {
2090 1.1 mrg tree info = CP_OMP_CLAUSE_INFO (clause);
2091 1.1 mrg tree ret = NULL;
2092 1.1 mrg
2093 1.1 mrg if (info)
2094 1.1 mrg ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2095 1.1 mrg
2096 1.1 mrg return ret;
2097 1.1 mrg }
2098 1.1 mrg
2099 1.1 mrg /* Return code to initialize DST with a copy constructor from SRC. */
2100 1.1 mrg
2101 1.1 mrg tree
2102 1.1 mrg cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2103 1.1 mrg {
2104 1.1 mrg tree info = CP_OMP_CLAUSE_INFO (clause);
2105 1.1 mrg tree ret = NULL;
2106 1.1 mrg
2107 1.1 mrg if (info)
2108 1.1 mrg ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2109 1.1 mrg if (ret == NULL)
2110 1.1 mrg ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2111 1.1 mrg
2112 1.1 mrg return ret;
2113 1.1 mrg }
2114 1.1 mrg
2115 1.1 mrg /* Similarly, except use an assignment operator instead. */
2116 1.1 mrg
2117 1.1 mrg tree
2118 1.1 mrg cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2119 1.1 mrg {
2120 1.1 mrg tree info = CP_OMP_CLAUSE_INFO (clause);
2121 1.1 mrg tree ret = NULL;
2122 1.1 mrg
2123 1.1 mrg if (info)
2124 1.1 mrg ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2125 1.1 mrg if (ret == NULL)
2126 1.1 mrg ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2127 1.1 mrg
2128 1.1 mrg return ret;
2129 1.1 mrg }
2130 1.1 mrg
2131 1.1 mrg /* Return code to destroy DECL. */
2132 1.1 mrg
2133 1.1 mrg tree
2134 1.1 mrg cxx_omp_clause_dtor (tree clause, tree decl)
2135 1.1 mrg {
2136 1.1 mrg tree info = CP_OMP_CLAUSE_INFO (clause);
2137 1.1 mrg tree ret = NULL;
2138 1.1 mrg
2139 1.1 mrg if (info)
2140 1.1 mrg ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2141 1.1 mrg
2142 1.1 mrg return ret;
2143 1.1 mrg }
2144 1.1 mrg
2145 1.1 mrg /* True if OpenMP should privatize what this DECL points to rather
2146 1.1 mrg than the DECL itself. */
2147 1.1 mrg
2148 1.1 mrg bool
2149 1.1 mrg cxx_omp_privatize_by_reference (const_tree decl)
2150 1.1 mrg {
2151 1.1 mrg return (TYPE_REF_P (TREE_TYPE (decl))
2152 1.1 mrg || is_invisiref_parm (decl));
2153 1.1 mrg }
2154 1.1 mrg
2155 1.1 mrg /* Return true if DECL is const qualified var having no mutable member. */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      /* Only look through the reference for invisible-reference parms;
	 other reference-typed decls are not handled here.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  /* Find the original named variable this RESULT_DECL replaced
	     and recover its const qualifier, if any.  */
	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
2197 1.1 mrg
2198 1.1 mrg /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2199 1.1 mrg of DECL is predetermined. */
2200 1.1 mrg
2201 1.1 mrg enum omp_clause_default_kind
2202 1.1 mrg cxx_omp_predetermined_sharing_1 (tree decl)
2203 1.1 mrg {
2204 1.1 mrg /* Static data members are predetermined shared. */
2205 1.1 mrg if (TREE_STATIC (decl))
2206 1.1 mrg {
2207 1.1 mrg tree ctx = CP_DECL_CONTEXT (decl);
2208 1.1 mrg if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2209 1.1 mrg return OMP_CLAUSE_DEFAULT_SHARED;
2210 1.1 mrg
2211 1.1 mrg if (c_omp_predefined_variable (decl))
2212 1.1 mrg return OMP_CLAUSE_DEFAULT_SHARED;
2213 1.1 mrg }
2214 1.1 mrg
2215 1.1 mrg /* this may not be specified in data-sharing clauses, still we need
2216 1.1 mrg to predetermined it firstprivate. */
2217 1.1 mrg if (decl == current_class_ptr)
2218 1.1 mrg return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2219 1.1 mrg
2220 1.1 mrg return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2221 1.1 mrg }
2222 1.1 mrg
2223 1.1 mrg /* Likewise, but also include the artificial vars. We don't want to
2224 1.1 mrg disallow the artificial vars being mentioned in explicit clauses,
2225 1.1 mrg as we use artificial vars e.g. for loop constructs with random
2226 1.1 mrg access iterators other than pointers, but during gimplification
2227 1.1 mrg we want to treat them as predetermined. */
2228 1.1 mrg
2229 1.1 mrg enum omp_clause_default_kind
2230 1.1 mrg cxx_omp_predetermined_sharing (tree decl)
2231 1.1 mrg {
2232 1.1 mrg enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2233 1.1 mrg if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2234 1.1 mrg return ret;
2235 1.1 mrg
2236 1.1 mrg /* Predetermine artificial variables holding integral values, those
2237 1.1 mrg are usually result of gimplify_one_sizepos or SAVE_EXPR
2238 1.1 mrg gimplification. */
2239 1.1 mrg if (VAR_P (decl)
2240 1.1 mrg && DECL_ARTIFICIAL (decl)
2241 1.1 mrg && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2242 1.1 mrg && !(DECL_LANG_SPECIFIC (decl)
2243 1.1 mrg && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2244 1.1 mrg return OMP_CLAUSE_DEFAULT_SHARED;
2245 1.1 mrg
2246 1.1 mrg /* Similarly for typeinfo symbols. */
2247 1.1 mrg if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2248 1.1 mrg return OMP_CLAUSE_DEFAULT_SHARED;
2249 1.1 mrg
2250 1.1 mrg return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2251 1.1 mrg }
2252 1.1 mrg
2253 1.1 mrg enum omp_clause_defaultmap_kind
2254 1.1 mrg cxx_omp_predetermined_mapping (tree decl)
2255 1.1 mrg {
2256 1.1 mrg /* Predetermine artificial variables holding integral values, those
2257 1.1 mrg are usually result of gimplify_one_sizepos or SAVE_EXPR
2258 1.1 mrg gimplification. */
2259 1.1 mrg if (VAR_P (decl)
2260 1.1 mrg && DECL_ARTIFICIAL (decl)
2261 1.1 mrg && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2262 1.1 mrg && !(DECL_LANG_SPECIFIC (decl)
2263 1.1 mrg && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2264 1.1 mrg return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2265 1.1 mrg
2266 1.1 mrg if (c_omp_predefined_variable (decl))
2267 1.1 mrg return OMP_CLAUSE_DEFAULTMAP_TO;
2268 1.1 mrg
2269 1.1 mrg return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2270 1.1 mrg }
2271 1.1 mrg
2272 1.1 mrg /* Finalize an implicitly determined clause. */
2273 1.1 mrg
void
cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only firstprivate, private and lastprivate-loop-iv clauses need
     C++-specific finishing here.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
				     true))
    make_shared = true;

  /* On error (or when cxx_omp_create_clause_info reports failure),
     degrade the clause to plain shared.  */
  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
2316 1.1 mrg
2317 1.1 mrg /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2318 1.1 mrg disregarded in OpenMP construct, because it is going to be
2319 1.1 mrg remapped during OpenMP lowering. SHARED is true if DECL
2320 1.1 mrg is going to be shared, false if it is going to be privatized. */
2321 1.1 mrg
2322 1.1 mrg bool
2323 1.1 mrg cxx_omp_disregard_value_expr (tree decl, bool shared)
2324 1.1 mrg {
2325 1.1 mrg if (shared)
2326 1.1 mrg return false;
2327 1.1 mrg if (VAR_P (decl)
2328 1.1 mrg && DECL_HAS_VALUE_EXPR_P (decl)
2329 1.1 mrg && DECL_ARTIFICIAL (decl)
2330 1.1 mrg && DECL_LANG_SPECIFIC (decl)
2331 1.1 mrg && DECL_OMP_PRIVATIZED_MEMBER (decl))
2332 1.1 mrg return true;
2333 1.1 mrg if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2334 1.1 mrg return true;
2335 1.1 mrg return false;
2336 1.1 mrg }
2337 1.1 mrg
2338 1.1 mrg /* Fold expression X which is used as an rvalue if RVAL is true. */
2339 1.1 mrg
2340 1.1 mrg tree
2341 1.1 mrg cp_fold_maybe_rvalue (tree x, bool rval)
2342 1.1 mrg {
2343 1.1 mrg while (true)
2344 1.1 mrg {
2345 1.1 mrg x = cp_fold (x);
2346 1.1 mrg if (rval)
2347 1.1 mrg x = mark_rvalue_use (x);
2348 1.1 mrg if (rval && DECL_P (x)
2349 1.1 mrg && !TYPE_REF_P (TREE_TYPE (x)))
2350 1.1 mrg {
2351 1.1 mrg tree v = decl_constant_value (x);
2352 1.1 mrg if (v != x && v != error_mark_node)
2353 1.1 mrg {
2354 1.1 mrg x = v;
2355 1.1 mrg continue;
2356 1.1 mrg }
2357 1.1 mrg }
2358 1.1 mrg break;
2359 1.1 mrg }
2360 1.1 mrg return x;
2361 1.1 mrg }
2362 1.1 mrg
2363 1.1 mrg /* Fold expression X which is used as an rvalue. */
2364 1.1 mrg
2365 1.1 mrg tree
2366 1.1 mrg cp_fold_rvalue (tree x)
2367 1.1 mrg {
2368 1.1 mrg return cp_fold_maybe_rvalue (x, true);
2369 1.1 mrg }
2370 1.1 mrg
2371 1.1 mrg /* Perform folding on expression X. */
2372 1.1 mrg
2373 1.1 mrg tree
2374 1.1 mrg cp_fully_fold (tree x)
2375 1.1 mrg {
2376 1.1 mrg if (processing_template_decl)
2377 1.1 mrg return x;
2378 1.1 mrg /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2379 1.1 mrg have to call both. */
2380 1.1 mrg if (cxx_dialect >= cxx11)
2381 1.1 mrg {
2382 1.1 mrg x = maybe_constant_value (x);
2383 1.1 mrg /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2384 1.1 mrg a TARGET_EXPR; undo that here. */
2385 1.1 mrg if (TREE_CODE (x) == TARGET_EXPR)
2386 1.1 mrg x = TARGET_EXPR_INITIAL (x);
2387 1.1 mrg else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2388 1.1 mrg && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2389 1.1 mrg && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2390 1.1 mrg x = TREE_OPERAND (x, 0);
2391 1.1 mrg }
2392 1.1 mrg return cp_fold_rvalue (x);
2393 1.1 mrg }
2394 1.1 mrg
2395 1.1 mrg /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2396 1.1 mrg in some cases. */
2397 1.1 mrg
2398 1.1 mrg tree
2399 1.1 mrg cp_fully_fold_init (tree x)
2400 1.1 mrg {
2401 1.1 mrg if (processing_template_decl)
2402 1.1 mrg return x;
2403 1.1 mrg x = cp_fully_fold (x);
2404 1.1 mrg cp_fold_data data (/*genericize*/false);
2405 1.1 mrg cp_walk_tree (&x, cp_fold_r, &data, NULL);
2406 1.1 mrg return x;
2407 1.1 mrg }
2408 1.1 mrg
2409 1.1 mrg /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2410 1.1 mrg and certain changes are made to the folding done. Or should be (FIXME). We
2411 1.1 mrg never touch maybe_const, as it is only used for the C front-end
2412 1.1 mrg C_MAYBE_CONST_EXPR. */
2413 1.1 mrg
2414 1.1 mrg tree
2415 1.1 mrg c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2416 1.1 mrg {
2417 1.1 mrg return cp_fold_maybe_rvalue (x, !lval);
2418 1.1 mrg }
2419 1.1 mrg
2420 1.1 mrg static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2421 1.1 mrg
2422 1.1 mrg /* Dispose of the whole FOLD_CACHE. */
2423 1.1 mrg
2424 1.1 mrg void
2425 1.1 mrg clear_fold_cache (void)
2426 1.1 mrg {
2427 1.1 mrg if (fold_cache != NULL)
2428 1.1 mrg fold_cache->empty ();
2429 1.1 mrg }
2430 1.1 mrg
2431 1.1 mrg /* This function tries to fold an expression X.
2432 1.1 mrg To avoid combinatorial explosion, folding results are kept in fold_cache.
2433 1.1 mrg If X is invalid, we don't fold at all.
2434 1.1 mrg For performance reasons we don't cache expressions representing a
2435 1.1 mrg declaration or constant.
2436 1.1 mrg Function returns X or its folded variant. */
2437 1.1 mrg
2438 1.1 mrg static tree
2439 1.1 mrg cp_fold (tree x)
2440 1.1 mrg {
2441 1.1 mrg tree op0, op1, op2, op3;
2442 1.1 mrg tree org_x = x, r = NULL_TREE;
2443 1.1 mrg enum tree_code code;
2444 1.1 mrg location_t loc;
2445 1.1 mrg bool rval_ops = true;
2446 1.1 mrg
2447 1.1 mrg if (!x || x == error_mark_node)
2448 1.1 mrg return x;
2449 1.1 mrg
2450 1.1 mrg if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2451 1.1 mrg return x;
2452 1.1 mrg
2453 1.1 mrg /* Don't bother to cache DECLs or constants. */
2454 1.1 mrg if (DECL_P (x) || CONSTANT_CLASS_P (x))
2455 1.1 mrg return x;
2456 1.1 mrg
2457 1.1 mrg if (fold_cache == NULL)
2458 1.1 mrg fold_cache = hash_map<tree, tree>::create_ggc (101);
2459 1.1 mrg
2460 1.1 mrg if (tree *cached = fold_cache->get (x))
2461 1.1 mrg {
2462 1.1 mrg /* unshare_expr doesn't recurse into SAVE_EXPRs. If SAVE_EXPR's
2463 1.1 mrg argument has been folded into a tree invariant, make sure it is
2464 1.1 mrg unshared. See PR112727. */
2465 1.1 mrg if (TREE_CODE (x) == SAVE_EXPR && *cached != x)
2466 1.1 mrg return unshare_expr (*cached);
2467 1.1 mrg return *cached;
2468 1.1 mrg }
2469 1.1 mrg
2470 1.1 mrg uid_sensitive_constexpr_evaluation_checker c;
2471 1.1 mrg
2472 1.1 mrg code = TREE_CODE (x);
2473 1.1 mrg switch (code)
2474 1.1 mrg {
2475 1.1 mrg case CLEANUP_POINT_EXPR:
2476 1.1 mrg /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2477 1.1 mrg effects. */
2478 1.1 mrg r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2479 1.1 mrg if (!TREE_SIDE_EFFECTS (r))
2480 1.1 mrg x = r;
2481 1.1 mrg break;
2482 1.1 mrg
2483 1.1 mrg case SIZEOF_EXPR:
2484 1.1 mrg x = fold_sizeof_expr (x);
2485 1.1 mrg break;
2486 1.1 mrg
2487 1.1 mrg case VIEW_CONVERT_EXPR:
2488 1.1 mrg rval_ops = false;
2489 1.1 mrg /* FALLTHRU */
2490 1.1 mrg case CONVERT_EXPR:
2491 1.1 mrg case NOP_EXPR:
2492 1.1 mrg case NON_LVALUE_EXPR:
2493 1.1 mrg
2494 1.1 mrg if (VOID_TYPE_P (TREE_TYPE (x)))
2495 1.1 mrg {
2496 1.1 mrg /* This is just to make sure we don't end up with casts to
2497 1.1 mrg void from error_mark_node. If we just return x, then
2498 1.1 mrg cp_fold_r might fold the operand into error_mark_node and
2499 1.1 mrg leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2500 1.1 mrg during gimplification doesn't like such casts.
2501 1.1 mrg Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
2502 1.1 mrg folding of the operand should be in the caches and if in cp_fold_r
2503 1.1 mrg it will modify it in place. */
2504 1.1 mrg op0 = cp_fold (TREE_OPERAND (x, 0));
2505 1.1 mrg if (op0 == error_mark_node)
2506 1.1 mrg x = error_mark_node;
2507 1.1 mrg break;
2508 1.1 mrg }
2509 1.1 mrg
2510 1.1 mrg loc = EXPR_LOCATION (x);
2511 1.1 mrg op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2512 1.1 mrg
2513 1.1 mrg if (code == CONVERT_EXPR
2514 1.1 mrg && SCALAR_TYPE_P (TREE_TYPE (x))
2515 1.1 mrg && op0 != void_node)
2516 1.1 mrg /* During parsing we used convert_to_*_nofold; re-convert now using the
2517 1.1 mrg folding variants, since fold() doesn't do those transformations. */
2518 1.1 mrg x = fold (convert (TREE_TYPE (x), op0));
2519 1.1 mrg else if (op0 != TREE_OPERAND (x, 0))
2520 1.1 mrg {
2521 1.1 mrg if (op0 == error_mark_node)
2522 1.1 mrg x = error_mark_node;
2523 1.1 mrg else
2524 1.1 mrg x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2525 1.1 mrg }
2526 1.1 mrg else
2527 1.1 mrg x = fold (x);
2528 1.1 mrg
2529 1.1 mrg /* Conversion of an out-of-range value has implementation-defined
2530 1.1 mrg behavior; the language considers it different from arithmetic
2531 1.1 mrg overflow, which is undefined. */
2532 1.1 mrg if (TREE_CODE (op0) == INTEGER_CST
2533 1.1 mrg && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2534 1.1 mrg TREE_OVERFLOW (x) = false;
2535 1.1 mrg
2536 1.1 mrg break;
2537 1.1 mrg
2538 1.1 mrg case INDIRECT_REF:
2539 1.1 mrg /* We don't need the decltype(auto) obfuscation anymore. */
2540 1.1 mrg if (REF_PARENTHESIZED_P (x))
2541 1.1 mrg {
2542 1.1 mrg tree p = maybe_undo_parenthesized_ref (x);
2543 1.1 mrg if (p != x)
2544 1.1 mrg return cp_fold (p);
2545 1.1 mrg }
2546 1.1 mrg goto unary;
2547 1.1 mrg
2548 1.1 mrg case ADDR_EXPR:
2549 1.1 mrg loc = EXPR_LOCATION (x);
2550 1.1 mrg op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2551 1.1 mrg
2552 1.1 mrg /* Cope with user tricks that amount to offsetof. */
2553 1.1 mrg if (op0 != error_mark_node
2554 1.1 mrg && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2555 1.1 mrg {
2556 1.1 mrg tree val = get_base_address (op0);
2557 1.1 mrg if (val
2558 1.1 mrg && INDIRECT_REF_P (val)
2559 1.1 mrg && COMPLETE_TYPE_P (TREE_TYPE (val))
2560 1.1 mrg && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2561 1.1 mrg {
2562 1.1 mrg val = TREE_OPERAND (val, 0);
2563 1.1 mrg STRIP_NOPS (val);
2564 1.1 mrg val = maybe_constant_value (val);
2565 1.1 mrg if (TREE_CODE (val) == INTEGER_CST)
2566 1.1 mrg return fold_offsetof (op0, TREE_TYPE (x));
2567 1.1 mrg }
2568 1.1 mrg }
2569 1.1 mrg goto finish_unary;
2570 1.1 mrg
2571 1.1 mrg case REALPART_EXPR:
2572 1.1 mrg case IMAGPART_EXPR:
2573 1.1 mrg rval_ops = false;
2574 1.1 mrg /* FALLTHRU */
2575 1.1 mrg case CONJ_EXPR:
2576 1.1 mrg case FIX_TRUNC_EXPR:
2577 1.1 mrg case FLOAT_EXPR:
2578 1.1 mrg case NEGATE_EXPR:
2579 1.1 mrg case ABS_EXPR:
2580 1.1 mrg case ABSU_EXPR:
2581 1.1 mrg case BIT_NOT_EXPR:
2582 1.1 mrg case TRUTH_NOT_EXPR:
2583 1.1 mrg case FIXED_CONVERT_EXPR:
2584 1.1 mrg unary:
2585 1.1 mrg
2586 1.1 mrg loc = EXPR_LOCATION (x);
2587 1.1 mrg op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2588 1.1 mrg
2589 1.1 mrg finish_unary:
2590 1.1 mrg if (op0 != TREE_OPERAND (x, 0))
2591 1.1 mrg {
2592 1.1 mrg if (op0 == error_mark_node)
2593 1.1 mrg x = error_mark_node;
2594 1.1 mrg else
2595 1.1 mrg {
2596 1.1 mrg x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2597 1.1 mrg if (code == INDIRECT_REF
2598 1.1 mrg && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2599 1.1 mrg {
2600 1.1 mrg TREE_READONLY (x) = TREE_READONLY (org_x);
2601 1.1 mrg TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2602 1.1 mrg TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2603 1.1 mrg }
2604 1.1 mrg }
2605 1.1 mrg }
2606 1.1 mrg else
2607 1.1 mrg x = fold (x);
2608 1.1 mrg
2609 1.1 mrg gcc_assert (TREE_CODE (x) != COND_EXPR
2610 1.1 mrg || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2611 1.1 mrg break;
2612 1.1 mrg
2613 1.1 mrg case UNARY_PLUS_EXPR:
2614 1.1 mrg op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2615 1.1 mrg if (op0 == error_mark_node)
2616 1.1 mrg x = error_mark_node;
2617 1.1 mrg else
2618 1.1 mrg x = fold_convert (TREE_TYPE (x), op0);
2619 1.1 mrg break;
2620 1.1 mrg
2621 1.1 mrg case POSTDECREMENT_EXPR:
2622 1.1 mrg case POSTINCREMENT_EXPR:
2623 1.1 mrg case INIT_EXPR:
2624 1.1 mrg case PREDECREMENT_EXPR:
2625 1.1 mrg case PREINCREMENT_EXPR:
2626 1.1 mrg case COMPOUND_EXPR:
2627 1.1 mrg case MODIFY_EXPR:
2628 1.1 mrg rval_ops = false;
2629 1.1 mrg /* FALLTHRU */
2630 1.1 mrg case POINTER_PLUS_EXPR:
2631 1.1 mrg case PLUS_EXPR:
2632 1.1 mrg case POINTER_DIFF_EXPR:
2633 1.1 mrg case MINUS_EXPR:
2634 1.1 mrg case MULT_EXPR:
2635 1.1 mrg case TRUNC_DIV_EXPR:
2636 1.1 mrg case CEIL_DIV_EXPR:
2637 1.1 mrg case FLOOR_DIV_EXPR:
2638 1.1 mrg case ROUND_DIV_EXPR:
2639 1.1 mrg case TRUNC_MOD_EXPR:
2640 1.1 mrg case CEIL_MOD_EXPR:
2641 1.1 mrg case ROUND_MOD_EXPR:
2642 1.1 mrg case RDIV_EXPR:
2643 1.1 mrg case EXACT_DIV_EXPR:
2644 1.1 mrg case MIN_EXPR:
2645 1.1 mrg case MAX_EXPR:
2646 1.1 mrg case LSHIFT_EXPR:
2647 1.1 mrg case RSHIFT_EXPR:
2648 1.1 mrg case LROTATE_EXPR:
2649 1.1 mrg case RROTATE_EXPR:
2650 1.1 mrg case BIT_AND_EXPR:
2651 1.1 mrg case BIT_IOR_EXPR:
2652 1.1 mrg case BIT_XOR_EXPR:
2653 1.1 mrg case TRUTH_AND_EXPR:
2654 1.1 mrg case TRUTH_ANDIF_EXPR:
2655 1.1 mrg case TRUTH_OR_EXPR:
2656 1.1 mrg case TRUTH_ORIF_EXPR:
2657 1.1 mrg case TRUTH_XOR_EXPR:
2658 1.1 mrg case LT_EXPR: case LE_EXPR:
2659 1.1 mrg case GT_EXPR: case GE_EXPR:
2660 1.1 mrg case EQ_EXPR: case NE_EXPR:
2661 1.1 mrg case UNORDERED_EXPR: case ORDERED_EXPR:
2662 1.1 mrg case UNLT_EXPR: case UNLE_EXPR:
2663 1.1 mrg case UNGT_EXPR: case UNGE_EXPR:
2664 1.1 mrg case UNEQ_EXPR: case LTGT_EXPR:
2665 1.1 mrg case RANGE_EXPR: case COMPLEX_EXPR:
2666 1.1 mrg
2667 1.1 mrg loc = EXPR_LOCATION (x);
2668 1.1 mrg op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2669 1.1 mrg op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2670 1.1 mrg
2671 1.1 mrg /* decltype(nullptr) has only one value, so optimize away all comparisons
2672 1.1 mrg with that type right away, keeping them in the IL causes troubles for
2673 1.1 mrg various optimizations. */
2674 1.1 mrg if (COMPARISON_CLASS_P (org_x)
2675 1.1 mrg && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
2676 1.1 mrg && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
2677 1.1 mrg {
2678 1.1 mrg switch (code)
2679 1.1 mrg {
2680 1.1 mrg case EQ_EXPR:
2681 1.1 mrg x = constant_boolean_node (true, TREE_TYPE (x));
2682 1.1 mrg break;
2683 1.1 mrg case NE_EXPR:
2684 1.1 mrg x = constant_boolean_node (false, TREE_TYPE (x));
2685 1.1 mrg break;
2686 1.1 mrg default:
2687 1.1 mrg gcc_unreachable ();
2688 1.1 mrg }
2689 1.1 mrg return omit_two_operands_loc (loc, TREE_TYPE (x), x,
2690 1.1 mrg op0, op1);
2691 1.1 mrg }
2692 1.1 mrg
2693 1.1 mrg if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2694 1.1 mrg {
2695 1.1 mrg if (op0 == error_mark_node || op1 == error_mark_node)
2696 1.1 mrg x = error_mark_node;
2697 1.1 mrg else
2698 1.1 mrg x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2699 1.1 mrg }
2700 1.1 mrg else
2701 1.1 mrg x = fold (x);
2702 1.1 mrg
2703 1.1 mrg /* This is only needed for -Wnonnull-compare and only if
2704 1.1 mrg TREE_NO_WARNING (org_x), but to avoid that option affecting code
2705 1.1 mrg generation, we do it always. */
2706 1.1 mrg if (COMPARISON_CLASS_P (org_x))
2707 1.1 mrg {
2708 1.1 mrg if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2709 1.1 mrg ;
2710 1.1 mrg else if (COMPARISON_CLASS_P (x))
2711 1.1 mrg {
2712 1.1 mrg if (warn_nonnull_compare
2713 1.1 mrg && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2714 1.1 mrg suppress_warning (x, OPT_Wnonnull_compare);
2715 1.1 mrg }
2716 1.1 mrg /* Otherwise give up on optimizing these, let GIMPLE folders
2717 1.1 mrg optimize those later on. */
2718 1.1 mrg else if (op0 != TREE_OPERAND (org_x, 0)
2719 1.1 mrg || op1 != TREE_OPERAND (org_x, 1))
2720 1.1 mrg {
2721 1.1 mrg x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2722 1.1 mrg if (warn_nonnull_compare
2723 1.1 mrg && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2724 1.1 mrg suppress_warning (x, OPT_Wnonnull_compare);
2725 1.1 mrg }
2726 1.1 mrg else
2727 1.1 mrg x = org_x;
2728 1.1 mrg }
2729 1.1 mrg
2730 1.1 mrg break;
2731 1.1 mrg
2732 1.1 mrg case VEC_COND_EXPR:
2733 1.1 mrg case COND_EXPR:
2734 1.1 mrg loc = EXPR_LOCATION (x);
2735 1.1 mrg op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2736 1.1 mrg op1 = cp_fold (TREE_OPERAND (x, 1));
2737 1.1 mrg op2 = cp_fold (TREE_OPERAND (x, 2));
2738 1.1 mrg
2739 1.1 mrg if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2740 1.1 mrg {
2741 1.1 mrg warning_sentinel s (warn_int_in_bool_context);
2742 1.1 mrg if (!VOID_TYPE_P (TREE_TYPE (op1)))
2743 1.1 mrg op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2744 1.1 mrg if (!VOID_TYPE_P (TREE_TYPE (op2)))
2745 1.1 mrg op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2746 1.1 mrg }
2747 1.1 mrg else if (VOID_TYPE_P (TREE_TYPE (x)))
2748 1.1 mrg {
2749 1.1 mrg if (TREE_CODE (op0) == INTEGER_CST)
2750 1.1 mrg {
2751 1.1 mrg /* If the condition is constant, fold can fold away
2752 1.1 mrg the COND_EXPR. If some statement-level uses of COND_EXPR
2753 1.1 mrg have one of the branches NULL, avoid folding crash. */
2754 1.1 mrg if (!op1)
2755 1.1 mrg op1 = build_empty_stmt (loc);
2756 1.1 mrg if (!op2)
2757 1.1 mrg op2 = build_empty_stmt (loc);
2758 1.1 mrg }
2759 1.1 mrg else
2760 1.1 mrg {
2761 1.1 mrg /* Otherwise, don't bother folding a void condition, since
2762 1.1 mrg it can't produce a constant value. */
2763 1.1 mrg if (op0 != TREE_OPERAND (x, 0)
2764 1.1 mrg || op1 != TREE_OPERAND (x, 1)
2765 1.1 mrg || op2 != TREE_OPERAND (x, 2))
2766 1.1 mrg x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2767 1.1 mrg break;
2768 1.1 mrg }
2769 1.1 mrg }
2770 1.1 mrg
2771 1.1 mrg if (op0 != TREE_OPERAND (x, 0)
2772 1.1 mrg || op1 != TREE_OPERAND (x, 1)
2773 1.1 mrg || op2 != TREE_OPERAND (x, 2))
2774 1.1 mrg {
2775 1.1 mrg if (op0 == error_mark_node
2776 1.1 mrg || op1 == error_mark_node
2777 1.1 mrg || op2 == error_mark_node)
2778 1.1 mrg x = error_mark_node;
2779 1.1 mrg else
2780 1.1 mrg x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2781 1.1 mrg }
2782 1.1 mrg else
2783 1.1 mrg x = fold (x);
2784 1.1 mrg
2785 1.1 mrg /* A COND_EXPR might have incompatible types in branches if one or both
2786 1.1 mrg arms are bitfields. If folding exposed such a branch, fix it up. */
2787 1.1 mrg if (TREE_CODE (x) != code
2788 1.1 mrg && x != error_mark_node
2789 1.1 mrg && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2790 1.1 mrg x = fold_convert (TREE_TYPE (org_x), x);
2791 1.1 mrg
2792 1.1 mrg break;
2793 1.1 mrg
2794 1.1 mrg case CALL_EXPR:
2795 1.1 mrg {
2796 1.1 mrg tree callee = get_callee_fndecl (x);
2797 1.1 mrg
2798 1.1 mrg /* "Inline" calls to std::move/forward and other cast-like functions
2799 1.1 mrg by simply folding them into a corresponding cast to their return
2800 1.1 mrg type. This is cheaper than relying on the middle end to do so, and
2801 1.1 mrg also means we avoid generating useless debug info for them at all.
2802 1.1 mrg
2803 1.1 mrg At this point the argument has already been converted into a
2804 1.1 mrg reference, so it suffices to use a NOP_EXPR to express the
2805 1.1 mrg cast. */
2806 1.1 mrg if ((OPTION_SET_P (flag_fold_simple_inlines)
2807 1.1 mrg ? flag_fold_simple_inlines
2808 1.1 mrg : !flag_no_inline)
2809 1.1 mrg && call_expr_nargs (x) == 1
2810 1.1 mrg && decl_in_std_namespace_p (callee)
2811 1.1 mrg && DECL_NAME (callee) != NULL_TREE
2812 1.1 mrg && (id_equal (DECL_NAME (callee), "move")
2813 1.1 mrg || id_equal (DECL_NAME (callee), "forward")
2814 1.1 mrg || id_equal (DECL_NAME (callee), "addressof")
2815 1.1 mrg /* This addressof equivalent is used heavily in libstdc++. */
2816 1.1 mrg || id_equal (DECL_NAME (callee), "__addressof")
2817 1.1 mrg || id_equal (DECL_NAME (callee), "as_const")))
2818 1.1 mrg {
2819 1.1 mrg r = CALL_EXPR_ARG (x, 0);
2820 1.1 mrg /* Check that the return and argument types are sane before
2821 1.1 mrg folding. */
2822 1.1 mrg if (INDIRECT_TYPE_P (TREE_TYPE (x))
2823 1.1 mrg && INDIRECT_TYPE_P (TREE_TYPE (r)))
2824 1.1 mrg {
2825 1.1 mrg if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
2826 1.1 mrg r = build_nop (TREE_TYPE (x), r);
2827 1.1 mrg x = cp_fold (r);
2828 1.1 mrg break;
2829 1.1 mrg }
2830 1.1 mrg }
2831 1.1 mrg
2832 1.1 mrg int sv = optimize, nw = sv;
2833 1.1 mrg
2834 1.1 mrg /* Some built-in function calls will be evaluated at compile-time in
2835 1.1 mrg fold (). Set optimize to 1 when folding __builtin_constant_p inside
2836 1.1 mrg a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2837 1.1 mrg if (callee && fndecl_built_in_p (callee) && !optimize
2838 1.1 mrg && DECL_IS_BUILTIN_CONSTANT_P (callee)
2839 1.1 mrg && current_function_decl
2840 1.1 mrg && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2841 1.1 mrg nw = 1;
2842 1.1 mrg
2843 1.1 mrg if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
2844 1.1 mrg {
2845 1.1 mrg switch (DECL_FE_FUNCTION_CODE (callee))
2846 1.1 mrg {
2847 1.1 mrg /* Defer folding __builtin_is_constant_evaluated. */
2848 1.1 mrg case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
2849 1.1 mrg break;
2850 1.1 mrg case CP_BUILT_IN_SOURCE_LOCATION:
2851 1.1 mrg x = fold_builtin_source_location (EXPR_LOCATION (x));
2852 1.1 mrg break;
2853 1.1 mrg case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
2854 1.1 mrg x = fold_builtin_is_corresponding_member
2855 1.1 mrg (EXPR_LOCATION (x), call_expr_nargs (x),
2856 1.1 mrg &CALL_EXPR_ARG (x, 0));
2857 1.1 mrg break;
2858 1.1 mrg case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
2859 1.1 mrg x = fold_builtin_is_pointer_inverconvertible_with_class
2860 1.1 mrg (EXPR_LOCATION (x), call_expr_nargs (x),
2861 1.1 mrg &CALL_EXPR_ARG (x, 0));
2862 1.1 mrg break;
2863 1.1 mrg default:
2864 1.1 mrg break;
2865 1.1 mrg }
2866 1.1 mrg break;
2867 1.1 mrg }
2868 1.1 mrg
2869 1.1 mrg if (callee
2870 1.1 mrg && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2871 1.1 mrg BUILT_IN_FRONTEND))
2872 1.1 mrg {
2873 1.1 mrg x = fold_builtin_source_location (EXPR_LOCATION (x));
2874 1.1 mrg break;
2875 1.1 mrg }
2876 1.1 mrg
2877 1.1 mrg bool changed = false;
2878 1.1 mrg int m = call_expr_nargs (x);
2879 1.1 mrg for (int i = 0; i < m; i++)
2880 1.1 mrg {
2881 1.1 mrg r = cp_fold (CALL_EXPR_ARG (x, i));
2882 1.1 mrg if (r != CALL_EXPR_ARG (x, i))
2883 1.1 mrg {
2884 1.1 mrg if (r == error_mark_node)
2885 1.1 mrg {
2886 1.1 mrg x = error_mark_node;
2887 1.1 mrg break;
2888 1.1 mrg }
2889 1.1 mrg if (!changed)
2890 1.1 mrg x = copy_node (x);
2891 1.1 mrg CALL_EXPR_ARG (x, i) = r;
2892 1.1 mrg changed = true;
2893 1.1 mrg }
2894 1.1 mrg }
2895 1.1 mrg if (x == error_mark_node)
2896 1.1 mrg break;
2897 1.1 mrg
2898 1.1 mrg optimize = nw;
2899 1.1 mrg r = fold (x);
2900 1.1 mrg optimize = sv;
2901 1.1 mrg
2902 1.1 mrg if (TREE_CODE (r) != CALL_EXPR)
2903 1.1 mrg {
2904 1.1 mrg x = cp_fold (r);
2905 1.1 mrg break;
2906 1.1 mrg }
2907 1.1 mrg
2908 1.1 mrg optimize = nw;
2909 1.1 mrg
2910 1.1 mrg /* Invoke maybe_constant_value for functions declared
2911 1.1 mrg constexpr and not called with AGGR_INIT_EXPRs.
2912 1.1 mrg TODO:
2913 1.1 mrg Do constexpr expansion of expressions where the call itself is not
2914 1.1 mrg constant, but the call followed by an INDIRECT_REF is. */
2915 1.1 mrg if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2916 1.1 mrg && !flag_no_inline)
2917 1.1 mrg r = maybe_constant_value (x);
2918 1.1 mrg optimize = sv;
2919 1.1 mrg
2920 1.1 mrg if (TREE_CODE (r) != CALL_EXPR)
2921 1.1 mrg {
2922 1.1 mrg if (DECL_CONSTRUCTOR_P (callee))
2923 1.1 mrg {
2924 1.1 mrg loc = EXPR_LOCATION (x);
2925 1.1 mrg tree s = build_fold_indirect_ref_loc (loc,
2926 1.1 mrg CALL_EXPR_ARG (x, 0));
2927 1.1 mrg r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2928 1.1 mrg }
2929 1.1 mrg x = r;
2930 1.1 mrg break;
2931 1.1 mrg }
2932 1.1 mrg
2933 1.1 mrg break;
2934 1.1 mrg }
2935 1.1 mrg
2936 1.1 mrg case CONSTRUCTOR:
2937 1.1 mrg {
2938 1.1 mrg unsigned i;
2939 1.1 mrg constructor_elt *p;
2940 1.1 mrg vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2941 1.1 mrg vec<constructor_elt, va_gc> *nelts = NULL;
2942 1.1 mrg FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2943 1.1 mrg {
2944 1.1 mrg tree op = cp_fold (p->value);
2945 1.1 mrg if (op != p->value)
2946 1.1 mrg {
2947 1.1 mrg if (op == error_mark_node)
2948 1.1 mrg {
2949 1.1 mrg x = error_mark_node;
2950 1.1 mrg vec_free (nelts);
2951 1.1 mrg break;
2952 1.1 mrg }
2953 1.1 mrg if (nelts == NULL)
2954 1.1 mrg nelts = elts->copy ();
2955 1.1 mrg (*nelts)[i].value = op;
2956 1.1 mrg }
2957 1.1 mrg }
2958 1.1 mrg if (nelts)
2959 1.1 mrg {
2960 1.1 mrg x = build_constructor (TREE_TYPE (x), nelts);
2961 1.1 mrg CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2962 1.1 mrg = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2963 1.1 mrg }
2964 1.1 mrg if (VECTOR_TYPE_P (TREE_TYPE (x)))
2965 1.1 mrg x = fold (x);
2966 1.1 mrg break;
2967 1.1 mrg }
2968 1.1 mrg case TREE_VEC:
2969 1.1 mrg {
2970 1.1 mrg bool changed = false;
2971 1.1 mrg int n = TREE_VEC_LENGTH (x);
2972 1.1 mrg
2973 1.1 mrg for (int i = 0; i < n; i++)
2974 1.1 mrg {
2975 1.1 mrg tree op = cp_fold (TREE_VEC_ELT (x, i));
2976 1.1 mrg if (op != TREE_VEC_ELT (x, i))
2977 1.1 mrg {
2978 1.1 mrg if (!changed)
2979 1.1 mrg x = copy_node (x);
2980 1.1 mrg TREE_VEC_ELT (x, i) = op;
2981 1.1 mrg changed = true;
2982 1.1 mrg }
2983 1.1 mrg }
2984 1.1 mrg }
2985 1.1 mrg
2986 1.1 mrg break;
2987 1.1 mrg
2988 1.1 mrg case ARRAY_REF:
2989 1.1 mrg case ARRAY_RANGE_REF:
2990 1.1 mrg
2991 1.1 mrg loc = EXPR_LOCATION (x);
2992 1.1 mrg op0 = cp_fold (TREE_OPERAND (x, 0));
2993 1.1 mrg op1 = cp_fold (TREE_OPERAND (x, 1));
2994 1.1 mrg op2 = cp_fold (TREE_OPERAND (x, 2));
2995 1.1 mrg op3 = cp_fold (TREE_OPERAND (x, 3));
2996 1.1 mrg
2997 1.1 mrg if (op0 != TREE_OPERAND (x, 0)
2998 1.1 mrg || op1 != TREE_OPERAND (x, 1)
2999 1.1 mrg || op2 != TREE_OPERAND (x, 2)
3000 1.1 mrg || op3 != TREE_OPERAND (x, 3))
3001 1.1 mrg {
3002 1.1 mrg if (op0 == error_mark_node
3003 1.1 mrg || op1 == error_mark_node
3004 1.1 mrg || op2 == error_mark_node
3005 1.1 mrg || op3 == error_mark_node)
3006 1.1 mrg x = error_mark_node;
3007 1.1 mrg else
3008 1.1 mrg {
3009 1.1 mrg x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
3010 1.1 mrg TREE_READONLY (x) = TREE_READONLY (org_x);
3011 1.1 mrg TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3012 1.1 mrg TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3013 1.1 mrg }
3014 1.1 mrg }
3015 1.1 mrg
3016 1.1 mrg x = fold (x);
3017 1.1 mrg break;
3018 1.1 mrg
3019 1.1 mrg case SAVE_EXPR:
3020 1.1 mrg /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3021 1.1 mrg folding, evaluates to an invariant. In that case no need to wrap
3022 1.1 mrg this folded tree with a SAVE_EXPR. */
3023 1.1 mrg r = cp_fold (TREE_OPERAND (x, 0));
3024 1.1 mrg if (tree_invariant_p (r))
3025 1.1 mrg x = r;
3026 1.1 mrg break;
3027 1.1 mrg
3028 1.1 mrg case REQUIRES_EXPR:
3029 1.1 mrg x = evaluate_requires_expr (x);
3030 1.1 mrg break;
3031 1.1 mrg
3032 1.1 mrg default:
3033 1.1 mrg return org_x;
3034 1.1 mrg }
3035 1.1 mrg
3036 1.1 mrg if (EXPR_P (x) && TREE_CODE (x) == code)
3037 1.1 mrg {
3038 1.1 mrg TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3039 1.1 mrg copy_warning (x, org_x);
3040 1.1 mrg }
3041 1.1 mrg
3042 1.1 mrg if (!c.evaluation_restricted_p ())
3043 1.1 mrg {
3044 1.1 mrg fold_cache->put (org_x, x);
3045 1.1 mrg /* Prevent that we try to fold an already folded result again. */
3046 1.1 mrg if (x != org_x)
3047 1.1 mrg fold_cache->put (x, x);
3048 1.1 mrg }
3049 1.1 mrg
3050 1.1 mrg return x;
3051 1.1 mrg }
3052 1.1 mrg
3053 1.1 mrg /* Look up either "hot" or "cold" in attribute list LIST. */
3054 1.1 mrg
3055 1.1 mrg tree
3056 1.1 mrg lookup_hotness_attribute (tree list)
3057 1.1 mrg {
3058 1.1 mrg for (; list; list = TREE_CHAIN (list))
3059 1.1 mrg {
3060 1.1 mrg tree name = get_attribute_name (list);
3061 1.1 mrg if (is_attribute_p ("hot", name)
3062 1.1 mrg || is_attribute_p ("cold", name)
3063 1.1 mrg || is_attribute_p ("likely", name)
3064 1.1 mrg || is_attribute_p ("unlikely", name))
3065 1.1 mrg break;
3066 1.1 mrg }
3067 1.1 mrg return list;
3068 1.1 mrg }
3069 1.1 mrg
3070 1.1 mrg /* Remove both "hot" and "cold" attributes from LIST. */
3071 1.1 mrg
3072 1.1 mrg static tree
3073 1.1 mrg remove_hotness_attribute (tree list)
3074 1.1 mrg {
3075 1.1 mrg list = remove_attribute ("hot", list);
3076 1.1 mrg list = remove_attribute ("cold", list);
3077 1.1 mrg list = remove_attribute ("likely", list);
3078 1.1 mrg list = remove_attribute ("unlikely", list);
3079 1.1 mrg return list;
3080 1.1 mrg }
3081 1.1 mrg
3082 1.1 mrg /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3083 1.1 mrg PREDICT_EXPR. */
3084 1.1 mrg
3085 1.1 mrg tree
3086 1.1 mrg process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3087 1.1 mrg {
3088 1.1 mrg if (std_attrs == error_mark_node)
3089 1.1 mrg return std_attrs;
3090 1.1 mrg if (tree attr = lookup_hotness_attribute (std_attrs))
3091 1.1 mrg {
3092 1.1 mrg tree name = get_attribute_name (attr);
3093 1.1 mrg bool hot = (is_attribute_p ("hot", name)
3094 1.1 mrg || is_attribute_p ("likely", name));
3095 1.1 mrg tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3096 1.1 mrg hot ? TAKEN : NOT_TAKEN);
3097 1.1 mrg SET_EXPR_LOCATION (pred, attrs_loc);
3098 1.1 mrg add_stmt (pred);
3099 1.1 mrg if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3100 1.1 mrg warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3101 1.1 mrg get_attribute_name (other), name);
3102 1.1 mrg std_attrs = remove_hotness_attribute (std_attrs);
3103 1.1 mrg }
3104 1.1 mrg return std_attrs;
3105 1.1 mrg }
3106 1.1 mrg
/* Helper of fold_builtin_source_location, return the
   std::source_location::__impl type after performing verification
   on it.  LOC is used for reporting any errors.

   The verification enforces the layout contract between the compiler and
   libstdc++: __impl must be a class containing exactly the four non-static
   data members _M_file_name, _M_function_name (both `const char *`) and
   _M_line, _M_column (both integral).  On any violation an error is
   emitted and error_mark_node is returned; otherwise the const-qualified
   __impl type is returned.  */

static tree
get_source_location_impl_type (location_t loc)
{
  /* First find ::std::source_location, which must name a type.  */
  tree name = get_identifier ("source_location");
  tree decl = lookup_qualified_name (std_node, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      /* TREE_LIST here means an ambiguous lookup result.  */
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (std_node, name, decl, loc);
      else
	error_at (loc, "%qD is not a type", decl);
      return error_mark_node;
    }
  /* Then the nested __impl type inside it.  */
  name = get_identifier ("__impl");
  tree type = TREE_TYPE (decl);
  decl = lookup_qualified_name (type, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (type, name, decl, loc);
      else
	error_at (loc, "%qD is not a type", decl);
      return error_mark_node;
    }
  type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    {
      error_at (loc, "%qD is not a class type", decl);
      return error_mark_node;
    }

  /* Walk the initializable fields, counting the recognized members.
     Any unexpected (or unnamed) field forces cnt to 0 and stops the
     walk, so the cnt != 4 check below rejects the type.  */
  int cnt = 0;
  for (tree field = TYPE_FIELDS (type);
       (field = next_initializable_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      if (DECL_NAME (field) != NULL_TREE)
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  if (strcmp (n, "_M_file_name") == 0
	      || strcmp (n, "_M_function_name") == 0)
	    {
	      /* The two string members must be exactly `const char *`.  */
	      if (TREE_TYPE (field) != const_string_type_node)
		{
		  error_at (loc, "%qD does not have %<const char *%> type",
			    field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
	    {
	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
		{
		  error_at (loc, "%qD does not have integral type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	}
      /* Unrecognized member: invalidate the count and bail out.  */
      cnt = 0;
      break;
    }
  if (cnt != 4)
    {
      error_at (loc, "%<std::source_location::__impl%> does not contain only "
		     "non-static data members %<_M_file_name%>, "
		     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
      return error_mark_node;
    }
  /* The built-in produces a pointer to a const object.  */
  return build_qualified_type (type, TYPE_QUAL_CONST);
}
3187 1.1 mrg
/* Type for source_location_table hash_set.  One entry exists per distinct
   (location, function) pair at which __builtin_source_location was
   folded; see fold_builtin_source_location.  */
struct GTY((for_user)) source_location_table_entry {
  /* Location of the call, resolved to its macro expansion point.  */
  location_t loc;
  /* DECL_UID of the containing function, or -1 when there is no
     current_function_decl.  */
  unsigned uid;
  /* The static VAR_DECL holding the __impl object for this entry;
     error_mark_node while the entry is being constructed.  */
  tree var;
};
3194 1.1 mrg
/* Traits class for function start hash maps below.  Keys are
   source_location_table_entry values compared by (loc, uid); the var
   member does not participate in hashing or equality, but distinguishes
   the empty (uid == 0) and deleted (uid == -1U) sentinel slots.  */

struct source_location_table_entry_hash
  : ggc_remove <source_location_table_entry>
{
  typedef source_location_table_entry value_type;
  typedef source_location_table_entry compare_type;

  /* Hash only the location and function uid.  */
  static hashval_t
  hash (const source_location_table_entry &ref)
  {
    inchash::hash hstate (0);
    hstate.add_int (ref.loc);
    hstate.add_int (ref.uid);
    return hstate.end ();
  }

  /* Two entries match when both location and function uid agree.  */
  static bool
  equal (const source_location_table_entry &ref1,
	 const source_location_table_entry &ref2)
  {
    return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
  }

  /* Deleted sentinel: UNKNOWN_LOCATION with uid -1U.  */
  static void
  mark_deleted (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = -1U;
    ref.var = NULL_TREE;
  }

  /* Zeroed storage counts as an empty slot.  */
  static const bool empty_zero_p = true;

  /* Empty sentinel: UNKNOWN_LOCATION with uid 0.  */
  static void
  mark_empty (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = 0;
    ref.var = NULL_TREE;
  }

  static bool
  is_deleted (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == -1U
	    && ref.var == NULL_TREE);
  }

  static bool
  is_empty (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == 0
	    && ref.var == NULL_TREE);
  }

  /* PCH support: delegate to the gengtype-generated walkers.  */
  static void
  pch_nx (source_location_table_entry &p)
  {
    extern void gt_pch_nx (source_location_table_entry &);
    gt_pch_nx (p);
  }

  static void
  pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
  {
    extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
			   void *);
    gt_pch_nx (&p, op, cookie);
  }
};
3268 1.1 mrg
/* Cache of already-created source_location::__impl variables, keyed by
   (location, function uid); lazily allocated in
   fold_builtin_source_location.  */
static GTY(()) hash_table <source_location_table_entry_hash>
  *source_location_table;
/* Counter used to generate unique Lsrc_loc* internal labels.  */
static GTY(()) unsigned int source_location_id;
3272 1.1 mrg
/* Fold __builtin_source_location () call.  LOC is the location
   of the call.

   Returns the address (as a const void *) of a static, constant
   std::source_location::__impl object describing LOC, creating and
   finalizing that variable on first use and caching it in
   source_location_table so repeated calls at the same location in the
   same function share one object.  On a malformed __impl type a null
   pointer constant is returned instead.  */

tree
fold_builtin_source_location (location_t loc)
{
  /* Lazily look up and verify std::source_location::__impl; diagnose
     once and remember error_mark_node on failure.  */
  if (source_location_impl == NULL_TREE)
    {
      auto_diagnostic_group d;
      source_location_impl = get_source_location_impl_type (loc);
      if (source_location_impl == error_mark_node)
	inform (loc, "evaluating %qs", "__builtin_source_location");
    }
  if (source_location_impl == error_mark_node)
    return build_zero_cst (const_ptr_type_node);
  if (source_location_table == NULL)
    source_location_table
      = hash_table <source_location_table_entry_hash>::create_ggc (64);
  const line_map_ordinary *map;
  source_location_table_entry entry;
  /* Key on the macro expansion point so all expansions of one macro
     call site share an entry.  */
  entry.loc
    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
				&map);
  entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
  entry.var = error_mark_node;
  source_location_table_entry *entryp
    = source_location_table->find_slot (entry, INSERT);
  tree var;
  if (entryp->var)
    var = entryp->var;
  else
    {
      /* Cache miss: build a fresh static constexpr __impl variable.  */
      char tmp_name[32];
      ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
      var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
			source_location_impl);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      DECL_EXTERNAL (var) = 0;
      DECL_DECLARED_CONSTEXPR_P (var) = 1;
      DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
      layout_decl (var, 0);

      /* Build the initializer { file, function, line, column }, matching
	 the four fields verified by get_source_location_impl_type.  */
      vec<constructor_elt, va_gc> *v = NULL;
      vec_alloc (v, 4);
      for (tree field = TYPE_FIELDS (source_location_impl);
	   (field = next_initializable_field (field)) != NULL_TREE;
	   field = DECL_CHAIN (field))
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  tree val = NULL_TREE;
	  if (strcmp (n, "_M_file_name") == 0)
	    {
	      if (const char *fname = LOCATION_FILE (loc))
		{
		  /* Honor -fmacro-prefix-map and friends.  */
		  fname = remap_macro_filename (fname);
		  val = build_string_literal (strlen (fname) + 1, fname);
		}
	      else
		val = build_string_literal (1, "");
	    }
	  else if (strcmp (n, "_M_function_name") == 0)
	    {
	      /* Empty string at namespace scope.  */
	      const char *name = "";

	      if (current_function_decl)
		name = cxx_printable_name (current_function_decl, 2);

	      val = build_string_literal (strlen (name) + 1, name);
	    }
	  else if (strcmp (n, "_M_line") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
	  else if (strcmp (n, "_M_column") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
	  else
	    gcc_unreachable ();
	  CONSTRUCTOR_APPEND_ELT (v, field, val);
	}

      tree ctor = build_constructor (source_location_impl, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      /* Store the full key plus var; find_slot only filled in the hash.  */
      *entryp = entry;
      entryp->var = var;
    }

  return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
}
3365 1.1 mrg
3366 #include "gt-cp-cp-gimplify.h"
3367