/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "basic-block.h"
27 #include "cp-tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
32 #include "gimplify.h"
33 #include "c-family/c-ubsan.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "asan.h"
37 #include "gcc-rich-location.h"
38 #include "memmodel.h"
39 #include "tm_p.h"
40 #include "output.h"
41 #include "file-prefix-map.h"
42 #include "cgraph.h"
43 #include "omp-general.h"
44 #include "opts.h"
45
/* State passed to cp_fold_r while walking a tree.  */

struct cp_fold_data
{
  /* Trees already visited; prevents re-walking shared subtrees, which
     could otherwise be exponential (see comment in cp_fold_r).  */
  hash_set<tree> pset;
  bool genericize; // called from cp_fold_function?

  cp_fold_data (bool g): genericize (g) {}
};
53
/* Forward declarations for walk callbacks and helpers defined below
   (or later in this file).  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);
60
61 /* Genericize a TRY_BLOCK. */
62
63 static void
64 genericize_try_block (tree *stmt_p)
65 {
66 tree body = TRY_STMTS (*stmt_p);
67 tree cleanup = TRY_HANDLERS (*stmt_p);
68
69 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
70 }
71
72 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
73
74 static void
75 genericize_catch_block (tree *stmt_p)
76 {
77 tree type = HANDLER_TYPE (*stmt_p);
78 tree body = HANDLER_BODY (*stmt_p);
79
80 /* FIXME should the caught type go in TREE_TYPE? */
81 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
82 }
83
84 /* A terser interface for building a representation of an exception
85 specification. */
86
87 static tree
88 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
89 {
90 tree t;
91
92 /* FIXME should the allowed types go in TREE_TYPE? */
93 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
94 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
95
96 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
97 append_to_statement_list (body, &TREE_OPERAND (t, 0));
98
99 return t;
100 }
101
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  /* On a disallowed exception, call unexpected with the exception object.  */
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  /* Both the outer TRY_CATCH_EXPR and its EH_FILTER_EXPR operand are
     compiler-generated; suppress diagnostics on them.  */
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}
116
117 /* Return the first non-compound statement in STMT. */
118
119 tree
120 first_stmt (tree stmt)
121 {
122 switch (TREE_CODE (stmt))
123 {
124 case STATEMENT_LIST:
125 if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
126 return first_stmt (p->stmt);
127 return void_node;
128
129 case BIND_EXPR:
130 return first_stmt (BIND_EXPR_BODY (stmt));
131
132 default:
133 return stmt;
134 }
135 }
136
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  /* If both arms begin with a PREDICT_EXPR for the same hot/cold label
     predictor, the user marked both branches likely (or both unlikely);
     warn, since the annotations cancel out.  */
  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  /* COND_EXPR needs both arms; synthesize empty statements as needed.  */
  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
		       void_node, else_);
      else
	stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    /* if constexpr: the condition has already been folded to a constant,
       keep only the selected arm.  */
    stmt = integer_nonzerop (cond) ? then_ : else_;
  /* ??? This optimization doesn't seem to belong here, but removing it
     causes -Wreturn-type regressions (e.g. 107310).  */
  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
198
/* Hook into the middle of gimplifying an OMP_FOR node.  Returns
   GS_UNHANDLED if we're already gimplifying this node (letting the
   generic code take over), GS_ALL_DONE otherwise.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  /* Gimplify into a private sequence, then splice it onto *PRE_P.  */
  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
219
/* Gimplify an EXPR_STMT node: replace it with its expression, issuing
   any -Wunused-value diagnostics first.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !warning_suppressed_p (stmt, OPT_Wunused_value))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  /* An erroneous or absent expression becomes an empty statement list.  */
  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
254
/* Gimplify initialization from an AGGR_INIT_EXPR.  *EXPR_P is an
   INIT_EXPR; rewrite it so that the initializer constructs directly
   into the target, eliding intermediate TARGET_EXPRs where possible.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
	if (target_expr_needs_replace (from))
	  {
	    /* If this was changed by cp_genericize_target_expr, we need to
	       walk into it to replace uses of the slot.  */
	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
	    *expr_p = init;
	    return;
	  }
	else
	  /* Elide the TARGET_EXPR: initialize TO straight from INIT.  */
	  from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      /* SUB is the active initializer; for a COMPOUND_EXPR the value is
	 its second operand, which we advance to at the bottom.  */
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }

}
315
/* Gimplify a MUST_NOT_THROW_EXPR: emit the body inside a GIMPLE_TRY
   whose handler calls terminate via GIMPLE_EH_MUST_NOT_THROW.  Returns
   GS_OK if a value temporary remains to process, GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  /* If the wrapper yields a value, voidify_wrapper_expr gives us a
     temporary holding it.  */
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
343
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.

   CODE is the tree code of the assignment (INIT_EXPR, MODIFY_EXPR,
   or RETURN_EXPR as passed by cp_gimplify_expr).  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  /* A COMPOUND_EXPR's value is its second operand; recurse on it.  */
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
	  || lambda_static_thunk_p (fn))
	/* In a thunk, we pass through invisible reference parms, so this isn't
	   actually a copy.  */
	return false;
    }

  /* OP must be a "simple" form (listed below), not a clobber, and TYPE
     must really be an empty class.  */
  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
384
385 /* Returns true if evaluating E as an lvalue has side-effects;
386 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
387 have side-effects until there is a read or write through it. */
388
389 static bool
390 lvalue_has_side_effects (tree e)
391 {
392 if (!TREE_SIDE_EFFECTS (e))
393 return false;
394 while (handled_component_p (e))
395 {
396 if (TREE_CODE (e) == ARRAY_REF
397 && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
398 return true;
399 e = TREE_OPERAND (e, 0);
400 }
401 if (DECL_P (e))
402 /* Just naming a variable has no side-effects. */
403 return false;
404 else if (INDIRECT_REF_P (e))
405 /* Similarly, indirection has no side-effects. */
406 return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
407 else
408 /* For anything else, trust TREE_SIDE_EFFECTS. */
409 return TREE_SIDE_EFFECTS (e);
410 }
411
412 /* Gimplify *EXPR_P as rvalue into an expression that can't be modified
413 by expressions with side-effects in other operands. */
414
415 static enum gimplify_status
416 gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
417 bool (*gimple_test_f) (tree))
418 {
419 enum gimplify_status t
420 = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
421 if (t == GS_ERROR)
422 return GS_ERROR;
423 else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
424 *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
425 return t;
426 }
427
/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set, make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
		 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
	 that can mean we don't copy the argument and some following
	 argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
	return GS_ERROR;
      else if (ordered
	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
	       && is_gimple_variable (*arg_p)
	       && TREE_CODE (*arg_p) != SSA_NAME
	       /* No need to force references into register, references
		  can't be modified.  */
	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
	       /* And this can't be modified either.  */
	       && *arg_p != current_class_ptr)
	/* Snapshot the value now so later side-effects can't change it.  */
	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }

}
468
/* Do C++-specific gimplification.  Args are as for gimplify_expr.
   Lowers C++-only tree codes and enforces C++ evaluation-order rules
   (P0145) before deferring to c_gimplify_expr for everything else.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  /* For statement codes, temporarily adopt this statement's
     full-expression setting; restored before returning.  */
  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	*expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
					tf_warning_or_error);

	/* The expansion produces front-end trees; fold and genericize
	   them before the gimplifier sees them again.  */
	cp_fold_data data (/*genericize*/true);
	cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      /* These must not survive to gimplification; reaching here is a bug
	 (they are presumably lowered during genericization).  */
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	/* Unary plus is a no-op except for any needed conversion.  */
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      /* Under C++17 evaluation order, the callee is evaluated before the
	 arguments; force it to an rvalue so argument side-effects can't
	 change it.  */
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  /* An argument only needs protecting if some later-evaluated
	     argument has side-effects; find the last such argument.  */
	  int last_side_effects_arg = -1;
	  for (int i = nargs; i > 0; --i)
	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
	      {
		last_side_effects_arg = i;
		break;
	      }
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
				   i < last_side_effects_arg);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      int nargs = call_expr_nargs (*expr_p);
	      bool side_effects = false;
	      for (int i = 1; i < nargs; ++i)
		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
		  {
		    side_effects = true;
		    break;
		  }
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
				   side_effects);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      /* Fold the C++ front-end builtins that must be resolved by now
	 (e.g. is_constant_evaluated is false at runtime).  */
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
	    switch (DECL_FE_FUNCTION_CODE (decl))
	      {
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		*expr_p = boolean_false_node;
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		*expr_p
		  = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		*expr_p
		  = fold_builtin_is_corresponding_member
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		*expr_p
		  = fold_builtin_is_pointer_inverconvertible_with_class
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      default:
		break;
	      }
	}
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
797
798 static inline bool
799 is_invisiref_parm (const_tree t)
800 {
801 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
802 && DECL_BY_REFERENCE (t));
803 }
804
805 /* A stable comparison routine for use with splay trees and DECLs. */
806
807 static int
808 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
809 {
810 tree a = (tree) xa;
811 tree b = (tree) xb;
812
813 return DECL_UID (a) - DECL_UID (b);
814 }
815
/* OpenMP context during genericization.  One record per enclosing
   parallel/task region; see omp_cxx_notice_variable for how the fields
   are consulted.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;	/* True for a parallel region; stops the outward
			   search for firstprivate determination.  */
  bool default_shared;	/* True if variables default to shared here.  */
  struct cp_genericize_omp_taskreg *outer;  /* Enclosing region, or NULL.  */
  /* Map from DECL (keyed by splay_tree_compare_decl_uid) to its
     OMP_CLAUSE_DEFAULT_* sharing flags.  */
  splay_tree variables;
};
825
826 /* Return true if genericization should try to determine if
827 DECL is firstprivate or shared within task regions. */
828
829 static bool
830 omp_var_to_track (tree decl)
831 {
832 tree type = TREE_TYPE (decl);
833 if (is_invisiref_parm (decl))
834 type = TREE_TYPE (type);
835 else if (TYPE_REF_P (type))
836 type = TREE_TYPE (type);
837 while (TREE_CODE (type) == ARRAY_TYPE)
838 type = TREE_TYPE (type);
839 if (type == error_mark_node || !CLASS_TYPE_P (type))
840 return false;
841 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
842 return false;
843 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
844 return false;
845 return true;
846 }
847
/* Note DECL use in OpenMP region OMP_CTX during genericization.
   Records DECL's implicit data-sharing in OMP_CTX->variables (and,
   recursively, in all enclosing regions), and for implicitly
   firstprivate class objects makes sure the copy constructor and
   destructor get instantiated now, while instantiation is still
   possible.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      /* Record the use in enclosing regions first, so their entries are
	 available for the search below.  */
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  /* Walk outward: if any enclosing region (up to and including
	     the nearest parallel) already treats DECL as non-shared, it
	     is firstprivate here too.  */
	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  /* A parameter or function-local automatic with no enclosing
	     parallel region is also implicitly firstprivate.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
900
/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to)
{
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    {
      tree init = expand_vec_init_expr (to, from, tf_warning_or_error);

      /* Make cp_gimplify_init_expr call replace_decl.  */
      *replace = fold_convert (void_type_node, init);
    }
  else if (flag_exceptions
	   && TREE_CODE (from) == CONSTRUCTOR
	   && TREE_SIDE_EFFECTS (from)
	   && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
    {
      /* TO is evaluated more than once by the split initialization;
	 stabilize it first.  */
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      *replace = split_nonconstant_init (to, from);
    }
}
926
/* For an INIT_EXPR, replace the INIT_EXPR itself.  *STMT_P is the
   INIT_EXPR; its operands give the target and initializer.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  /* Temporarily use the INIT_EXPR's location for diagnostics.  */
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to);
}
941
/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  /* Temporarily use the TARGET_EXPR's location for diagnostics.  */
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
		      TARGET_EXPR_INITIAL (*stmt_p), slot);
  gcc_assert (!DECL_INITIAL (slot));
}
954
/* Genericization context: state threaded through the genericization
   walk (the fields are consumed by cp_genericize_r, outside this
   chunk — NOTE(review): per-field semantics below are inferred from
   names; confirm against cp_genericize_r).  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;	/* Trees already processed.  */
  auto_vec<tree> bind_expr_stack;  /* Stack of enclosing BIND_EXPRs.  */
  struct cp_genericize_omp_taskreg *omp_ctx;  /* Innermost OpenMP region.  */
  tree try_block;		/* Presumably the innermost TRY_BLOCK.  */
  bool no_sanitize_p;		/* Presumably suppresses sanitization.  */
  bool handle_invisiref_parm_p;	/* Whether to rewrite invisiref parms.  */
};
966
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-omp cases is something to move into
   the middle-end.  As for now we have most foldings only on GENERIC
   in fold-const, we need to perform this before transformation to
   GIMPLE-form.

   This is a cp_walk_tree callback; DATA_ is a cp_fold_data *.  Diagnoses
   escaping immediate functions, folds each subtree via cp_fold, and
   handles OMP loop operands and consteval if specially.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  /* Pre-fold handling: immediate-function escapes and value-expr
     substitution.  */
  switch (code)
    {
    case PTRMEM_CST:
      if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
	{
	  /* Only diagnose each such constant once (pset gates the
	     error); then neutralize it with a zero constant.  */
	  if (!data->pset.add (stmt))
	    error_at (PTRMEM_CST_LOCATION (stmt),
		      "taking address of an immediate function %qD",
		      PTRMEM_CST_MEMBER (stmt));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    case ADDR_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
	{
	  error_at (EXPR_LOCATION (stmt),
		    "taking address of an immediate function %qD",
		    TREE_OPERAND (stmt, 0));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    case CALL_EXPR:
      /* Evaluate calls to std::source_location::current (an immediate
	 function) now, while the location is still meaningful.  */
      if (tree fndecl = cp_get_callee_fndecl_nofold (stmt))
	if (DECL_IMMEDIATE_FUNCTION_P (fndecl)
	    && source_location_current_p (fndecl))
	  *stmt_p = stmt = cxx_constant_value (stmt);
      break;

    case VAR_DECL:
      /* In initializers replace anon union artificial VAR_DECLs
	 with their DECL_VALUE_EXPRs, as nothing will do it later.
	 Ditto for structured bindings.  */
      if (!data->genericize
	  && DECL_HAS_VALUE_EXPR_P (stmt)
	  && (DECL_ANON_UNION_VAR_P (stmt)
	      || (DECL_DECOMPOSITION_P (stmt) && DECL_DECOMP_BASE (stmt))))
	{
	  *stmt_p = stmt = unshare_expr (DECL_VALUE_EXPR (stmt));
	  break;
	}
      break;

    default:
      break;
    }

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  /* Post-fold handling: cp_fold may have changed the code.  */
  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      /* Walk the OMP loop's operands by hand: in OMP_FOR_COND and
	 OMP_FOR_INCR only selected sub-operands may be folded (the
	 iteration variables themselves must stay untouched).  */
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
	     boolean_false_node.  */
	  cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
	  cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  return NULL;
	}
      break;

    /* These are only for genericize time; they're here rather than in
       cp_genericize to avoid problems with the invisible reference
       transition.  */
    case INIT_EXPR:
      if (data->genericize)
	cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->genericize)
	cp_genericize_target_expr (stmt_p);
      break;

    default:
      break;
    }

  return NULL;
}
1128
1129 /* Fold ALL the trees! FIXME we should be able to remove this, but
1130 apparently that still causes optimization regressions. */
1131
1132 void
1133 cp_fold_function (tree fndecl)
1134 {
1135 cp_fold_data data (/*genericize*/true);
1136 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
1137 }
1138
1139 /* Turn SPACESHIP_EXPR EXPR into GENERIC. */
1140
1141 static tree genericize_spaceship (tree expr)
1142 {
1143 iloc_sentinel s (cp_expr_location (expr));
1144 tree type = TREE_TYPE (expr);
1145 tree op0 = TREE_OPERAND (expr, 0);
1146 tree op1 = TREE_OPERAND (expr, 1);
1147 return genericize_spaceship (input_location, type, op0, op1);
1148 }
1149
1150 /* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
1151 to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
1152 the middle-end (c++/88256). If EXPR is a DECL, use add_stmt and return
1153 NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR. */
1154
1155 tree
1156 predeclare_vla (tree expr)
1157 {
1158 tree type = TREE_TYPE (expr);
1159 if (type == error_mark_node)
1160 return expr;
1161 if (is_typedef_decl (expr))
1162 type = DECL_ORIGINAL_TYPE (expr);
1163
1164 /* We need to strip pointers for gimplify_type_sizes. */
1165 tree vla = type;
1166 while (POINTER_TYPE_P (vla))
1167 {
1168 if (TYPE_NAME (vla))
1169 return expr;
1170 vla = TREE_TYPE (vla);
1171 }
1172 if (vla == type || TYPE_NAME (vla)
1173 || !variably_modified_type_p (vla, NULL_TREE))
1174 return expr;
1175
1176 tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
1177 DECL_ARTIFICIAL (decl) = 1;
1178 TYPE_NAME (vla) = decl;
1179 tree dexp = build_stmt (input_location, DECL_EXPR, decl);
1180 if (DECL_P (expr))
1181 {
1182 add_stmt (dexp);
1183 return NULL_TREE;
1184 }
1185 else
1186 {
1187 expr = build2 (COMPOUND_EXPR, type, dexp, expr);
1188 return expr;
1189 }
1190 }
1191
1192 /* Perform any pre-gimplification lowering of C++ front end trees to
1193 GENERIC. */
1194
1195 static tree
1196 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1197 {
1198 tree stmt = *stmt_p;
1199 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1200 hash_set<tree> *p_set = wtd->p_set;
1201
1202 /* If in an OpenMP context, note var uses. */
1203 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1204 && (VAR_P (stmt)
1205 || TREE_CODE (stmt) == PARM_DECL
1206 || TREE_CODE (stmt) == RESULT_DECL)
1207 && omp_var_to_track (stmt))
1208 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1209
1210 /* Don't dereference parms in a thunk, pass the references through. */
1211 if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
1212 || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1213 {
1214 *walk_subtrees = 0;
1215 return NULL;
1216 }
1217
1218 /* Dereference invisible reference parms. */
1219 if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
1220 {
1221 *stmt_p = convert_from_reference (stmt);
1222 p_set->add (*stmt_p);
1223 *walk_subtrees = 0;
1224 return NULL;
1225 }
1226
1227 /* Map block scope extern declarations to visible declarations with the
1228 same name and type in outer scopes if any. */
1229 if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
1230 if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
1231 {
1232 if (alias != error_mark_node)
1233 {
1234 *stmt_p = alias;
1235 TREE_USED (alias) |= TREE_USED (stmt);
1236 }
1237 *walk_subtrees = 0;
1238 return NULL;
1239 }
1240
1241 if (TREE_CODE (stmt) == INTEGER_CST
1242 && TYPE_REF_P (TREE_TYPE (stmt))
1243 && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1244 && !wtd->no_sanitize_p)
1245 {
1246 ubsan_maybe_instrument_reference (stmt_p);
1247 if (*stmt_p != stmt)
1248 {
1249 *walk_subtrees = 0;
1250 return NULL_TREE;
1251 }
1252 }
1253
1254 /* Other than invisiref parms, don't walk the same tree twice. */
1255 if (p_set->contains (stmt))
1256 {
1257 *walk_subtrees = 0;
1258 return NULL_TREE;
1259 }
1260
1261 switch (TREE_CODE (stmt))
1262 {
1263 case ADDR_EXPR:
1264 if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1265 {
1266 /* If in an OpenMP context, note var uses. */
1267 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1268 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1269 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1270 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1271 *walk_subtrees = 0;
1272 }
1273 break;
1274
1275 case RETURN_EXPR:
1276 if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1277 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1278 *walk_subtrees = 0;
1279 break;
1280
1281 case OMP_CLAUSE:
1282 switch (OMP_CLAUSE_CODE (stmt))
1283 {
1284 case OMP_CLAUSE_LASTPRIVATE:
1285 /* Don't dereference an invisiref in OpenMP clauses. */
1286 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1287 {
1288 *walk_subtrees = 0;
1289 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1290 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1291 cp_genericize_r, data, NULL);
1292 }
1293 break;
1294 case OMP_CLAUSE_PRIVATE:
1295 /* Don't dereference an invisiref in OpenMP clauses. */
1296 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1297 *walk_subtrees = 0;
1298 else if (wtd->omp_ctx != NULL)
1299 {
1300 /* Private clause doesn't cause any references to the
1301 var in outer contexts, avoid calling
1302 omp_cxx_notice_variable for it. */
1303 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1304 wtd->omp_ctx = NULL;
1305 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1306 data, NULL);
1307 wtd->omp_ctx = old;
1308 *walk_subtrees = 0;
1309 }
1310 break;
1311 case OMP_CLAUSE_SHARED:
1312 case OMP_CLAUSE_FIRSTPRIVATE:
1313 case OMP_CLAUSE_COPYIN:
1314 case OMP_CLAUSE_COPYPRIVATE:
1315 case OMP_CLAUSE_INCLUSIVE:
1316 case OMP_CLAUSE_EXCLUSIVE:
1317 /* Don't dereference an invisiref in OpenMP clauses. */
1318 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1319 *walk_subtrees = 0;
1320 break;
1321 case OMP_CLAUSE_REDUCTION:
1322 case OMP_CLAUSE_IN_REDUCTION:
1323 case OMP_CLAUSE_TASK_REDUCTION:
1324 /* Don't dereference an invisiref in reduction clause's
1325 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1326 still needs to be genericized. */
1327 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1328 {
1329 *walk_subtrees = 0;
1330 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1331 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1332 cp_genericize_r, data, NULL);
1333 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1334 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1335 cp_genericize_r, data, NULL);
1336 }
1337 break;
1338 default:
1339 break;
1340 }
1341 break;
1342
1343 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1344 to lower this construct before scanning it, so we need to lower these
1345 before doing anything else. */
1346 case CLEANUP_STMT:
1347 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1348 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1349 : TRY_FINALLY_EXPR,
1350 void_type_node,
1351 CLEANUP_BODY (stmt),
1352 CLEANUP_EXPR (stmt));
1353 break;
1354
1355 case IF_STMT:
1356 genericize_if_stmt (stmt_p);
1357 /* *stmt_p has changed, tail recurse to handle it again. */
1358 return cp_genericize_r (stmt_p, walk_subtrees, data);
1359
1360 /* COND_EXPR might have incompatible types in branches if one or both
1361 arms are bitfields. Fix it up now. */
1362 case COND_EXPR:
1363 {
1364 tree type_left
1365 = (TREE_OPERAND (stmt, 1)
1366 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1367 : NULL_TREE);
1368 tree type_right
1369 = (TREE_OPERAND (stmt, 2)
1370 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1371 : NULL_TREE);
1372 if (type_left
1373 && !useless_type_conversion_p (TREE_TYPE (stmt),
1374 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1375 {
1376 TREE_OPERAND (stmt, 1)
1377 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1378 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1379 type_left));
1380 }
1381 if (type_right
1382 && !useless_type_conversion_p (TREE_TYPE (stmt),
1383 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1384 {
1385 TREE_OPERAND (stmt, 2)
1386 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1387 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1388 type_right));
1389 }
1390 }
1391 break;
1392
1393 case BIND_EXPR:
1394 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1395 {
1396 tree decl;
1397 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1398 if (VAR_P (decl)
1399 && !DECL_EXTERNAL (decl)
1400 && omp_var_to_track (decl))
1401 {
1402 splay_tree_node n
1403 = splay_tree_lookup (wtd->omp_ctx->variables,
1404 (splay_tree_key) decl);
1405 if (n == NULL)
1406 splay_tree_insert (wtd->omp_ctx->variables,
1407 (splay_tree_key) decl,
1408 TREE_STATIC (decl)
1409 ? OMP_CLAUSE_DEFAULT_SHARED
1410 : OMP_CLAUSE_DEFAULT_PRIVATE);
1411 }
1412 }
1413 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1414 {
1415 /* The point here is to not sanitize static initializers. */
1416 bool no_sanitize_p = wtd->no_sanitize_p;
1417 wtd->no_sanitize_p = true;
1418 for (tree decl = BIND_EXPR_VARS (stmt);
1419 decl;
1420 decl = DECL_CHAIN (decl))
1421 if (VAR_P (decl)
1422 && TREE_STATIC (decl)
1423 && DECL_INITIAL (decl))
1424 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1425 wtd->no_sanitize_p = no_sanitize_p;
1426 }
1427 wtd->bind_expr_stack.safe_push (stmt);
1428 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1429 cp_genericize_r, data, NULL);
1430 wtd->bind_expr_stack.pop ();
1431 break;
1432
1433 case USING_STMT:
1434 {
1435 tree block = NULL_TREE;
1436
1437 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1438 BLOCK, and append an IMPORTED_DECL to its
1439 BLOCK_VARS chained list. */
1440 if (wtd->bind_expr_stack.exists ())
1441 {
1442 int i;
1443 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1444 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1445 break;
1446 }
1447 if (block)
1448 {
1449 tree decl = TREE_OPERAND (stmt, 0);
1450 gcc_assert (decl);
1451
1452 if (undeduced_auto_decl (decl))
1453 /* Omit from the GENERIC, the back-end can't handle it. */;
1454 else
1455 {
1456 tree using_directive = make_node (IMPORTED_DECL);
1457 TREE_TYPE (using_directive) = void_type_node;
1458 DECL_CONTEXT (using_directive) = current_function_decl;
1459 DECL_SOURCE_LOCATION (using_directive)
1460 = cp_expr_loc_or_input_loc (stmt);
1461
1462 IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1463 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1464 BLOCK_VARS (block) = using_directive;
1465 }
1466 }
1467 /* The USING_STMT won't appear in GENERIC. */
1468 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1469 *walk_subtrees = 0;
1470 }
1471 break;
1472
1473 case DECL_EXPR:
1474 if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1475 {
1476 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1477 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1478 *walk_subtrees = 0;
1479 }
1480 else
1481 {
1482 tree d = DECL_EXPR_DECL (stmt);
1483 if (VAR_P (d))
1484 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1485 }
1486 break;
1487
1488 case OMP_PARALLEL:
1489 case OMP_TASK:
1490 case OMP_TASKLOOP:
1491 {
1492 struct cp_genericize_omp_taskreg omp_ctx;
1493 tree c, decl;
1494 splay_tree_node n;
1495
1496 *walk_subtrees = 0;
1497 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1498 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1499 omp_ctx.default_shared = omp_ctx.is_parallel;
1500 omp_ctx.outer = wtd->omp_ctx;
1501 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1502 wtd->omp_ctx = &omp_ctx;
1503 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1504 switch (OMP_CLAUSE_CODE (c))
1505 {
1506 case OMP_CLAUSE_SHARED:
1507 case OMP_CLAUSE_PRIVATE:
1508 case OMP_CLAUSE_FIRSTPRIVATE:
1509 case OMP_CLAUSE_LASTPRIVATE:
1510 decl = OMP_CLAUSE_DECL (c);
1511 if (decl == error_mark_node || !omp_var_to_track (decl))
1512 break;
1513 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1514 if (n != NULL)
1515 break;
1516 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1517 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1518 ? OMP_CLAUSE_DEFAULT_SHARED
1519 : OMP_CLAUSE_DEFAULT_PRIVATE);
1520 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1521 omp_cxx_notice_variable (omp_ctx.outer, decl);
1522 break;
1523 case OMP_CLAUSE_DEFAULT:
1524 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1525 omp_ctx.default_shared = true;
1526 default:
1527 break;
1528 }
1529 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1530 c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1531 cp_genericize_r, cp_walk_subtrees);
1532 else
1533 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1534 wtd->omp_ctx = omp_ctx.outer;
1535 splay_tree_delete (omp_ctx.variables);
1536 }
1537 break;
1538
1539 case OMP_TARGET:
1540 cfun->has_omp_target = true;
1541 break;
1542
1543 case TRY_BLOCK:
1544 {
1545 *walk_subtrees = 0;
1546 tree try_block = wtd->try_block;
1547 wtd->try_block = stmt;
1548 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1549 wtd->try_block = try_block;
1550 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1551 }
1552 break;
1553
1554 case MUST_NOT_THROW_EXPR:
1555 /* MUST_NOT_THROW_COND might be something else with TM. */
1556 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1557 {
1558 *walk_subtrees = 0;
1559 tree try_block = wtd->try_block;
1560 wtd->try_block = stmt;
1561 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1562 wtd->try_block = try_block;
1563 }
1564 break;
1565
1566 case THROW_EXPR:
1567 {
1568 location_t loc = location_of (stmt);
1569 if (warning_suppressed_p (stmt /* What warning? */))
1570 /* Never mind. */;
1571 else if (wtd->try_block)
1572 {
1573 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
1574 {
1575 auto_diagnostic_group d;
1576 if (warning_at (loc, OPT_Wterminate,
1577 "%<throw%> will always call %<terminate%>")
1578 && cxx_dialect >= cxx11
1579 && DECL_DESTRUCTOR_P (current_function_decl))
1580 inform (loc, "in C++11 destructors default to %<noexcept%>");
1581 }
1582 }
1583 else
1584 {
1585 if (warn_cxx11_compat && cxx_dialect < cxx11
1586 && DECL_DESTRUCTOR_P (current_function_decl)
1587 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1588 == NULL_TREE)
1589 && (get_defaulted_eh_spec (current_function_decl)
1590 == empty_except_spec))
1591 warning_at (loc, OPT_Wc__11_compat,
1592 "in C++11 this %<throw%> will call %<terminate%> "
1593 "because destructors default to %<noexcept%>");
1594 }
1595 }
1596 break;
1597
1598 case CONVERT_EXPR:
1599 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1600 break;
1601
1602 case SPACESHIP_EXPR:
1603 *stmt_p = genericize_spaceship (*stmt_p);
1604 break;
1605
1606 case PTRMEM_CST:
1607 /* By the time we get here we're handing off to the back end, so we don't
1608 need or want to preserve PTRMEM_CST anymore. */
1609 *stmt_p = cplus_expand_constant (stmt);
1610 *walk_subtrees = 0;
1611 break;
1612
1613 case MEM_REF:
1614 /* For MEM_REF, make sure not to sanitize the second operand even
1615 if it has reference type. It is just an offset with a type
1616 holding other information. There is no other processing we
1617 need to do for INTEGER_CSTs, so just ignore the second argument
1618 unconditionally. */
1619 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1620 *walk_subtrees = 0;
1621 break;
1622
1623 case NOP_EXPR:
1624 *stmt_p = predeclare_vla (*stmt_p);
1625 if (!wtd->no_sanitize_p
1626 && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
1627 && TYPE_REF_P (TREE_TYPE (stmt)))
1628 ubsan_maybe_instrument_reference (stmt_p);
1629 break;
1630
1631 case CALL_EXPR:
1632 /* Evaluate function concept checks instead of treating them as
1633 normal functions. */
1634 if (concept_check_p (stmt))
1635 {
1636 *stmt_p = evaluate_concept_check (stmt);
1637 * walk_subtrees = 0;
1638 break;
1639 }
1640
1641 if (!wtd->no_sanitize_p
1642 && sanitize_flags_p ((SANITIZE_NULL
1643 | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
1644 {
1645 tree fn = CALL_EXPR_FN (stmt);
1646 if (fn != NULL_TREE
1647 && !error_operand_p (fn)
1648 && INDIRECT_TYPE_P (TREE_TYPE (fn))
1649 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1650 {
1651 bool is_ctor
1652 = TREE_CODE (fn) == ADDR_EXPR
1653 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1654 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1655 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1656 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1657 if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
1658 cp_ubsan_maybe_instrument_member_call (stmt);
1659 }
1660 else if (fn == NULL_TREE
1661 && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1662 && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
1663 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
1664 *walk_subtrees = 0;
1665 }
1666 /* Fall through. */
1667 case AGGR_INIT_EXPR:
1668 /* For calls to a multi-versioned function, overload resolution
1669 returns the function with the highest target priority, that is,
1670 the version that will checked for dispatching first. If this
1671 version is inlinable, a direct call to this version can be made
1672 otherwise the call should go through the dispatcher. */
1673 {
1674 tree fn = cp_get_callee_fndecl_nofold (stmt);
1675 if (fn && DECL_FUNCTION_VERSIONED (fn)
1676 && (current_function_decl == NULL
1677 || !targetm.target_option.can_inline_p (current_function_decl,
1678 fn)))
1679 if (tree dis = get_function_version_dispatcher (fn))
1680 {
1681 mark_versions_used (dis);
1682 dis = build_address (dis);
1683 if (TREE_CODE (stmt) == CALL_EXPR)
1684 CALL_EXPR_FN (stmt) = dis;
1685 else
1686 AGGR_INIT_EXPR_FN (stmt) = dis;
1687 }
1688 }
1689 break;
1690
1691 case TARGET_EXPR:
1692 if (TARGET_EXPR_INITIAL (stmt)
1693 && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1694 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1695 TARGET_EXPR_NO_ELIDE (stmt) = 1;
1696 break;
1697
1698 case TEMPLATE_ID_EXPR:
1699 gcc_assert (concept_check_p (stmt));
1700 /* Emit the value of the concept check. */
1701 *stmt_p = evaluate_concept_check (stmt);
1702 walk_subtrees = 0;
1703 break;
1704
1705 case OMP_DISTRIBUTE:
1706 /* Need to explicitly instantiate copy ctors on class iterators of
1707 composite distribute parallel for. */
1708 if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
1709 {
1710 tree *data[4] = { NULL, NULL, NULL, NULL };
1711 tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
1712 find_combined_omp_for, data, NULL);
1713 if (inner != NULL_TREE
1714 && TREE_CODE (inner) == OMP_FOR)
1715 {
1716 for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
1717 if (OMP_FOR_ORIG_DECLS (inner)
1718 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1719 i)) == TREE_LIST
1720 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1721 i)))
1722 {
1723 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
1724 /* Class iterators aren't allowed on OMP_SIMD, so the only
1725 case we need to solve is distribute parallel for. */
1726 gcc_assert (TREE_CODE (inner) == OMP_FOR
1727 && data[1]);
1728 tree orig_decl = TREE_PURPOSE (orig);
1729 tree c, cl = NULL_TREE;
1730 for (c = OMP_FOR_CLAUSES (inner);
1731 c; c = OMP_CLAUSE_CHAIN (c))
1732 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1733 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
1734 && OMP_CLAUSE_DECL (c) == orig_decl)
1735 {
1736 cl = c;
1737 break;
1738 }
1739 if (cl == NULL_TREE)
1740 {
1741 for (c = OMP_PARALLEL_CLAUSES (*data[1]);
1742 c; c = OMP_CLAUSE_CHAIN (c))
1743 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1744 && OMP_CLAUSE_DECL (c) == orig_decl)
1745 {
1746 cl = c;
1747 break;
1748 }
1749 }
1750 if (cl)
1751 {
1752 orig_decl = require_complete_type (orig_decl);
1753 tree inner_type = TREE_TYPE (orig_decl);
1754 if (orig_decl == error_mark_node)
1755 continue;
1756 if (TYPE_REF_P (TREE_TYPE (orig_decl)))
1757 inner_type = TREE_TYPE (inner_type);
1758
1759 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1760 inner_type = TREE_TYPE (inner_type);
1761 get_copy_ctor (inner_type, tf_warning_or_error);
1762 }
1763 }
1764 }
1765 }
1766 /* FALLTHRU */
1767
1768 case FOR_STMT:
1769 case WHILE_STMT:
1770 case DO_STMT:
1771 case SWITCH_STMT:
1772 case CONTINUE_STMT:
1773 case BREAK_STMT:
1774 case OMP_FOR:
1775 case OMP_SIMD:
1776 case OMP_LOOP:
1777 case OACC_LOOP:
1778 case STATEMENT_LIST:
1779 /* These cases are handled by shared code. */
1780 c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1781 cp_genericize_r, cp_walk_subtrees);
1782 break;
1783
1784 case BIT_CAST_EXPR:
1785 *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
1786 TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1787 break;
1788
1789 default:
1790 if (IS_TYPE_OR_DECL_P (stmt))
1791 *walk_subtrees = 0;
1792 break;
1793 }
1794
1795 p_set->add (*stmt_p);
1796
1797 return NULL;
1798 }
1799
1800 /* Lower C++ front end trees to GENERIC in T_P. */
1801
1802 static void
1803 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1804 {
1805 struct cp_genericize_data wtd;
1806
1807 wtd.p_set = new hash_set<tree>;
1808 wtd.bind_expr_stack.create (0);
1809 wtd.omp_ctx = NULL;
1810 wtd.try_block = NULL_TREE;
1811 wtd.no_sanitize_p = false;
1812 wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1813 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1814 delete wtd.p_set;
1815 if (sanitize_flags_p (SANITIZE_VPTR))
1816 cp_ubsan_instrument_member_accesses (t_p);
1817 }
1818
1819 /* If a function that should end with a return in non-void
1820 function doesn't obviously end with return, add ubsan
1821 instrumentation code to verify it at runtime. If -fsanitize=return
1822 is not enabled, instrument __builtin_unreachable. */
1823
1824 static void
1825 cp_maybe_instrument_return (tree fndecl)
1826 {
1827 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1828 || DECL_CONSTRUCTOR_P (fndecl)
1829 || DECL_DESTRUCTOR_P (fndecl)
1830 || !targetm.warn_func_return (fndecl))
1831 return;
1832
1833 if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1834 /* Don't add __builtin_unreachable () if not optimizing, it will not
1835 improve any optimizations in that case, just break UB code.
1836 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
1837 UBSan covers this with ubsan_instrument_return above where sufficient
1838 information is provided, while the __builtin_unreachable () below
1839 if return sanitization is disabled will just result in hard to
1840 understand runtime error without location. */
1841 && (!optimize
1842 || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1843 return;
1844
1845 tree t = DECL_SAVED_TREE (fndecl);
1846 while (t)
1847 {
1848 switch (TREE_CODE (t))
1849 {
1850 case BIND_EXPR:
1851 t = BIND_EXPR_BODY (t);
1852 continue;
1853 case TRY_FINALLY_EXPR:
1854 case CLEANUP_POINT_EXPR:
1855 t = TREE_OPERAND (t, 0);
1856 continue;
1857 case STATEMENT_LIST:
1858 {
1859 tree_stmt_iterator i = tsi_last (t);
1860 while (!tsi_end_p (i))
1861 {
1862 tree p = tsi_stmt (i);
1863 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1864 break;
1865 tsi_prev (&i);
1866 }
1867 if (!tsi_end_p (i))
1868 {
1869 t = tsi_stmt (i);
1870 continue;
1871 }
1872 }
1873 break;
1874 case RETURN_EXPR:
1875 return;
1876 default:
1877 break;
1878 }
1879 break;
1880 }
1881 if (t == NULL_TREE)
1882 return;
1883 tree *p = &DECL_SAVED_TREE (fndecl);
1884 if (TREE_CODE (*p) == BIND_EXPR)
1885 p = &BIND_EXPR_BODY (*p);
1886
1887 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1888 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1889 t = ubsan_instrument_return (loc);
1890 else
1891 {
1892 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1893 t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1894 }
1895
1896 append_to_statement_list (t, p);
1897 }
1898
/* Lower the body of FNDECL from C++ trees to GENERIC: fix up parameters
   and the return value that are passed by invisible reference, walk the
   saved tree with cp_genericize_tree, add return-path instrumentation,
   and finish with the shared C/C++ genericization.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	/* Switch the parm to its by-reference ABI type and relayout.  */
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  /* The result is now a reference; dereference it in the
		     user-visible variable's value expression.  */
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  bc_state_t save_state;
  save_bc_state (&save_state);

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);
  restore_bc_state (&save_state);
}
1966
1967 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1969 NULL if there is in fact nothing to do. ARG2 may be null if FN
1970 actually only takes one argument. */
1971
static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  /* Build a statement (or statement list) that calls FN with the address
     of ARG1 (and optionally ARG2), supplying any remaining default
     arguments.  For array types, emit a loop applying FN elementwise.
     Returns NULL if FN is NULL.  */
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the parameters whose values we supply explicitly (ARG1 and,
     when present, ARG2); the rest come from default arguments.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Index down to the innermost element through any nesting of
	 array types.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 is the one-past-the-end pointer for the whole array.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* P1 (and P2) are the loop induction pointers.  */
      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label; the loop is built as label/body/cond-jump.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
	t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance the induction pointers by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Jump back to the loop head while P1 != END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call on the addresses of the arguments.  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
	t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
2083
2084 /* Return code to initialize DECL with its default constructor, or
2085 NULL if there's nothing to do. */
2086
2087 tree
2088 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2089 {
2090 tree info = CP_OMP_CLAUSE_INFO (clause);
2091 tree ret = NULL;
2092
2093 if (info)
2094 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2095
2096 return ret;
2097 }
2098
2099 /* Return code to initialize DST with a copy constructor from SRC. */
2100
2101 tree
2102 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2103 {
2104 tree info = CP_OMP_CLAUSE_INFO (clause);
2105 tree ret = NULL;
2106
2107 if (info)
2108 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2109 if (ret == NULL)
2110 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2111
2112 return ret;
2113 }
2114
2115 /* Similarly, except use an assignment operator instead. */
2116
2117 tree
2118 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2119 {
2120 tree info = CP_OMP_CLAUSE_INFO (clause);
2121 tree ret = NULL;
2122
2123 if (info)
2124 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2125 if (ret == NULL)
2126 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2127
2128 return ret;
2129 }
2130
2131 /* Return code to destroy DECL. */
2132
2133 tree
2134 cxx_omp_clause_dtor (tree clause, tree decl)
2135 {
2136 tree info = CP_OMP_CLAUSE_INFO (clause);
2137 tree ret = NULL;
2138
2139 if (info)
2140 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2141
2142 return ret;
2143 }
2144
2145 /* True if OpenMP should privatize what this DECL points to rather
2146 than the DECL itself. */
2147
2148 bool
2149 cxx_omp_privatize_by_reference (const_tree decl)
2150 {
2151 return (TYPE_REF_P (TREE_TYPE (decl))
2152 || is_invisiref_parm (decl));
2153 }
2154
/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      /* Only invisible reference parms stand for the referenced object;
	 any other reference-typed decl is not a "const var" here.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  /* Look for the named local variable that the return value was
	     replaced with, and recover its const qualifier if any.  */
	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
2197
2198 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2199 of DECL is predetermined. */
2200
2201 enum omp_clause_default_kind
2202 cxx_omp_predetermined_sharing_1 (tree decl)
2203 {
2204 /* Static data members are predetermined shared. */
2205 if (TREE_STATIC (decl))
2206 {
2207 tree ctx = CP_DECL_CONTEXT (decl);
2208 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2209 return OMP_CLAUSE_DEFAULT_SHARED;
2210
2211 if (c_omp_predefined_variable (decl))
2212 return OMP_CLAUSE_DEFAULT_SHARED;
2213 }
2214
2215 /* this may not be specified in data-sharing clauses, still we need
2216 to predetermined it firstprivate. */
2217 if (decl == current_class_ptr)
2218 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2219
2220 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2221 }
2222
2223 /* Likewise, but also include the artificial vars. We don't want to
2224 disallow the artificial vars being mentioned in explicit clauses,
2225 as we use artificial vars e.g. for loop constructs with random
2226 access iterators other than pointers, but during gimplification
2227 we want to treat them as predetermined. */
2228
2229 enum omp_clause_default_kind
2230 cxx_omp_predetermined_sharing (tree decl)
2231 {
2232 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2233 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2234 return ret;
2235
2236 /* Predetermine artificial variables holding integral values, those
2237 are usually result of gimplify_one_sizepos or SAVE_EXPR
2238 gimplification. */
2239 if (VAR_P (decl)
2240 && DECL_ARTIFICIAL (decl)
2241 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2242 && !(DECL_LANG_SPECIFIC (decl)
2243 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2244 return OMP_CLAUSE_DEFAULT_SHARED;
2245
2246 /* Similarly for typeinfo symbols. */
2247 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2248 return OMP_CLAUSE_DEFAULT_SHARED;
2249
2250 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2251 }
2252
2253 enum omp_clause_defaultmap_kind
2254 cxx_omp_predetermined_mapping (tree decl)
2255 {
2256 /* Predetermine artificial variables holding integral values, those
2257 are usually result of gimplify_one_sizepos or SAVE_EXPR
2258 gimplification. */
2259 if (VAR_P (decl)
2260 && DECL_ARTIFICIAL (decl)
2261 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2262 && !(DECL_LANG_SPECIFIC (decl)
2263 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2264 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2265
2266 if (c_omp_predefined_variable (decl))
2267 return OMP_CLAUSE_DEFAULTMAP_TO;
2268
2269 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2270 }
2271
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only firstprivate, private and lastprivate-of-a-loop-iv clauses need
     the C++ constructor/destructor bookkeeping below.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  /* NOTE(review): TREE_TYPE is read before the error_mark_node check below;
     this presumably relies on TREE_TYPE (error_mark_node) being safe to
     read -- confirm.  */
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
				     true))
    make_shared = true;

  /* On failure degrade the clause to shared so no constructor/destructor
     calls are generated for it.  */
  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
2316
2317 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2318 disregarded in OpenMP construct, because it is going to be
2319 remapped during OpenMP lowering. SHARED is true if DECL
2320 is going to be shared, false if it is going to be privatized. */
2321
2322 bool
2323 cxx_omp_disregard_value_expr (tree decl, bool shared)
2324 {
2325 if (shared)
2326 return false;
2327 if (VAR_P (decl)
2328 && DECL_HAS_VALUE_EXPR_P (decl)
2329 && DECL_ARTIFICIAL (decl)
2330 && DECL_LANG_SPECIFIC (decl)
2331 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2332 return true;
2333 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2334 return true;
2335 return false;
2336 }
2337
2338 /* Fold expression X which is used as an rvalue if RVAL is true. */
2339
2340 tree
2341 cp_fold_maybe_rvalue (tree x, bool rval)
2342 {
2343 while (true)
2344 {
2345 x = cp_fold (x);
2346 if (rval)
2347 x = mark_rvalue_use (x);
2348 if (rval && DECL_P (x)
2349 && !TYPE_REF_P (TREE_TYPE (x)))
2350 {
2351 tree v = decl_constant_value (x);
2352 if (v != x && v != error_mark_node)
2353 {
2354 x = v;
2355 continue;
2356 }
2357 }
2358 break;
2359 }
2360 return x;
2361 }
2362
2363 /* Fold expression X which is used as an rvalue. */
2364
2365 tree
2366 cp_fold_rvalue (tree x)
2367 {
2368 return cp_fold_maybe_rvalue (x, true);
2369 }
2370
2371 /* Perform folding on expression X. */
2372
2373 tree
2374 cp_fully_fold (tree x)
2375 {
2376 if (processing_template_decl)
2377 return x;
2378 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2379 have to call both. */
2380 if (cxx_dialect >= cxx11)
2381 {
2382 x = maybe_constant_value (x);
2383 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2384 a TARGET_EXPR; undo that here. */
2385 if (TREE_CODE (x) == TARGET_EXPR)
2386 x = TARGET_EXPR_INITIAL (x);
2387 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2388 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2389 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2390 x = TREE_OPERAND (x, 0);
2391 }
2392 return cp_fold_rvalue (x);
2393 }
2394
2395 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2396 in some cases. */
2397
2398 tree
2399 cp_fully_fold_init (tree x)
2400 {
2401 if (processing_template_decl)
2402 return x;
2403 x = cp_fully_fold (x);
2404 cp_fold_data data (/*genericize*/false);
2405 cp_walk_tree (&x, cp_fold_r, &data, NULL);
2406 return x;
2407 }
2408
2409 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2410 and certain changes are made to the folding done. Or should be (FIXME). We
2411 never touch maybe_const, as it is only used for the C front-end
2412 C_MAYBE_CONST_EXPR. */
2413
2414 tree
2415 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2416 {
2417 return cp_fold_maybe_rvalue (x, !lval);
2418 }
2419
/* Cache of prior cp_fold results; GTY((deletable)) lets the garbage
   collector drop it at any time since it can always be rebuilt.  */
static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}
2430
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Function returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  /* Whether operands may be folded as rvalues; cleared for codes whose
     operand must keep its lvalue-ness (e.g. MODIFY_EXPR's LHS).  */
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    {
      /* unshare_expr doesn't recurse into SAVE_EXPRs.  If SAVE_EXPR's
	 argument has been folded into a tree invariant, make sure it is
	 unshared.  See PR112727.  */
      if (TREE_CODE (x) == SAVE_EXPR && *cached != x)
	return unshare_expr (*cached);
      return *cached;
    }

  /* If constexpr evaluation inside this fold depends on DECL_UIDs, the
     checker below tells us not to cache the result.  */
  uid_sensitive_constexpr_evaluation_checker c;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
	 effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
	x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  /* This is just to make sure we don't end up with casts to
	     void from error_mark_node.  If we just return x, then
	     cp_fold_r might fold the operand into error_mark_node and
	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
	     during gimplification doesn't like such casts.
	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
	     folding of the operand should be in the caches and if in cp_fold_r
	     it will modify it in place.  */
	  op0 = cp_fold (TREE_OPERAND (x, 0));
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  break;
	}

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using the
	   folding variants, since fold() doesn't do those transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;

      break;

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  if (p != x)
	    return cp_fold (p);
	}
      goto unary;

    case ADDR_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);

      /* Cope with user tricks that amount to offsetof.  */
      if (op0 != error_mark_node
	  && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
	{
	  tree val = get_base_address (op0);
	  if (val
	      && INDIRECT_REF_P (val)
	      && COMPLETE_TYPE_P (TREE_TYPE (val))
	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
	    {
	      val = TREE_OPERAND (val, 0);
	      STRIP_NOPS (val);
	      val = maybe_constant_value (val);
	      if (TREE_CODE (val) == INTEGER_CST)
		return fold_offsetof (op0, TREE_TYPE (x));
	    }
	}
      goto finish_unary;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case ABSU_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

    finish_unary:
      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      /* Preserve qualification/side-effect flags lost when the
		 dereference was rebuilt.  */
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      /* decltype(nullptr) has only one value, so optimize away all comparisons
	 with that type right away, keeping them in the IL causes troubles for
	 various optimizations.  */
      if (COMPARISON_CLASS_P (org_x)
	  && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
	  && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	      x = constant_boolean_node (true, TREE_TYPE (x));
	      break;
	    case NE_EXPR:
	      x = constant_boolean_node (false, TREE_TYPE (x));
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  return omit_two_operands_loc (loc, TREE_TYPE (x), x,
					op0, op1);
	}

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);

      /* This is only needed for -Wnonnull-compare and only if
	 TREE_NO_WARNING (org_x), but to avoid that option affecting code
	 generation, we do it always.  */
      if (COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    {
	      if (warn_nonnull_compare
		  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
		suppress_warning (x, OPT_Wnonnull_compare);
	    }
	  /* Otherwise give up on optimizing these, let GIMPLE folders
	     optimize those later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      if (warn_nonnull_compare
		  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
		suppress_warning (x, OPT_Wnonnull_compare);
	    }
	  else
	    x = org_x;
	}

      break;

    case VEC_COND_EXPR:
    case COND_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
	}
      else if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      /* If the condition is constant, fold can fold away
		 the COND_EXPR.  If some statement-level uses of COND_EXPR
		 have one of the branches NULL, avoid folding crash.  */
	      if (!op1)
		op1 = build_empty_stmt (loc);
	      if (!op2)
		op2 = build_empty_stmt (loc);
	    }
	  else
	    {
	      /* Otherwise, don't bother folding a void condition, since
		 it can't produce a constant value.  */
	      if (op0 != TREE_OPERAND (x, 0)
		  || op1 != TREE_OPERAND (x, 1)
		  || op2 != TREE_OPERAND (x, 2))
		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	      break;
	    }
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
	  && x != error_mark_node
	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
	x = fold_convert (TREE_TYPE (org_x), x);

      break;

    case CALL_EXPR:
      {
	tree callee = get_callee_fndecl (x);

	/* "Inline" calls to std::move/forward and other cast-like functions
	   by simply folding them into a corresponding cast to their return
	   type.  This is cheaper than relying on the middle end to do so, and
	   also means we avoid generating useless debug info for them at all.

	   At this point the argument has already been converted into a
	   reference, so it suffices to use a NOP_EXPR to express the
	   cast.  */
	if ((OPTION_SET_P (flag_fold_simple_inlines)
	     ? flag_fold_simple_inlines
	     : !flag_no_inline)
	    && call_expr_nargs (x) == 1
	    && decl_in_std_namespace_p (callee)
	    && DECL_NAME (callee) != NULL_TREE
	    && (id_equal (DECL_NAME (callee), "move")
		|| id_equal (DECL_NAME (callee), "forward")
		|| id_equal (DECL_NAME (callee), "addressof")
		/* This addressof equivalent is used heavily in libstdc++.  */
		|| id_equal (DECL_NAME (callee), "__addressof")
		|| id_equal (DECL_NAME (callee), "as_const")))
	  {
	    r = CALL_EXPR_ARG (x, 0);
	    /* Check that the return and argument types are sane before
	       folding.  */
	    if (INDIRECT_TYPE_P (TREE_TYPE (x))
		&& INDIRECT_TYPE_P (TREE_TYPE (r)))
	      {
		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
		  r = build_nop (TREE_TYPE (x), r);
		x = cp_fold (r);
		break;
	      }
	  }

	int sv = optimize, nw = sv;

	/* Some built-in function calls will be evaluated at compile-time in
	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
	if (callee && fndecl_built_in_p (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;

	if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
	  {
	    switch (DECL_FE_FUNCTION_CODE (callee))
	      {
		/* Defer folding __builtin_is_constant_evaluated.  */
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		x = fold_builtin_source_location (EXPR_LOCATION (x));
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		x = fold_builtin_is_corresponding_member
			(EXPR_LOCATION (x), call_expr_nargs (x),
			 &CALL_EXPR_ARG (x, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		/* NOTE: "inverconvertible" below is the actual (misspelled)
		   name of the helper as declared elsewhere in the front
		   end.  */
		x = fold_builtin_is_pointer_inverconvertible_with_class
			(EXPR_LOCATION (x), call_expr_nargs (x),
			 &CALL_EXPR_ARG (x, 0));
		break;
	      default:
		break;
	      }
	    break;
	  }

	if (callee
	    && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
				  BUILT_IN_FRONTEND))
	  {
	    x = fold_builtin_source_location (EXPR_LOCATION (x));
	    break;
	  }

	/* Fold the arguments, copying X on the first change so the cached
	   original is not modified in place.  */
	bool changed = false;
	int m = call_expr_nargs (x);
	for (int i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i));
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		if (!changed)
		  x = copy_node (x);
		CALL_EXPR_ARG (x, i) = r;
		changed = true;
	      }
	  }
	if (x == error_mark_node)
	  break;

	optimize = nw;
	r = fold (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is not
	   constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  r = maybe_constant_value (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		/* Constant evaluation of a constructor yields the
		   constructed value; turn it back into an initialization
		   of the object argument.  */
		loc = EXPR_LOCATION (x);
		tree s = build_fold_indirect_ref_loc (loc,
						      CALL_EXPR_ARG (x, 0));
		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
	      }
	    x = r;
	    break;
	  }

	break;
      }

    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	/* Lazily copied element vector; stays NULL while nothing folds.  */
	vec<constructor_elt, va_gc> *nelts = NULL;
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    vec_free (nelts);
		    break;
		  }
		if (nelts == NULL)
		  nelts = elts->copy ();
		(*nelts)[i].value = op;
	      }
	  }
	if (nelts)
	  {
	    x = build_constructor (TREE_TYPE (x), nelts);
	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
	  }
	if (VECTOR_TYPE_P (TREE_TYPE (x)))
	  x = fold (x);
	break;
      }
    case TREE_VEC:
      {
	bool changed = false;
	int n = TREE_VEC_LENGTH (x);

	for (int i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i));
	    if (op != TREE_VEC_ELT (x, i))
	      {
		if (!changed)
		  x = copy_node (x);
		TREE_VEC_ELT (x, i) = op;
		changed = true;
	      }
	  }
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}

      x = fold (x);
      break;

    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
	 folding, evaluates to an invariant.  In that case no need to wrap
	 this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
	x = r;
      break;

    case REQUIRES_EXPR:
      x = evaluate_requires_expr (x);
      break;

    default:
      return org_x;
    }

  if (EXPR_P (x) && TREE_CODE (x) == code)
    {
      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
      copy_warning (x, org_x);
    }

  if (!c.evaluation_restricted_p ())
    {
      fold_cache->put (org_x, x);
      /* Prevent that we try to fold an already folded result again.  */
      if (x != org_x)
	fold_cache->put (x, x);
    }

  return x;
}
3052
3053 /* Look up either "hot" or "cold" in attribute list LIST. */
3054
3055 tree
3056 lookup_hotness_attribute (tree list)
3057 {
3058 for (; list; list = TREE_CHAIN (list))
3059 {
3060 tree name = get_attribute_name (list);
3061 if (is_attribute_p ("hot", name)
3062 || is_attribute_p ("cold", name)
3063 || is_attribute_p ("likely", name)
3064 || is_attribute_p ("unlikely", name))
3065 break;
3066 }
3067 return list;
3068 }
3069
3070 /* Remove both "hot" and "cold" attributes from LIST. */
3071
3072 static tree
3073 remove_hotness_attribute (tree list)
3074 {
3075 list = remove_attribute ("hot", list);
3076 list = remove_attribute ("cold", list);
3077 list = remove_attribute ("likely", list);
3078 list = remove_attribute ("unlikely", list);
3079 return list;
3080 }
3081
3082 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3083 PREDICT_EXPR. */
3084
3085 tree
3086 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3087 {
3088 if (std_attrs == error_mark_node)
3089 return std_attrs;
3090 if (tree attr = lookup_hotness_attribute (std_attrs))
3091 {
3092 tree name = get_attribute_name (attr);
3093 bool hot = (is_attribute_p ("hot", name)
3094 || is_attribute_p ("likely", name));
3095 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3096 hot ? TAKEN : NOT_TAKEN);
3097 SET_EXPR_LOCATION (pred, attrs_loc);
3098 add_stmt (pred);
3099 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3100 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3101 get_attribute_name (other), name);
3102 std_attrs = remove_hotness_attribute (std_attrs);
3103 }
3104 return std_attrs;
3105 }
3106
/* Helper of fold_builtin_source_location, return the
   std::source_location::__impl type after performing verification
   on it.  LOC is used for reporting any errors.  */

static tree
get_source_location_impl_type (location_t loc)
{
  /* Look up std::source_location ...  */
  tree name = get_identifier ("source_location");
  tree decl = lookup_qualified_name (std_node, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (std_node, name, decl, loc);
      else
	error_at (loc, "%qD is not a type", decl);
      return error_mark_node;
    }
  /* ... then its nested __impl class.  */
  name = get_identifier ("__impl");
  tree type = TREE_TYPE (decl);
  decl = lookup_qualified_name (type, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (type, name, decl, loc);
      else
	error_at (loc, "%qD is not a type", decl);
      return error_mark_node;
    }
  type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    {
      error_at (loc, "%qD is not a class type", decl);
      return error_mark_node;
    }

  /* Verify the class contains exactly the four expected data members
     with the expected types, and nothing else initializable.  */
  int cnt = 0;
  for (tree field = TYPE_FIELDS (type);
       (field = next_initializable_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      if (DECL_NAME (field) != NULL_TREE)
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  if (strcmp (n, "_M_file_name") == 0
	      || strcmp (n, "_M_function_name") == 0)
	    {
	      if (TREE_TYPE (field) != const_string_type_node)
		{
		  error_at (loc, "%qD does not have %<const char *%> type",
			    field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
	    {
	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
		{
		  error_at (loc, "%qD does not have integral type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	}
      /* Any unnamed or unexpected field invalidates the layout; zeroing
	 CNT forces the diagnostic below.  */
      cnt = 0;
      break;
    }
  if (cnt != 4)
    {
      error_at (loc, "%<std::source_location::__impl%> does not contain only "
		     "non-static data members %<_M_file_name%>, "
		     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
      return error_mark_node;
    }
  return build_qualified_type (type, TYPE_QUAL_CONST);
}
3187
/* Type for source_location_table hash_set.  */
struct GTY((for_user)) source_location_table_entry {
  /* Key part 1: a source location.  */
  location_t loc;
  /* Key part 2: an id distinguishing entries with the same location;
     presumably assigned from source_location_id below -- confirm at the
     population site (not visible in this chunk).  */
  unsigned uid;
  /* Payload: the variable associated with this (loc, uid) pair; does not
     participate in hashing or comparison.  */
  tree var;
};
3194
3195 /* Traits class for function start hash maps below. */
3196
3197 struct source_location_table_entry_hash
3198 : ggc_remove <source_location_table_entry>
3199 {
3200 typedef source_location_table_entry value_type;
3201 typedef source_location_table_entry compare_type;
3202
3203 static hashval_t
3204 hash (const source_location_table_entry &ref)
3205 {
3206 inchash::hash hstate (0);
3207 hstate.add_int (ref.loc);
3208 hstate.add_int (ref.uid);
3209 return hstate.end ();
3210 }
3211
3212 static bool
3213 equal (const source_location_table_entry &ref1,
3214 const source_location_table_entry &ref2)
3215 {
3216 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3217 }
3218
3219 static void
3220 mark_deleted (source_location_table_entry &ref)
3221 {
3222 ref.loc = UNKNOWN_LOCATION;
3223 ref.uid = -1U;
3224 ref.var = NULL_TREE;
3225 }
3226
3227 static const bool empty_zero_p = true;
3228
3229 static void
3230 mark_empty (source_location_table_entry &ref)
3231 {
3232 ref.loc = UNKNOWN_LOCATION;
3233 ref.uid = 0;
3234 ref.var = NULL_TREE;
3235 }
3236
3237 static bool
3238 is_deleted (const source_location_table_entry &ref)
3239 {
3240 return (ref.loc == UNKNOWN_LOCATION
3241 && ref.uid == -1U
3242 && ref.var == NULL_TREE);
3243 }
3244
3245 static bool
3246 is_empty (const source_location_table_entry &ref)
3247 {
3248 return (ref.loc == UNKNOWN_LOCATION
3249 && ref.uid == 0
3250 && ref.var == NULL_TREE);
3251 }
3252
3253 static void
3254 pch_nx (source_location_table_entry &p)
3255 {
3256 extern void gt_pch_nx (source_location_table_entry &);
3257 gt_pch_nx (p);
3258 }
3259
3260 static void
3261 pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
3262 {
3263 extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
3264 void *);
3265 gt_pch_nx (&p, op, cookie);
3266 }
3267 };
3268
/* GC-rooted table mapping (location, function) keys to the static
   variables built for __builtin_source_location, so repeated calls from
   the same spot in the same function share one object.  */
static GTY(()) hash_table <source_location_table_entry_hash>
  *source_location_table;
/* Counter used to generate unique "Lsrc_loc" internal labels for those
   variables.  */
static GTY(()) unsigned int source_location_id;
3272
/* Fold __builtin_source_location () call.  LOC is the location
   of the call.  Returns, as a const void * constant, the address of a
   static const std::source_location::__impl object describing LOC and
   the current function; calls sharing the same location and function
   reuse one object via source_location_table.  */

tree
fold_builtin_source_location (location_t loc)
{
  /* Lazily look up and validate std::source_location::__impl.  The
     auto_diagnostic_group ties the inform below to any error emitted
     during the lookup.  */
  if (source_location_impl == NULL_TREE)
    {
      auto_diagnostic_group d;
      source_location_impl = get_source_location_impl_type (loc);
      if (source_location_impl == error_mark_node)
	inform (loc, "evaluating %qs", "__builtin_source_location");
    }
  if (source_location_impl == error_mark_node)
    /* Error recovery: the type was unusable (already diagnosed), so any
       constant of the right pointer type will do.  */
    return build_zero_cst (const_ptr_type_node);
  if (source_location_table == NULL)
    source_location_table
      = hash_table <source_location_table_entry_hash>::create_ggc (64);
  const line_map_ordinary *map;
  source_location_table_entry entry;
  /* Key on the macro expansion point, so every expansion at one spot
     maps to the same entry, plus the containing function's DECL_UID
     (needed because _M_function_name differs per function); -1U stands
     for "outside any function".  */
  entry.loc
    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
				&map);
  entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
  entry.var = error_mark_node;
  source_location_table_entry *entryp
    = source_location_table->find_slot (entry, INSERT);
  tree var;
  if (entryp->var)
    /* Reuse the variable created by an earlier call with the same key.  */
    var = entryp->var;
  else
    {
      /* First call for this key: build an internal-linkage, constexpr
	 static variable of the __impl type.  */
      char tmp_name[32];
      ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
      var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
			source_location_impl);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      DECL_EXTERNAL (var) = 0;
      DECL_DECLARED_CONSTEXPR_P (var) = 1;
      DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
      layout_decl (var, 0);

      /* Build the initializer, one element per non-static data member.
	 get_source_location_impl_type already verified the type has
	 exactly the four members handled below, hence vec_alloc (v, 4)
	 and the gcc_unreachable default.  */
      vec<constructor_elt, va_gc> *v = NULL;
      vec_alloc (v, 4);
      for (tree field = TYPE_FIELDS (source_location_impl);
	   (field = next_initializable_field (field)) != NULL_TREE;
	   field = DECL_CHAIN (field))
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  tree val = NULL_TREE;
	  if (strcmp (n, "_M_file_name") == 0)
	    {
	      if (const char *fname = LOCATION_FILE (loc))
		{
		  /* Honor -ffile-prefix-map / -fmacro-prefix-map.  */
		  fname = remap_macro_filename (fname);
		  val = build_string_literal (strlen (fname) + 1, fname);
		}
	      else
		val = build_string_literal (1, "");
	    }
	  else if (strcmp (n, "_M_function_name") == 0)
	    {
	      /* Empty string when not inside a function.  */
	      const char *name = "";

	      if (current_function_decl)
		name = cxx_printable_name (current_function_decl, 2);

	      val = build_string_literal (strlen (name) + 1, name);
	    }
	  else if (strcmp (n, "_M_line") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
	  else if (strcmp (n, "_M_column") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
	  else
	    gcc_unreachable ();
	  CONSTRUCTOR_APPEND_ELT (v, field, val);
	}

      tree ctor = build_constructor (source_location_impl, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      /* Cache the variable.  The slot may be a freshly-inserted empty
	 entry, so store the whole key before setting var.  */
      *entryp = entry;
      entryp->var = var;
    }

  return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
}
3365
3366 #include "gt-cp-cp-gimplify.h"
3367