gimple-match-head.cc revision 1.1 1 1.1 mrg /* Preamble and helpers for the autogenerated gimple-match.cc file.
2 1.1 mrg Copyright (C) 2014-2022 Free Software Foundation, Inc.
3 1.1 mrg
4 1.1 mrg This file is part of GCC.
5 1.1 mrg
6 1.1 mrg GCC is free software; you can redistribute it and/or modify it under
7 1.1 mrg the terms of the GNU General Public License as published by the Free
8 1.1 mrg Software Foundation; either version 3, or (at your option) any later
9 1.1 mrg version.
10 1.1 mrg
11 1.1 mrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 1.1 mrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 1.1 mrg FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 1.1 mrg for more details.
15 1.1 mrg
16 1.1 mrg You should have received a copy of the GNU General Public License
17 1.1 mrg along with GCC; see the file COPYING3. If not see
18 1.1 mrg <http://www.gnu.org/licenses/>. */
19 1.1 mrg
20 1.1 mrg #include "config.h"
21 1.1 mrg #include "system.h"
22 1.1 mrg #include "coretypes.h"
23 1.1 mrg #include "backend.h"
24 1.1 mrg #include "target.h"
25 1.1 mrg #include "rtl.h"
26 1.1 mrg #include "tree.h"
27 1.1 mrg #include "gimple.h"
28 1.1 mrg #include "ssa.h"
29 1.1 mrg #include "cgraph.h"
30 1.1 mrg #include "vec-perm-indices.h"
31 1.1 mrg #include "fold-const.h"
32 1.1 mrg #include "fold-const-call.h"
33 1.1 mrg #include "stor-layout.h"
34 1.1 mrg #include "gimple-fold.h"
35 1.1 mrg #include "calls.h"
36 1.1 mrg #include "tree-dfa.h"
37 1.1 mrg #include "builtins.h"
38 1.1 mrg #include "gimple-match.h"
39 1.1 mrg #include "tree-pass.h"
40 1.1 mrg #include "internal-fn.h"
41 1.1 mrg #include "case-cfn-macros.h"
42 1.1 mrg #include "gimplify.h"
43 1.1 mrg #include "optabs-tree.h"
44 1.1 mrg #include "tree-eh.h"
45 1.1 mrg #include "dbgcnt.h"
46 1.1 mrg #include "tm.h"
47 1.1 mrg #include "gimple-range.h"
48 1.1 mrg #include "attribs.h"
49 1.1 mrg #include "asan.h"
50 1.1 mrg
/* Forward declarations of the private auto-generated matchers.
   They expect valueized operands in canonical order and do not
   perform simplification of all-constant operands.
   One gimple_simplify overload per arity (1-5 trailing operands);
   the gimple_resimplifyN helpers below drive them.  */
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree, tree, tree, tree);
static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *,
				tree (*)(tree));
static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *,
				tree (*)(tree));
static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *,
				tree (*)(tree));
static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *,
				tree (*)(tree));
static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *,
				tree (*)(tree));
74 1.1 mrg
/* Out-of-line definition for the static data member declared in
   gimple-match.h, needed when the constant is odr-used.  */
const unsigned int gimple_match_op::MAX_NUM_OPS;
76 1.1 mrg
77 1.1 mrg /* Return whether T is a constant that we'll dispatch to fold to
78 1.1 mrg evaluate fully constant expressions. */
79 1.1 mrg
80 1.1 mrg static inline bool
81 1.1 mrg constant_for_folding (tree t)
82 1.1 mrg {
83 1.1 mrg return (CONSTANT_CLASS_P (t)
84 1.1 mrg /* The following is only interesting to string builtins. */
85 1.1 mrg || (TREE_CODE (t) == ADDR_EXPR
86 1.1 mrg && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
87 1.1 mrg }
88 1.1 mrg
89 1.1 mrg /* Try to convert conditional operation ORIG_OP into an IFN_COND_*
90 1.1 mrg operation. Return true on success, storing the new operation in NEW_OP. */
91 1.1 mrg
92 1.1 mrg static bool
93 1.1 mrg convert_conditional_op (gimple_match_op *orig_op,
94 1.1 mrg gimple_match_op *new_op)
95 1.1 mrg {
96 1.1 mrg internal_fn ifn;
97 1.1 mrg if (orig_op->code.is_tree_code ())
98 1.1 mrg ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
99 1.1 mrg else
100 1.1 mrg {
101 1.1 mrg auto cfn = combined_fn (orig_op->code);
102 1.1 mrg if (!internal_fn_p (cfn))
103 1.1 mrg return false;
104 1.1 mrg ifn = get_conditional_internal_fn (as_internal_fn (cfn));
105 1.1 mrg }
106 1.1 mrg if (ifn == IFN_LAST)
107 1.1 mrg return false;
108 1.1 mrg unsigned int num_ops = orig_op->num_ops;
109 1.1 mrg new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
110 1.1 mrg new_op->ops[0] = orig_op->cond.cond;
111 1.1 mrg for (unsigned int i = 0; i < num_ops; ++i)
112 1.1 mrg new_op->ops[i + 1] = orig_op->ops[i];
113 1.1 mrg tree else_value = orig_op->cond.else_value;
114 1.1 mrg if (!else_value)
115 1.1 mrg else_value = targetm.preferred_else_value (ifn, orig_op->type,
116 1.1 mrg num_ops, orig_op->ops);
117 1.1 mrg new_op->ops[num_ops + 1] = else_value;
118 1.1 mrg return true;
119 1.1 mrg }
120 1.1 mrg
/* RES_OP is the result of a simplification.  If it is conditional,
   try to replace it with the equivalent UNCOND form, such as an
   IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
   result of the replacement if appropriate, adding any new statements to
   SEQ and using VALUEIZE as the valueization function.  Return true if
   this resimplification occurred and resulted in at least one change.  */

static bool
maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
				 tree (*valueize) (tree))
{
  /* Nothing to do for an unconditional result.  */
  if (!res_op->cond.cond)
    return false;

  if (!res_op->cond.else_value
      && res_op->code.is_tree_code ())
    {
      /* The "else" value doesn't matter.  If the "then" value is a
	 gimple value, just use it unconditionally.  This isn't a
	 simplification in itself, since there was no operation to
	 build in the first place.  */
      if (gimple_simplified_result_is_gimple_val (res_op))
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}

      /* Likewise if the operation would not trap.  */
      bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
			  && TYPE_OVERFLOW_TRAPS (res_op->type));
      tree_code op_code = (tree_code) res_op->code;
      bool op_could_trap;

      /* COND_EXPR will trap if, and only if, the condition
	 traps and hence we have to check this.  For all other operations, we
	 don't need to consider the operands.  */
      if (op_code == COND_EXPR)
	op_could_trap = generic_expr_could_trap_p (res_op->ops[0]);
      else
	op_could_trap = operation_could_trap_p ((tree_code) res_op->code,
						FLOAT_TYPE_P (res_op->type),
						honor_trapv,
						res_op->op_or_null (1));

      /* A trap-free operation can be used unconditionally too.  */
      if (!op_could_trap)
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}
    }

  /* If the "then" value is a gimple value and the "else" value matters,
     create a VEC_COND_EXPR between them, then see if it can be further
     simplified.  */
  gimple_match_op new_op;
  if (res_op->cond.else_value
      && VECTOR_TYPE_P (res_op->type)
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      new_op.set_op (VEC_COND_EXPR, res_op->type,
		     res_op->cond.cond, res_op->ops[0],
		     res_op->cond.else_value);
      *res_op = new_op;
      return gimple_resimplify3 (seq, res_op, valueize);
    }

  /* Otherwise try rewriting the operation as an IFN_COND_* call.
     Again, this isn't a simplification in itself, since it's what
     RES_OP already described.  */
  if (convert_conditional_op (res_op, &new_op))
    *res_op = new_op;

  return false;
}
195 1.1 mrg
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of the single operand, either
     through const_unop for tree codes or fold_const_call for
     built-in/internal functions.  */
  if (constant_for_folding (res_op->ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 1)
	    tem = const_unop (code, res_op->type, res_op->ops[0]);
	}
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Limit recursion, there are cases like PR80887 and others, for
     example when value-numbering presents us with unfolded expressions
     that we are really not prepared to handle without eventual
     oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
     itself as available expression.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  /* Work on a copy so RES_OP stays intact if nothing matched.  */
  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type, res_op->ops[0]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}
260 1.1 mrg
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of both operands, either through
     const_binop for tree codes or fold_const_call for built-in/internal
     functions.  */
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 2)
	    tem = const_binop (code, res_op->type,
			       res_op->ops[0], res_op->ops[1]);
	}
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order.  For comparisons, swapping the operands
     also requires swapping the comparison code.  */
  bool canonicalized = false;
  bool is_comparison
    = (res_op->code.is_tree_code ()
       && TREE_CODE_CLASS (tree_code (res_op->code)) == tcc_comparison);
  if ((is_comparison || commutative_binary_op_p (res_op->code, res_op->type))
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      if (is_comparison)
	res_op->code = swap_tree_comparison (tree_code (res_op->code));
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  /* Work on a copy so RES_OP stays intact if nothing matched.  */
  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
338 1.1 mrg
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of all three operands, either
     through fold_ternary for tree codes or fold_const_call for
     built-in/internal functions.  */
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1])
      && constant_for_folding (res_op->ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 3)
	    tem = fold_ternary/*_to_constant*/ (code, res_op->type,
						res_op->ops[0], res_op->ops[1],
						res_op->ops[2]);
	}
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1], res_op->ops[2]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order of the first commutative operand pair,
     if any.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  /* Work on a copy so RES_OP stays intact if nothing matched.  */
  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
414 1.1 mrg
415 1.1 mrg /* Helper that matches and simplifies the toplevel result from
416 1.1 mrg a gimple_simplify run (where we don't want to build
417 1.1 mrg a stmt in case it's used in in-place folding). Replaces
418 1.1 mrg RES_OP with a simplified and/or canonicalized result and
419 1.1 mrg returns whether any change was made. */
420 1.1 mrg
421 1.1 mrg static bool
422 1.1 mrg gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
423 1.1 mrg tree (*valueize)(tree))
424 1.1 mrg {
425 1.1 mrg /* No constant folding is defined for four-operand functions. */
426 1.1 mrg
427 1.1 mrg /* Canonicalize operand order. */
428 1.1 mrg bool canonicalized = false;
429 1.1 mrg int argno = first_commutative_argument (res_op->code, res_op->type);
430 1.1 mrg if (argno >= 0
431 1.1 mrg && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
432 1.1 mrg {
433 1.1 mrg std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
434 1.1 mrg canonicalized = true;
435 1.1 mrg }
436 1.1 mrg
437 1.1 mrg /* Limit recursion, see gimple_resimplify1. */
438 1.1 mrg static unsigned depth;
439 1.1 mrg if (depth > 10)
440 1.1 mrg {
441 1.1 mrg if (dump_file && (dump_flags & TDF_FOLDING))
442 1.1 mrg fprintf (dump_file, "Aborting expression simplification due to "
443 1.1 mrg "deep recursion\n");
444 1.1 mrg return false;
445 1.1 mrg }
446 1.1 mrg
447 1.1 mrg ++depth;
448 1.1 mrg gimple_match_op res_op2 (*res_op);
449 1.1 mrg if (gimple_simplify (&res_op2, seq, valueize,
450 1.1 mrg res_op->code, res_op->type,
451 1.1 mrg res_op->ops[0], res_op->ops[1], res_op->ops[2],
452 1.1 mrg res_op->ops[3]))
453 1.1 mrg {
454 1.1 mrg --depth;
455 1.1 mrg *res_op = res_op2;
456 1.1 mrg return true;
457 1.1 mrg }
458 1.1 mrg --depth;
459 1.1 mrg
460 1.1 mrg if (maybe_resimplify_conditional_op (seq, res_op, valueize))
461 1.1 mrg return true;
462 1.1 mrg
463 1.1 mrg return canonicalized;
464 1.1 mrg }
465 1.1 mrg
466 1.1 mrg /* Helper that matches and simplifies the toplevel result from
467 1.1 mrg a gimple_simplify run (where we don't want to build
468 1.1 mrg a stmt in case it's used in in-place folding). Replaces
469 1.1 mrg RES_OP with a simplified and/or canonicalized result and
470 1.1 mrg returns whether any change was made. */
471 1.1 mrg
472 1.1 mrg static bool
473 1.1 mrg gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
474 1.1 mrg tree (*valueize)(tree))
475 1.1 mrg {
476 1.1 mrg /* No constant folding is defined for five-operand functions. */
477 1.1 mrg
478 1.1 mrg /* Canonicalize operand order. */
479 1.1 mrg bool canonicalized = false;
480 1.1 mrg int argno = first_commutative_argument (res_op->code, res_op->type);
481 1.1 mrg if (argno >= 0
482 1.1 mrg && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
483 1.1 mrg {
484 1.1 mrg std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
485 1.1 mrg canonicalized = true;
486 1.1 mrg }
487 1.1 mrg
488 1.1 mrg gimple_match_op res_op2 (*res_op);
489 1.1 mrg if (gimple_simplify (&res_op2, seq, valueize,
490 1.1 mrg res_op->code, res_op->type,
491 1.1 mrg res_op->ops[0], res_op->ops[1], res_op->ops[2],
492 1.1 mrg res_op->ops[3], res_op->ops[4]))
493 1.1 mrg {
494 1.1 mrg *res_op = res_op2;
495 1.1 mrg return true;
496 1.1 mrg }
497 1.1 mrg
498 1.1 mrg if (maybe_resimplify_conditional_op (seq, res_op, valueize))
499 1.1 mrg return true;
500 1.1 mrg
501 1.1 mrg return canonicalized;
502 1.1 mrg }
503 1.1 mrg
504 1.1 mrg /* Match and simplify the toplevel valueized operation THIS.
505 1.1 mrg Replaces THIS with a simplified and/or canonicalized result and
506 1.1 mrg returns whether any change was made. */
507 1.1 mrg
508 1.1 mrg bool
509 1.1 mrg gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree))
510 1.1 mrg {
511 1.1 mrg switch (num_ops)
512 1.1 mrg {
513 1.1 mrg case 1:
514 1.1 mrg return gimple_resimplify1 (seq, this, valueize);
515 1.1 mrg case 2:
516 1.1 mrg return gimple_resimplify2 (seq, this, valueize);
517 1.1 mrg case 3:
518 1.1 mrg return gimple_resimplify3 (seq, this, valueize);
519 1.1 mrg case 4:
520 1.1 mrg return gimple_resimplify4 (seq, this, valueize);
521 1.1 mrg case 5:
522 1.1 mrg return gimple_resimplify5 (seq, this, valueize);
523 1.1 mrg default:
524 1.1 mrg gcc_unreachable ();
525 1.1 mrg }
526 1.1 mrg }
527 1.1 mrg
528 1.1 mrg /* If in GIMPLE the operation described by RES_OP should be single-rhs,
529 1.1 mrg build a GENERIC tree for that expression and update RES_OP accordingly. */
530 1.1 mrg
531 1.1 mrg void
532 1.1 mrg maybe_build_generic_op (gimple_match_op *res_op)
533 1.1 mrg {
534 1.1 mrg tree_code code = (tree_code) res_op->code;
535 1.1 mrg tree val;
536 1.1 mrg switch (code)
537 1.1 mrg {
538 1.1 mrg case REALPART_EXPR:
539 1.1 mrg case IMAGPART_EXPR:
540 1.1 mrg case VIEW_CONVERT_EXPR:
541 1.1 mrg val = build1 (code, res_op->type, res_op->ops[0]);
542 1.1 mrg res_op->set_value (val);
543 1.1 mrg break;
544 1.1 mrg case BIT_FIELD_REF:
545 1.1 mrg val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
546 1.1 mrg res_op->ops[2]);
547 1.1 mrg REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
548 1.1 mrg res_op->set_value (val);
549 1.1 mrg break;
550 1.1 mrg default:;
551 1.1 mrg }
552 1.1 mrg }
553 1.1 mrg
/* Optional hook consulted by maybe_push_res_to_seq: when non-null it is
   given the chance to produce the result value itself; a NULL_TREE
   return falls back to the default statement-building path.  */
tree (*mprts_hook) (gimple_match_op *);
555 1.1 mrg
556 1.1 mrg /* Try to build RES_OP, which is known to be a call to FN. Return null
557 1.1 mrg if the target doesn't support the function. */
558 1.1 mrg
559 1.1 mrg static gcall *
560 1.1 mrg build_call_internal (internal_fn fn, gimple_match_op *res_op)
561 1.1 mrg {
562 1.1 mrg if (direct_internal_fn_p (fn))
563 1.1 mrg {
564 1.1 mrg tree_pair types = direct_internal_fn_types (fn, res_op->type,
565 1.1 mrg res_op->ops);
566 1.1 mrg if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
567 1.1 mrg return NULL;
568 1.1 mrg }
569 1.1 mrg return gimple_build_call_internal (fn, res_op->num_ops,
570 1.1 mrg res_op->op_or_null (0),
571 1.1 mrg res_op->op_or_null (1),
572 1.1 mrg res_op->op_or_null (2),
573 1.1 mrg res_op->op_or_null (3),
574 1.1 mrg res_op->op_or_null (4));
575 1.1 mrg }
576 1.1 mrg
/* Push the exploded expression described by RES_OP as a statement to
   SEQ if necessary and return a gimple value denoting the value of the
   expression.  If RES is not NULL then the result will be always RES
   and even gimple values are pushed to SEQ.  */

tree
maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
{
  tree *ops = res_op->ops;
  unsigned num_ops = res_op->num_ops;

  /* The caller should have converted conditional operations into an UNCOND
     form and resimplified as appropriate.  The conditional form only
     survives this far if that conversion failed.  */
  if (res_op->cond.cond)
    return NULL_TREE;

  if (res_op->code.is_tree_code ())
    {
      /* A plain gimple value needs no statement unless the caller
	 insists on a specific result name.  */
      if (!res
	  && gimple_simplified_result_is_gimple_val (res_op))
	return ops[0];
      /* Give the mprts_hook, if installed, first shot at producing
	 the result.  */
      if (mprts_hook)
	{
	  tree tem = mprts_hook (res_op);
	  if (tem)
	    return tem;
	}
    }

  /* From here on a new statement is needed; give up if the caller
     did not provide a sequence to push it to.  */
  if (!seq)
    return NULL_TREE;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
      return NULL_TREE;

  /* Likewise check the operands of an embedded comparison.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
	return NULL_TREE;

  if (res_op->code.is_tree_code ())
    {
      /* Build an assignment from the tree-code operation.  */
      auto code = tree_code (res_op->code);
      if (!res)
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (res_op->type);
	  else
	    res = create_tmp_reg (res_op->type);
	}
      maybe_build_generic_op (res_op);
      gimple *new_stmt = gimple_build_assign (res, code,
					      res_op->op_or_null (0),
					      res_op->op_or_null (1),
					      res_op->op_or_null (2));
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      gcc_assert (num_ops != 0);
      auto fn = combined_fn (res_op->code);
      gcall *new_stmt = NULL;
      if (internal_fn_p (fn))
	{
	  /* Generate the given function if we can.  */
	  internal_fn ifn = as_internal_fn (fn);
	  new_stmt = build_call_internal (ifn, res_op);
	  if (!new_stmt)
	    return NULL_TREE;
	}
      else
	{
	  /* Find the function we want to call.  */
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  if (!decl)
	    return NULL;

	  /* We can't and should not emit calls to non-const functions.  */
	  if (!(flags_from_decl_or_type (decl) & ECF_CONST))
	    return NULL;

	  new_stmt = gimple_build_call (decl, num_ops,
					res_op->op_or_null (0),
					res_op->op_or_null (1),
					res_op->op_or_null (2),
					res_op->op_or_null (3),
					res_op->op_or_null (4));
	}
      if (!res)
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (res_op->type);
	  else
	    res = create_tmp_reg (res_op->type);
	}
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}
684 1.1 mrg
685 1.1 mrg
686 1.1 mrg /* Public API overloads follow for operation being tree_code or
687 1.1 mrg built_in_function and for one to three operands or arguments.
688 1.1 mrg They return NULL_TREE if nothing could be simplified or
689 1.1 mrg the resulting simplified value with parts pushed to SEQ.
690 1.1 mrg If SEQ is NULL then if the simplification needs to create
691 1.1 mrg new stmts it will fail. If VALUEIZE is non-NULL then all
692 1.1 mrg SSA names will be valueized using that hook prior to
693 1.1 mrg applying simplifications. */
694 1.1 mrg
695 1.1 mrg /* Unary ops. */
696 1.1 mrg
697 1.1 mrg tree
698 1.1 mrg gimple_simplify (enum tree_code code, tree type,
699 1.1 mrg tree op0,
700 1.1 mrg gimple_seq *seq, tree (*valueize)(tree))
701 1.1 mrg {
702 1.1 mrg if (constant_for_folding (op0))
703 1.1 mrg {
704 1.1 mrg tree res = const_unop (code, type, op0);
705 1.1 mrg if (res != NULL_TREE
706 1.1 mrg && CONSTANT_CLASS_P (res))
707 1.1 mrg return res;
708 1.1 mrg }
709 1.1 mrg
710 1.1 mrg gimple_match_op res_op;
711 1.1 mrg if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
712 1.1 mrg return NULL_TREE;
713 1.1 mrg return maybe_push_res_to_seq (&res_op, seq);
714 1.1 mrg }
715 1.1 mrg
716 1.1 mrg /* Binary ops. */
717 1.1 mrg
718 1.1 mrg tree
719 1.1 mrg gimple_simplify (enum tree_code code, tree type,
720 1.1 mrg tree op0, tree op1,
721 1.1 mrg gimple_seq *seq, tree (*valueize)(tree))
722 1.1 mrg {
723 1.1 mrg if (constant_for_folding (op0) && constant_for_folding (op1))
724 1.1 mrg {
725 1.1 mrg tree res = const_binop (code, type, op0, op1);
726 1.1 mrg if (res != NULL_TREE
727 1.1 mrg && CONSTANT_CLASS_P (res))
728 1.1 mrg return res;
729 1.1 mrg }
730 1.1 mrg
731 1.1 mrg /* Canonicalize operand order both for matching and fallback stmt
732 1.1 mrg generation. */
733 1.1 mrg if ((commutative_tree_code (code)
734 1.1 mrg || TREE_CODE_CLASS (code) == tcc_comparison)
735 1.1 mrg && tree_swap_operands_p (op0, op1))
736 1.1 mrg {
737 1.1 mrg std::swap (op0, op1);
738 1.1 mrg if (TREE_CODE_CLASS (code) == tcc_comparison)
739 1.1 mrg code = swap_tree_comparison (code);
740 1.1 mrg }
741 1.1 mrg
742 1.1 mrg gimple_match_op res_op;
743 1.1 mrg if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
744 1.1 mrg return NULL_TREE;
745 1.1 mrg return maybe_push_res_to_seq (&res_op, seq);
746 1.1 mrg }
747 1.1 mrg
748 1.1 mrg /* Ternary ops. */
749 1.1 mrg
750 1.1 mrg tree
751 1.1 mrg gimple_simplify (enum tree_code code, tree type,
752 1.1 mrg tree op0, tree op1, tree op2,
753 1.1 mrg gimple_seq *seq, tree (*valueize)(tree))
754 1.1 mrg {
755 1.1 mrg if (constant_for_folding (op0) && constant_for_folding (op1)
756 1.1 mrg && constant_for_folding (op2))
757 1.1 mrg {
758 1.1 mrg tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
759 1.1 mrg if (res != NULL_TREE
760 1.1 mrg && CONSTANT_CLASS_P (res))
761 1.1 mrg return res;
762 1.1 mrg }
763 1.1 mrg
764 1.1 mrg /* Canonicalize operand order both for matching and fallback stmt
765 1.1 mrg generation. */
766 1.1 mrg if (commutative_ternary_tree_code (code)
767 1.1 mrg && tree_swap_operands_p (op0, op1))
768 1.1 mrg std::swap (op0, op1);
769 1.1 mrg
770 1.1 mrg gimple_match_op res_op;
771 1.1 mrg if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
772 1.1 mrg return NULL_TREE;
773 1.1 mrg return maybe_push_res_to_seq (&res_op, seq);
774 1.1 mrg }
775 1.1 mrg
776 1.1 mrg /* Builtin or internal function with one argument. */
777 1.1 mrg
778 1.1 mrg tree
779 1.1 mrg gimple_simplify (combined_fn fn, tree type,
780 1.1 mrg tree arg0,
781 1.1 mrg gimple_seq *seq, tree (*valueize)(tree))
782 1.1 mrg {
783 1.1 mrg if (constant_for_folding (arg0))
784 1.1 mrg {
785 1.1 mrg tree res = fold_const_call (fn, type, arg0);
786 1.1 mrg if (res && CONSTANT_CLASS_P (res))
787 1.1 mrg return res;
788 1.1 mrg }
789 1.1 mrg
790 1.1 mrg gimple_match_op res_op;
791 1.1 mrg if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
792 1.1 mrg return NULL_TREE;
793 1.1 mrg return maybe_push_res_to_seq (&res_op, seq);
794 1.1 mrg }
795 1.1 mrg
796 1.1 mrg /* Builtin or internal function with two arguments. */
797 1.1 mrg
798 1.1 mrg tree
799 1.1 mrg gimple_simplify (combined_fn fn, tree type,
800 1.1 mrg tree arg0, tree arg1,
801 1.1 mrg gimple_seq *seq, tree (*valueize)(tree))
802 1.1 mrg {
803 1.1 mrg if (constant_for_folding (arg0)
804 1.1 mrg && constant_for_folding (arg1))
805 1.1 mrg {
806 1.1 mrg tree res = fold_const_call (fn, type, arg0, arg1);
807 1.1 mrg if (res && CONSTANT_CLASS_P (res))
808 1.1 mrg return res;
809 1.1 mrg }
810 1.1 mrg
811 1.1 mrg gimple_match_op res_op;
812 1.1 mrg if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
813 1.1 mrg return NULL_TREE;
814 1.1 mrg return maybe_push_res_to_seq (&res_op, seq);
815 1.1 mrg }
816 1.1 mrg
817 1.1 mrg /* Builtin or internal function with three arguments. */
818 1.1 mrg
819 1.1 mrg tree
820 1.1 mrg gimple_simplify (combined_fn fn, tree type,
821 1.1 mrg tree arg0, tree arg1, tree arg2,
822 1.1 mrg gimple_seq *seq, tree (*valueize)(tree))
823 1.1 mrg {
824 1.1 mrg if (constant_for_folding (arg0)
825 1.1 mrg && constant_for_folding (arg1)
826 1.1 mrg && constant_for_folding (arg2))
827 1.1 mrg {
828 1.1 mrg tree res = fold_const_call (fn, type, arg0, arg1, arg2);
829 1.1 mrg if (res && CONSTANT_CLASS_P (res))
830 1.1 mrg return res;
831 1.1 mrg }
832 1.1 mrg
833 1.1 mrg gimple_match_op res_op;
834 1.1 mrg if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
835 1.1 mrg return NULL_TREE;
836 1.1 mrg return maybe_push_res_to_seq (&res_op, seq);
837 1.1 mrg }
838 1.1 mrg
839 1.1 mrg /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
840 1.1 mrg VALUEIZED to true if valueization changed OP. */
841 1.1 mrg
842 1.1 mrg static inline tree
843 1.1 mrg do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
844 1.1 mrg {
845 1.1 mrg if (valueize && TREE_CODE (op) == SSA_NAME)
846 1.1 mrg {
847 1.1 mrg tree tem = valueize (op);
848 1.1 mrg if (tem && tem != op)
849 1.1 mrg {
850 1.1 mrg op = tem;
851 1.1 mrg valueized = true;
852 1.1 mrg }
853 1.1 mrg }
854 1.1 mrg return op;
855 1.1 mrg }
856 1.1 mrg
/* If RES_OP is a call to a conditional internal function, try simplifying
   the associated unconditional operation and using the result to build
   a new conditional operation.  For example, if RES_OP is:

     IFN_COND_ADD (COND, A, B, ELSE)

   try simplifying (plus A B) and using the result to build a replacement
   for the whole IFN_COND_ADD.

   Return true if this approach led to a simplification, otherwise leave
   RES_OP unchanged (and so suitable for other simplifications).  When
   returning true, add any new statements to SEQ and use VALUEIZE as the
   valueization function.

   RES_OP is known to be a call to IFN.  */

static bool
try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
				gimple_seq *seq, tree (*valueize) (tree))
{
  /* Find the unconditional counterpart of IFN: either a tree code
     (e.g. a conditional add maps to PLUS_EXPR) or, failing that,
     another internal function.  Bail out if there is neither.  */
  code_helper op;
  tree_code code = conditional_internal_fn_code (ifn);
  if (code != ERROR_MARK)
    op = code;
  else
    {
      ifn = get_unconditional_internal_fn (ifn);
      if (ifn == IFN_LAST)
	return false;
      op = as_combined_fn (ifn);
    }

  /* Build the unconditional operation, recording the mask (operand 0)
     and the else value (last operand) in the match condition so they
     can be reinstated when rebuilding the conditional form.  */
  unsigned int num_ops = res_op->num_ops;
  gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
					      res_op->ops[num_ops - 1]),
			   op, res_op->type, num_ops - 2);

  /* Copy the remaining operands (everything after the mask) into the
     unconditional operation's operand array.  */
  memcpy (cond_op.ops, res_op->ops + 1, (num_ops - 1) * sizeof *cond_op.ops);
  /* Dispatch on the arity of the unconditional operation; failure to
     simplify leaves RES_OP untouched as documented above.  */
  switch (num_ops - 2)
    {
    case 1:
      if (!gimple_resimplify1 (seq, &cond_op, valueize))
	return false;
      break;
    case 2:
      if (!gimple_resimplify2 (seq, &cond_op, valueize))
	return false;
      break;
    case 3:
      if (!gimple_resimplify3 (seq, &cond_op, valueize))
	return false;
      break;
    default:
      gcc_unreachable ();
    }
  /* The unconditional form simplified; adopt it and let
     maybe_resimplify_conditional_op rebuild the conditional version.  */
  *res_op = cond_op;
  maybe_resimplify_conditional_op (seq, res_op, valueize);
  return true;
}
916 1.1 mrg
/* Common subroutine of gimple_extract_op and gimple_simplify.  Try to
   describe STMT in RES_OP, returning true on success.  Before recording
   an operand, call:

   - VALUEIZE_CONDITION for a COND_EXPR condition
   - VALUEIZE_OP for every other top-level operand

   Both routines take a tree argument and return a tree.  */

template<typename ValueizeOp, typename ValueizeCondition>
inline bool
gimple_extract (gimple *stmt, gimple_match_op *res_op,
		ValueizeOp valueize_op,
		ValueizeCondition valueize_condition)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree type = TREE_TYPE (gimple_assign_lhs (stmt));
	switch (gimple_assign_rhs_class (stmt))
	  {
	  case GIMPLE_SINGLE_RHS:
	    /* A single-rhs assignment is only interesting when the rhs
	       is one of the reference-like unary wrappers or a plain
	       SSA-name copy; reject everything else (memory refs etc).  */
	    if (code == REALPART_EXPR
		|| code == IMAGPART_EXPR
		|| code == VIEW_CONVERT_EXPR)
	      {
		tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
		/* op0 needs to be an SSA name or a min invariant.  */
		if (TREE_CODE (op0) != SSA_NAME && !is_gimple_min_invariant (op0))
		  return false;
		res_op->set_op (code, type, valueize_op (op0));
		return true;
	      }
	    else if (code == BIT_FIELD_REF)
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		tree op0 = valueize_op (TREE_OPERAND (rhs1, 0));
		/* op0 needs to be an SSA name or a min invariant.  */
		if (TREE_CODE (op0) != SSA_NAME && !is_gimple_min_invariant (op0))
		  return false;
		/* Record the size/position operands and the storage-order
		   flag alongside the valueized base.  */
		res_op->set_op (code, type, op0,
				TREE_OPERAND (rhs1, 1),
				TREE_OPERAND (rhs1, 2),
				REF_REVERSE_STORAGE_ORDER (rhs1));
		return true;
	      }
	    else if (code == SSA_NAME)
	      {
		/* Plain copy from another SSA name.  */
		tree op0 = gimple_assign_rhs1 (stmt);
		res_op->set_op (TREE_CODE (op0), type, valueize_op (op0));
		return true;
	      }
	    break;
	  case GIMPLE_UNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      res_op->set_op (code, type, valueize_op (rhs1));
	      return true;
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      tree rhs1 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree rhs2 = valueize_op (gimple_assign_rhs2 (stmt));
	      res_op->set_op (code, type, rhs1, rhs2);
	      return true;
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      /* An embedded comparison in a COND_EXPR gets the dedicated
		 condition hook; all other operands use VALUEIZE_OP.  */
	      if (code == COND_EXPR && COMPARISON_CLASS_P (rhs1))
		rhs1 = valueize_condition (rhs1);
	      else
		rhs1 = valueize_op (rhs1);
	      tree rhs2 = valueize_op (gimple_assign_rhs2 (stmt));
	      tree rhs3 = valueize_op (gimple_assign_rhs3 (stmt));
	      res_op->set_op (code, type, rhs1, rhs2, rhs3);
	      return true;
	    }
	  default:
	    gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_CALL:
      /* ??? This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
	  && gimple_call_num_args (stmt) >= 1
	  && gimple_call_num_args (stmt) <= 5)
	{
	  /* Map the call target to a combined_fn: directly for internal
	     functions, via the callee decl for normal builtins.  */
	  combined_fn cfn;
	  if (gimple_call_internal_p (stmt))
	    cfn = as_combined_fn (gimple_call_internal_fn (stmt));
	  else
	    {
	      tree fn = gimple_call_fn (stmt);
	      if (!fn)
		return false;

	      fn = valueize_op (fn);
	      if (TREE_CODE (fn) != ADDR_EXPR
		  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
		return false;

	      /* Only normal builtins whose call matches the declared
		 prototype are safe to describe by function code.  */
	      tree decl = TREE_OPERAND (fn, 0);
	      if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
		  || !gimple_builtin_call_types_compatible_p (stmt, decl))
		return false;

	      cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
	    }

	  unsigned int num_args = gimple_call_num_args (stmt);
	  res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
	  for (unsigned i = 0; i < num_args; ++i)
	    res_op->ops[i] = valueize_op (gimple_call_arg (stmt, i));
	  return true;
	}
      break;

    case GIMPLE_COND:
      {
	/* Describe the condition being tested as a boolean comparison.  */
	tree lhs = valueize_op (gimple_cond_lhs (stmt));
	tree rhs = valueize_op (gimple_cond_rhs (stmt));
	res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
	return true;
      }

    default:
      break;
    }

  return false;
}
1053 1.1 mrg
1054 1.1 mrg /* Try to describe STMT in RES_OP, returning true on success.
1055 1.1 mrg For GIMPLE_CONDs, describe the condition that is being tested.
1056 1.1 mrg For GIMPLE_ASSIGNs, describe the rhs of the assignment.
1057 1.1 mrg For GIMPLE_CALLs, describe the call. */
1058 1.1 mrg
1059 1.1 mrg bool
1060 1.1 mrg gimple_extract_op (gimple *stmt, gimple_match_op *res_op)
1061 1.1 mrg {
1062 1.1 mrg auto nop = [](tree op) { return op; };
1063 1.1 mrg return gimple_extract (stmt, res_op, nop, nop);
1064 1.1 mrg }
1065 1.1 mrg
/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  */

bool
gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
		 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  /* Set when TOP_VALUEIZE changed any operand; in that case we report
     success even if no pattern simplified the extracted operation.  */
  bool valueized = false;
  auto valueize_op = [&](tree op)
    {
      return do_valueize (op, top_valueize, valueized);
    };
  /* Hook for an embedded COND_EXPR condition: valueize its operands and
     try to resimplify the comparison, accepting only results that can
     be re-expressed as a GENERIC tree (a comparison, an SSA name or a
     constant).  Otherwise fall back to plain valueization of OP.  */
  auto valueize_condition = [&](tree op) -> tree
    {
      bool cond_valueized = false;
      tree lhs = do_valueize (TREE_OPERAND (op, 0), top_valueize,
			      cond_valueized);
      tree rhs = do_valueize (TREE_OPERAND (op, 1), top_valueize,
			      cond_valueized);
      gimple_match_op res_op2 (res_op->cond, TREE_CODE (op),
			       TREE_TYPE (op), lhs, rhs);
      if ((gimple_resimplify2 (seq, &res_op2, valueize)
	   || cond_valueized)
	  && res_op2.code.is_tree_code ())
	{
	  auto code = tree_code (res_op2.code);
	  if (TREE_CODE_CLASS (code) == tcc_comparison)
	    {
	      /* Still a comparison; rebuild it with the new operands.  */
	      valueized = true;
	      return build2 (code, TREE_TYPE (op),
			     res_op2.ops[0], res_op2.ops[1]);
	    }
	  else if (code == SSA_NAME
		   || code == INTEGER_CST
		   || code == VECTOR_CST)
	    {
	      /* The condition collapsed to a single value.  */
	      valueized = true;
	      return res_op2.ops[0];
	    }
	}
      return valueize_op (op);
    };

  if (!gimple_extract (stmt, res_op, valueize_op, valueize_condition))
    return false;

  /* For calls to conditional internal functions, first try simplifying
     the associated unconditional operation.  */
  if (res_op->code.is_internal_fn ())
    {
      internal_fn ifn = internal_fn (res_op->code);
      if (try_conditional_simplification (ifn, res_op, seq, valueize))
	return true;
    }

  /* Reverse-storage-order accesses and zero-operand results are not
     handed to the pattern matchers.  */
  if (!res_op->reverse
      && res_op->num_ops
      && res_op->resimplify (seq, valueize))
    return true;

  return valueized;
}
1126 1.1 mrg
1127 1.1 mrg /* Helper for the autogenerated code, valueize OP. */
1128 1.1 mrg
1129 1.1 mrg inline tree
1130 1.1 mrg do_valueize (tree (*valueize)(tree), tree op)
1131 1.1 mrg {
1132 1.1 mrg if (valueize && TREE_CODE (op) == SSA_NAME)
1133 1.1 mrg {
1134 1.1 mrg tree tem = valueize (op);
1135 1.1 mrg if (tem)
1136 1.1 mrg return tem;
1137 1.1 mrg }
1138 1.1 mrg return op;
1139 1.1 mrg }
1140 1.1 mrg
1141 1.1 mrg /* Helper for the autogenerated code, get at the definition of NAME when
1142 1.1 mrg VALUEIZE allows that. */
1143 1.1 mrg
1144 1.1 mrg inline gimple *
1145 1.1 mrg get_def (tree (*valueize)(tree), tree name)
1146 1.1 mrg {
1147 1.1 mrg if (valueize && ! valueize (name))
1148 1.1 mrg return NULL;
1149 1.1 mrg return SSA_NAME_DEF_STMT (name);
1150 1.1 mrg }
1151 1.1 mrg
1152 1.1 mrg /* Routine to determine if the types T1 and T2 are effectively
1153 1.1 mrg the same for GIMPLE. If T1 or T2 is not a type, the test
1154 1.1 mrg applies to their TREE_TYPE. */
1155 1.1 mrg
1156 1.1 mrg static inline bool
1157 1.1 mrg types_match (tree t1, tree t2)
1158 1.1 mrg {
1159 1.1 mrg if (!TYPE_P (t1))
1160 1.1 mrg t1 = TREE_TYPE (t1);
1161 1.1 mrg if (!TYPE_P (t2))
1162 1.1 mrg t2 = TREE_TYPE (t2);
1163 1.1 mrg
1164 1.1 mrg return types_compatible_p (t1, t2);
1165 1.1 mrg }
1166 1.1 mrg
1167 1.1 mrg /* Return if T has a single use. For GIMPLE, we also allow any
1168 1.1 mrg non-SSA_NAME (ie constants) and zero uses to cope with uses
1169 1.1 mrg that aren't linked up yet. */
1170 1.1 mrg
1171 1.1 mrg static bool
1172 1.1 mrg single_use (const_tree) ATTRIBUTE_PURE;
1173 1.1 mrg
1174 1.1 mrg static bool
1175 1.1 mrg single_use (const_tree t)
1176 1.1 mrg {
1177 1.1 mrg if (TREE_CODE (t) != SSA_NAME)
1178 1.1 mrg return true;
1179 1.1 mrg
1180 1.1 mrg /* Inline return has_zero_uses (t) || has_single_use (t); */
1181 1.1 mrg const ssa_use_operand_t *const head = &(SSA_NAME_IMM_USE_NODE (t));
1182 1.1 mrg const ssa_use_operand_t *ptr;
1183 1.1 mrg bool single = false;
1184 1.1 mrg
1185 1.1 mrg for (ptr = head->next; ptr != head; ptr = ptr->next)
1186 1.1 mrg if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
1187 1.1 mrg {
1188 1.1 mrg if (single)
1189 1.1 mrg return false;
1190 1.1 mrg single = true;
1191 1.1 mrg }
1192 1.1 mrg return true;
1193 1.1 mrg }
1194 1.1 mrg
1195 1.1 mrg /* Return true if math operations should be canonicalized,
1196 1.1 mrg e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */
1197 1.1 mrg
1198 1.1 mrg static inline bool
1199 1.1 mrg canonicalize_math_p ()
1200 1.1 mrg {
1201 1.1 mrg return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
1202 1.1 mrg }
1203 1.1 mrg
1204 1.1 mrg /* Return true if math operations that are beneficial only after
1205 1.1 mrg vectorization should be canonicalized. */
1206 1.1 mrg
1207 1.1 mrg static inline bool
1208 1.1 mrg canonicalize_math_after_vectorization_p ()
1209 1.1 mrg {
1210 1.1 mrg return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
1211 1.1 mrg }
1212 1.1 mrg
1213 1.1 mrg /* Return true if we can still perform transformations that may introduce
1214 1.1 mrg vector operations that are not supported by the target. Vector lowering
1215 1.1 mrg normally handles those, but after that pass, it becomes unsafe. */
1216 1.1 mrg
1217 1.1 mrg static inline bool
1218 1.1 mrg optimize_vectors_before_lowering_p ()
1219 1.1 mrg {
1220 1.1 mrg return !cfun || (cfun->curr_properties & PROP_gimple_lvec) == 0;
1221 1.1 mrg }
1222 1.1 mrg
/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
   will likely be exact, while exp (log (arg0) * arg1) might be not.
   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */

static bool
optimize_pow_to_exp (tree arg0, tree arg1)
{
  gcc_assert (TREE_CODE (arg0) == REAL_CST);
  /* A non-integer base cannot hit the exactness problem described
     above, so the transformation is always OK.  */
  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
    return true;

  if (TREE_CODE (arg1) != SSA_NAME)
    return true;

  /* Look for arg1 = PHI <cst2, ...> directly, or for
     arg1 = phi_res +/- cst1 with phi_res = PHI <cst2, ...>.  */
  gimple *def = SSA_NAME_DEF_STMT (arg1);
  gphi *phi = dyn_cast <gphi *> (def);
  tree cst1 = NULL_TREE;
  enum tree_code code = ERROR_MARK;
  if (!phi)
    {
      if (!is_gimple_assign (def))
	return true;
      code = gimple_assign_rhs_code (def);
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  break;
	default:
	  return true;
	}
      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
	  || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
	return true;

      cst1 = gimple_assign_rhs2 (def);

      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
      if (!phi)
	return true;
    }

  /* All constant PHI arguments must agree on a single value CST2;
     non-constant arguments are ignored.  */
  tree cst2 = NULL_TREE;
  int n = gimple_phi_num_args (phi);
  for (int i = 0; i < n; i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) != REAL_CST)
	continue;
      else if (cst2 == NULL_TREE)
	cst2 = arg;
      else if (!operand_equal_p (cst2, arg, 0))
	return true;
    }

  /* Fold cst2 +/- cst1 when arg1 was an adjusted PHI result.  */
  if (cst1 && cst2)
    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
  /* An exact-integer effective exponent is exactly the problematic
     case: refuse the transformation.  */
  if (cst2
      && TREE_CODE (cst2) == REAL_CST
      && real_isinteger (TREE_REAL_CST_PTR (cst2),
			 TYPE_MODE (TREE_TYPE (cst2))))
    return false;
  return true;
}
1290 1.1 mrg
1291 1.1 mrg /* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
1292 1.1 mrg is another division can be optimized. Don't optimize if INNER_DIV
1293 1.1 mrg is used in a TRUNC_MOD_EXPR with DIVISOR as second operand. */
1294 1.1 mrg
1295 1.1 mrg static bool
1296 1.1 mrg optimize_successive_divisions_p (tree divisor, tree inner_div)
1297 1.1 mrg {
1298 1.1 mrg if (!gimple_in_ssa_p (cfun))
1299 1.1 mrg return false;
1300 1.1 mrg
1301 1.1 mrg imm_use_iterator imm_iter;
1302 1.1 mrg use_operand_p use_p;
1303 1.1 mrg FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div)
1304 1.1 mrg {
1305 1.1 mrg gimple *use_stmt = USE_STMT (use_p);
1306 1.1 mrg if (!is_gimple_assign (use_stmt)
1307 1.1 mrg || gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR
1308 1.1 mrg || !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0))
1309 1.1 mrg continue;
1310 1.1 mrg return false;
1311 1.1 mrg }
1312 1.1 mrg return true;
1313 1.1 mrg }
1314 1.1 mrg
1315 1.1 mrg /* Return a canonical form for CODE when operating on TYPE. The idea
1316 1.1 mrg is to remove redundant ways of representing the same operation so
1317 1.1 mrg that code_helpers can be hashed and compared for equality.
1318 1.1 mrg
1319 1.1 mrg The only current canonicalization is to replace built-in functions
1320 1.1 mrg with internal functions, in cases where internal-fn.def defines
1321 1.1 mrg such an internal function.
1322 1.1 mrg
1323 1.1 mrg Note that the new code_helper cannot necessarily be used in place of
1324 1.1 mrg the original code_helper. For example, the new code_helper might be
1325 1.1 mrg an internal function that the target does not support. */
1326 1.1 mrg
1327 1.1 mrg code_helper
1328 1.1 mrg canonicalize_code (code_helper code, tree type)
1329 1.1 mrg {
1330 1.1 mrg if (code.is_fn_code ())
1331 1.1 mrg return associated_internal_fn (combined_fn (code), type);
1332 1.1 mrg return code;
1333 1.1 mrg }
1334 1.1 mrg
1335 1.1 mrg /* Return true if CODE is a binary operation and if CODE is commutative when
1336 1.1 mrg operating on type TYPE. */
1337 1.1 mrg
1338 1.1 mrg bool
1339 1.1 mrg commutative_binary_op_p (code_helper code, tree type)
1340 1.1 mrg {
1341 1.1 mrg if (code.is_tree_code ())
1342 1.1 mrg return commutative_tree_code (tree_code (code));
1343 1.1 mrg auto cfn = combined_fn (code);
1344 1.1 mrg return commutative_binary_fn_p (associated_internal_fn (cfn, type));
1345 1.1 mrg }
1346 1.1 mrg
1347 1.1 mrg /* Return true if CODE represents a ternary operation and if the first two
1348 1.1 mrg operands are commutative when CODE is operating on TYPE. */
1349 1.1 mrg
1350 1.1 mrg bool
1351 1.1 mrg commutative_ternary_op_p (code_helper code, tree type)
1352 1.1 mrg {
1353 1.1 mrg if (code.is_tree_code ())
1354 1.1 mrg return commutative_ternary_tree_code (tree_code (code));
1355 1.1 mrg auto cfn = combined_fn (code);
1356 1.1 mrg return commutative_ternary_fn_p (associated_internal_fn (cfn, type));
1357 1.1 mrg }
1358 1.1 mrg
1359 1.1 mrg /* If CODE is commutative in two consecutive operands, return the
1360 1.1 mrg index of the first, otherwise return -1. */
1361 1.1 mrg
1362 1.1 mrg int
1363 1.1 mrg first_commutative_argument (code_helper code, tree type)
1364 1.1 mrg {
1365 1.1 mrg if (code.is_tree_code ())
1366 1.1 mrg {
1367 1.1 mrg auto tcode = tree_code (code);
1368 1.1 mrg if (commutative_tree_code (tcode)
1369 1.1 mrg || commutative_ternary_tree_code (tcode))
1370 1.1 mrg return 0;
1371 1.1 mrg return -1;
1372 1.1 mrg }
1373 1.1 mrg auto cfn = combined_fn (code);
1374 1.1 mrg return first_commutative_argument (associated_internal_fn (cfn, type));
1375 1.1 mrg }
1376 1.1 mrg
1377 1.1 mrg /* Return true if CODE is a binary operation that is associative when
1378 1.1 mrg operating on type TYPE. */
1379 1.1 mrg
1380 1.1 mrg bool
1381 1.1 mrg associative_binary_op_p (code_helper code, tree type)
1382 1.1 mrg {
1383 1.1 mrg if (code.is_tree_code ())
1384 1.1 mrg return associative_tree_code (tree_code (code));
1385 1.1 mrg auto cfn = combined_fn (code);
1386 1.1 mrg return associative_binary_fn_p (associated_internal_fn (cfn, type));
1387 1.1 mrg }
1388 1.1 mrg
1389 1.1 mrg /* Return true if the target directly supports operation CODE on type TYPE.
1390 1.1 mrg QUERY_TYPE acts as for optab_for_tree_code. */
1391 1.1 mrg
1392 1.1 mrg bool
1393 1.1 mrg directly_supported_p (code_helper code, tree type, optab_subtype query_type)
1394 1.1 mrg {
1395 1.1 mrg if (code.is_tree_code ())
1396 1.1 mrg {
1397 1.1 mrg direct_optab optab = optab_for_tree_code (tree_code (code), type,
1398 1.1 mrg query_type);
1399 1.1 mrg return (optab != unknown_optab
1400 1.1 mrg && optab_handler (optab, TYPE_MODE (type)) != CODE_FOR_nothing);
1401 1.1 mrg }
1402 1.1 mrg gcc_assert (query_type == optab_default
1403 1.1 mrg || (query_type == optab_vector && VECTOR_TYPE_P (type))
1404 1.1 mrg || (query_type == optab_scalar && !VECTOR_TYPE_P (type)));
1405 1.1 mrg internal_fn ifn = associated_internal_fn (combined_fn (code), type);
1406 1.1 mrg return (direct_internal_fn_p (ifn)
1407 1.1 mrg && direct_internal_fn_supported_p (ifn, type, OPTIMIZE_FOR_SPEED));
1408 1.1 mrg }
1409 1.1 mrg
1410 1.1 mrg /* A wrapper around the internal-fn.cc versions of get_conditional_internal_fn
1411 1.1 mrg for a code_helper CODE operating on type TYPE. */
1412 1.1 mrg
1413 1.1 mrg internal_fn
1414 1.1 mrg get_conditional_internal_fn (code_helper code, tree type)
1415 1.1 mrg {
1416 1.1 mrg if (code.is_tree_code ())
1417 1.1 mrg return get_conditional_internal_fn (tree_code (code));
1418 1.1 mrg auto cfn = combined_fn (code);
1419 1.1 mrg return get_conditional_internal_fn (associated_internal_fn (cfn, type));
1420 1.1 mrg }
1421