/* Expand builtin functions.
   Copyright (C) 1988-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
19 1.1 mrg
20 1.1 mrg /* Legacy warning! Please add no further builtin simplifications here
21 1.1 mrg (apart from pure constant folding) - builtin simplifications should go
22 1.1 mrg to match.pd or gimple-fold.cc instead. */
23 1.1 mrg
24 1.1 mrg #include "config.h"
25 1.1 mrg #include "system.h"
26 1.1 mrg #include "coretypes.h"
27 1.1 mrg #include "backend.h"
28 1.1 mrg #include "target.h"
29 1.1 mrg #include "rtl.h"
30 1.1 mrg #include "tree.h"
31 1.1 mrg #include "memmodel.h"
32 1.1 mrg #include "gimple.h"
33 1.1 mrg #include "predict.h"
34 1.1 mrg #include "tm_p.h"
35 1.1 mrg #include "stringpool.h"
36 1.1 mrg #include "tree-vrp.h"
37 1.1 mrg #include "tree-ssanames.h"
38 1.1 mrg #include "expmed.h"
39 1.1 mrg #include "optabs.h"
40 1.1 mrg #include "emit-rtl.h"
41 1.1 mrg #include "recog.h"
42 1.1 mrg #include "diagnostic-core.h"
43 1.1 mrg #include "alias.h"
44 1.1 mrg #include "fold-const.h"
45 1.1 mrg #include "fold-const-call.h"
46 1.1 mrg #include "gimple-ssa-warn-access.h"
47 1.1 mrg #include "stor-layout.h"
48 1.1 mrg #include "calls.h"
49 1.1 mrg #include "varasm.h"
50 1.1 mrg #include "tree-object-size.h"
51 1.1 mrg #include "tree-ssa-strlen.h"
52 1.1 mrg #include "realmpfr.h"
53 1.1 mrg #include "cfgrtl.h"
54 1.1 mrg #include "except.h"
55 1.1 mrg #include "dojump.h"
56 1.1 mrg #include "explow.h"
57 1.1 mrg #include "stmt.h"
58 1.1 mrg #include "expr.h"
59 1.1 mrg #include "libfuncs.h"
60 1.1 mrg #include "output.h"
61 1.1 mrg #include "typeclass.h"
62 1.1 mrg #include "langhooks.h"
63 1.1 mrg #include "value-prof.h"
64 1.1 mrg #include "builtins.h"
65 1.1 mrg #include "stringpool.h"
66 1.1 mrg #include "attribs.h"
67 1.1 mrg #include "asan.h"
68 1.1 mrg #include "internal-fn.h"
69 1.1 mrg #include "case-cfn-macros.h"
70 1.1 mrg #include "gimple-fold.h"
71 1.1 mrg #include "intl.h"
72 1.1 mrg #include "file-prefix-map.h" /* remap_macro_filename() */
73 1.1 mrg #include "gomp-constants.h"
74 1.1 mrg #include "omp-general.h"
75 1.1 mrg #include "tree-dfa.h"
76 1.1 mrg #include "gimple-iterator.h"
77 1.1 mrg #include "gimple-ssa.h"
78 1.1 mrg #include "tree-ssa-live.h"
79 1.1 mrg #include "tree-outof-ssa.h"
80 1.1 mrg #include "attr-fnspec.h"
81 1.1 mrg #include "demangle.h"
82 1.1 mrg #include "gimple-range.h"
83 1.1 mrg #include "pointer-query.h"
84 1.1 mrg
85 1.1 mrg struct target_builtins default_target_builtins;
86 1.1 mrg #if SWITCHABLE_TARGET
87 1.1 mrg struct target_builtins *this_target_builtins = &default_target_builtins;
88 1.1 mrg #endif
89 1.1 mrg
90 1.1 mrg /* Define the names of the builtin function types and codes. */
91 1.1 mrg const char *const built_in_class_names[BUILT_IN_LAST]
92 1.1 mrg = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
93 1.1 mrg
94 1.1 mrg #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95 1.1 mrg const char * built_in_names[(int) END_BUILTINS] =
96 1.1 mrg {
97 1.1 mrg #include "builtins.def"
98 1.1 mrg };
99 1.1 mrg
100 1.1 mrg /* Setup an array of builtin_info_type, make sure each element decl is
101 1.1 mrg initialized to NULL_TREE. */
102 1.1 mrg builtin_info_type builtin_info[(int)END_BUILTINS];
103 1.1 mrg
104 1.1 mrg /* Non-zero if __builtin_constant_p should be folded right away. */
105 1.1 mrg bool force_folding_builtin_constant_p;
106 1.1 mrg
107 1.1 mrg static int target_char_cast (tree, char *);
108 1.1 mrg static int apply_args_size (void);
109 1.1 mrg static int apply_result_size (void);
110 1.1 mrg static rtx result_vector (int, rtx);
111 1.1 mrg static void expand_builtin_prefetch (tree);
112 1.1 mrg static rtx expand_builtin_apply_args (void);
113 1.1 mrg static rtx expand_builtin_apply_args_1 (void);
114 1.1 mrg static rtx expand_builtin_apply (rtx, rtx, rtx);
115 1.1 mrg static void expand_builtin_return (rtx);
116 1.1 mrg static enum type_class type_to_class (tree);
117 1.1 mrg static rtx expand_builtin_classify_type (tree);
118 1.1 mrg static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 1.1 mrg static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 1.1 mrg static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 1.1 mrg static rtx expand_builtin_sincos (tree);
122 1.1 mrg static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
123 1.1 mrg static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
124 1.1 mrg optab);
125 1.1 mrg static rtx expand_builtin_cexpi (tree, rtx);
126 1.1 mrg static rtx expand_builtin_int_roundingfn (tree, rtx);
127 1.1 mrg static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
128 1.1 mrg static rtx expand_builtin_next_arg (void);
129 1.1 mrg static rtx expand_builtin_va_start (tree);
130 1.1 mrg static rtx expand_builtin_va_end (tree);
131 1.1 mrg static rtx expand_builtin_va_copy (tree);
132 1.1 mrg static rtx inline_expand_builtin_bytecmp (tree, rtx);
133 1.1 mrg static rtx expand_builtin_strcmp (tree, rtx);
134 1.1 mrg static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
135 1.1 mrg static rtx expand_builtin_memcpy (tree, rtx);
136 1.1 mrg static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
137 1.1 mrg rtx target, tree exp,
138 1.1 mrg memop_ret retmode,
139 1.1 mrg bool might_overlap);
140 1.1 mrg static rtx expand_builtin_memmove (tree, rtx);
141 1.1 mrg static rtx expand_builtin_mempcpy (tree, rtx);
142 1.1 mrg static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
143 1.1 mrg static rtx expand_builtin_strcpy (tree, rtx);
144 1.1 mrg static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
145 1.1 mrg static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
146 1.1 mrg static rtx expand_builtin_strncpy (tree, rtx);
147 1.1 mrg static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
148 1.1 mrg static rtx expand_builtin_bzero (tree);
149 1.1 mrg static rtx expand_builtin_strlen (tree, rtx, machine_mode);
150 1.1 mrg static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
151 1.1 mrg static rtx expand_builtin_alloca (tree);
152 1.1 mrg static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
153 1.1 mrg static rtx expand_builtin_frame_address (tree, tree);
154 1.1 mrg static tree stabilize_va_list_loc (location_t, tree, int);
155 1.1 mrg static rtx expand_builtin_expect (tree, rtx);
156 1.1 mrg static rtx expand_builtin_expect_with_probability (tree, rtx);
157 1.1 mrg static tree fold_builtin_constant_p (tree);
158 1.1 mrg static tree fold_builtin_classify_type (tree);
159 1.1 mrg static tree fold_builtin_strlen (location_t, tree, tree, tree);
160 1.1 mrg static tree fold_builtin_inf (location_t, tree, int);
161 1.1 mrg static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
162 1.1 mrg static bool validate_arg (const_tree, enum tree_code code);
163 1.1 mrg static rtx expand_builtin_fabs (tree, rtx, rtx);
164 1.1 mrg static rtx expand_builtin_signbit (tree, rtx);
165 1.1 mrg static tree fold_builtin_memcmp (location_t, tree, tree, tree);
166 1.1 mrg static tree fold_builtin_isascii (location_t, tree);
167 1.1 mrg static tree fold_builtin_toascii (location_t, tree);
168 1.1 mrg static tree fold_builtin_isdigit (location_t, tree);
169 1.1 mrg static tree fold_builtin_fabs (location_t, tree, tree);
170 1.1 mrg static tree fold_builtin_abs (location_t, tree, tree);
171 1.1 mrg static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
172 1.1 mrg enum tree_code);
173 1.1 mrg static tree fold_builtin_varargs (location_t, tree, tree*, int);
174 1.1 mrg
175 1.1 mrg static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
176 1.1 mrg static tree fold_builtin_strspn (location_t, tree, tree, tree);
177 1.1 mrg static tree fold_builtin_strcspn (location_t, tree, tree, tree);
178 1.1 mrg
179 1.1 mrg static rtx expand_builtin_object_size (tree);
180 1.1 mrg static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
181 1.1 mrg enum built_in_function);
182 1.1 mrg static void maybe_emit_chk_warning (tree, enum built_in_function);
183 1.1 mrg static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
184 1.1 mrg static tree fold_builtin_object_size (tree, tree, enum built_in_function);
185 1.1 mrg
186 1.1 mrg unsigned HOST_WIDE_INT target_newline;
187 1.1 mrg unsigned HOST_WIDE_INT target_percent;
188 1.1 mrg static unsigned HOST_WIDE_INT target_c;
189 1.1 mrg static unsigned HOST_WIDE_INT target_s;
190 1.1 mrg char target_percent_c[3];
191 1.1 mrg char target_percent_s[3];
192 1.1 mrg char target_percent_s_newline[4];
193 1.1 mrg static tree do_mpfr_remquo (tree, tree, tree);
194 1.1 mrg static tree do_mpfr_lgamma_r (tree, tree, tree);
195 1.1 mrg static void expand_builtin_sync_synchronize (void);
196 1.1 mrg
197 1.1 mrg /* Return true if NAME starts with __builtin_ or __sync_. */
198 1.1 mrg
199 1.1 mrg static bool
200 1.1 mrg is_builtin_name (const char *name)
201 1.1 mrg {
202 1.1 mrg return (startswith (name, "__builtin_")
203 1.1 mrg || startswith (name, "__sync_")
204 1.1 mrg || startswith (name, "__atomic_"));
205 1.1 mrg }
206 1.1 mrg
207 1.1 mrg /* Return true if NODE should be considered for inline expansion regardless
208 1.1 mrg of the optimization level. This means whenever a function is invoked with
209 1.1 mrg its "internal" name, which normally contains the prefix "__builtin". */
210 1.1 mrg
211 1.1 mrg bool
212 1.1 mrg called_as_built_in (tree node)
213 1.1 mrg {
214 1.1 mrg /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
215 1.1 mrg we want the name used to call the function, not the name it
216 1.1 mrg will have. */
217 1.1 mrg const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
218 1.1 mrg return is_builtin_name (name);
219 1.1 mrg }
220 1.1 mrg
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  /* Start from the most conservative answer and improve it below.  */
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    /* Labels provide no usable alignment information; keep the
       conservative BITS_PER_UNIT default.  */
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      /* All-ones means "no explicit mask seen"; it leaves ptr_bitpos
	 unchanged when re-applied below.  */
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  Subtracting coeffs[0] isolates
     the runtime-variable part of the poly_int offset.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
378 1.1 mrg
379 1.1 mrg /* For a memory reference expression EXP compute values M and N such that M
380 1.1 mrg divides (&EXP - N) and such that N < M. If these numbers can be determined,
381 1.1 mrg store M in alignp and N in *BITPOSP and return true. Otherwise return false
382 1.1 mrg and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
383 1.1 mrg
384 1.1 mrg bool
385 1.1 mrg get_object_alignment_1 (tree exp, unsigned int *alignp,
386 1.1 mrg unsigned HOST_WIDE_INT *bitposp)
387 1.1 mrg {
388 1.1 mrg /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
389 1.1 mrg with it. */
390 1.1 mrg if (TREE_CODE (exp) == WITH_SIZE_EXPR)
391 1.1 mrg exp = TREE_OPERAND (exp, 0);
392 1.1 mrg return get_object_alignment_2 (exp, alignp, bitposp, false);
393 1.1 mrg }
394 1.1 mrg
395 1.1 mrg /* Return the alignment in bits of EXP, an object. */
396 1.1 mrg
397 1.1 mrg unsigned int
398 1.1 mrg get_object_alignment (tree exp)
399 1.1 mrg {
400 1.1 mrg unsigned HOST_WIDE_INT bitpos = 0;
401 1.1 mrg unsigned int align;
402 1.1 mrg
403 1.1 mrg get_object_alignment_1 (exp, &align, &bitpos);
404 1.1 mrg
405 1.1 mrg /* align and bitpos now specify known low bits of the pointer.
406 1.1 mrg ptr & (align - 1) == bitpos. */
407 1.1 mrg
408 1.1 mrg if (bitpos != 0)
409 1.1 mrg align = least_bit_hwi (bitpos);
410 1.1 mrg return align;
411 1.1 mrg }
412 1.1 mrg
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* &OBJ: derive the answer from the object itself; ADDR_P is true
       since only the address, not an access, is implied here.  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      /* BASE p+ OFFSET: recurse on the base and fold the offset into
	 either the bit position (constant) or the alignment bound
	 (variable).  */
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  /* A variable offset only preserves the alignment implied by
	     its known trailing zero bits.  */
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      /* SSA pointer: use alignment info recorded by earlier analyses,
	 if any.  */
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant address: all bits are known, so report the maximum
	 alignment with the exact residue.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Anything else: conservative default.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
487 1.1 mrg
488 1.1 mrg /* Return the alignment in bits of EXP, a pointer valued expression.
489 1.1 mrg The alignment returned is, by default, the alignment of the thing that
490 1.1 mrg EXP points to. If it is not a POINTER_TYPE, 0 is returned.
491 1.1 mrg
492 1.1 mrg Otherwise, look at the expression to see if we can do better, i.e., if the
493 1.1 mrg expression is actually pointing at an object whose alignment is tighter. */
494 1.1 mrg
495 1.1 mrg unsigned int
496 1.1 mrg get_pointer_alignment (tree exp)
497 1.1 mrg {
498 1.1 mrg unsigned HOST_WIDE_INT bitpos = 0;
499 1.1 mrg unsigned int align;
500 1.1 mrg
501 1.1 mrg get_pointer_alignment_1 (exp, &align, &bitpos);
502 1.1 mrg
503 1.1 mrg /* align and bitpos now specify known low bits of the pointer.
504 1.1 mrg ptr & (align - 1) == bitpos. */
505 1.1 mrg
506 1.1 mrg if (bitpos != 0)
507 1.1 mrg align = least_bit_hwi (bitpos);
508 1.1 mrg
509 1.1 mrg return align;
510 1.1 mrg }
511 1.1 mrg
512 1.1 mrg /* Return the number of leading non-zero elements in the sequence
513 1.1 mrg [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
514 1.1 mrg ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
515 1.1 mrg
516 1.1 mrg unsigned
517 1.1 mrg string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
518 1.1 mrg {
519 1.1 mrg gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
520 1.1 mrg
521 1.1 mrg unsigned n;
522 1.1 mrg
523 1.1 mrg if (eltsize == 1)
524 1.1 mrg {
525 1.1 mrg /* Optimize the common case of plain char. */
526 1.1 mrg for (n = 0; n < maxelts; n++)
527 1.1 mrg {
528 1.1 mrg const char *elt = (const char*) ptr + n;
529 1.1 mrg if (!*elt)
530 1.1 mrg break;
531 1.1 mrg }
532 1.1 mrg }
533 1.1 mrg else
534 1.1 mrg {
535 1.1 mrg for (n = 0; n < maxelts; n++)
536 1.1 mrg {
537 1.1 mrg const char *elt = (const char*) ptr + n * eltsize;
538 1.1 mrg if (!memcmp (elt, "\0\0\0\0", eltsize))
539 1.1 mrg break;
540 1.1 mrg }
541 1.1 mrg }
542 1.1 mrg return n;
543 1.1 mrg }
544 1.1 mrg
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      /* A conditional has a known length only when both arms agree.  */
      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      /* Build: OFFSET <= LEN ? LEN - OFFSET : 0, of type ssizetype.  */
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !warning_suppressed_p (arg, OPT_Warray_bounds)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  suppress_warning (arg, OPT_Warray_bounds);
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
737 1.1 mrg
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  /* Number of HOST_WIDE_INT elements needed to hold MODE's precision,
     rounding up.  */
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  /* CH doubles as the "still inside the string" flag: once it becomes
     zero (a NUL was read) and NULL_TERMINATED_P holds, no further bytes
     of STR are read and the remaining target bytes stay zero.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map byte I of the string to target byte position J, honoring the
	 target's word order and, when they differ, the byte order within
	 words; then scale J to a bit position.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
779 1.1 mrg
780 1.1 mrg /* Cast a target constant CST to target CHAR and if that value fits into
781 1.1 mrg host char type, return zero and put that value into variable pointed to by
782 1.1 mrg P. */
783 1.1 mrg
784 1.1 mrg static int
785 1.1 mrg target_char_cast (tree cst, char *p)
786 1.1 mrg {
787 1.1 mrg unsigned HOST_WIDE_INT val, hostval;
788 1.1 mrg
789 1.1 mrg if (TREE_CODE (cst) != INTEGER_CST
790 1.1 mrg || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
791 1.1 mrg return 1;
792 1.1 mrg
793 1.1 mrg /* Do not care if it fits or not right here. */
794 1.1 mrg val = TREE_INT_CST_LOW (cst);
795 1.1 mrg
796 1.1 mrg if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
797 1.1 mrg val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
798 1.1 mrg
799 1.1 mrg hostval = val;
800 1.1 mrg if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
801 1.1 mrg hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
802 1.1 mrg
803 1.1 mrg if (val != hostval)
804 1.1 mrg return 1;
805 1.1 mrg
806 1.1 mrg *p = hostval;
807 1.1 mrg return 0;
808 1.1 mrg }
809 1.1 mrg
810 1.1 mrg /* Similar to save_expr, but assumes that arbitrary code is not executed
811 1.1 mrg in between the multiple evaluations. In particular, we assume that a
812 1.1 mrg non-addressable local variable will not be modified. */
813 1.1 mrg
814 1.1 mrg static tree
815 1.1 mrg builtin_save_expr (tree exp)
816 1.1 mrg {
817 1.1 mrg if (TREE_CODE (exp) == SSA_NAME
818 1.1 mrg || (TREE_ADDRESSABLE (exp) == 0
819 1.1 mrg && (TREE_CODE (exp) == PARM_DECL
820 1.1 mrg || (VAR_P (exp) && !TREE_STATIC (exp)))))
821 1.1 mrg return exp;
822 1.1 mrg
823 1.1 mrg return save_expr (exp);
824 1.1 mrg }
825 1.1 mrg
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE, which is
   BUILT_IN_FRAME_ADDRESS or BUILT_IN_RETURN_ADDRESS).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  /* Start from the target's notion of the initial frame address if it
     provides one; otherwise pick a frame pointer below.  */
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  /* Give the target a chance to set up whatever frame walking needs.  */
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      /* Hold the intermediate frame address in a pseudo across
	 iterations.  */
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address is saved one word past the location the
     frame address points at.  */
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
893 1.1 mrg
/* Alias set used for setjmp buffer.  Lazily allocated on first use by
   expand_builtin_setjmp_setup or expand_builtin_longjmp; -1 means not
   yet allocated.  */
static alias_set_type setjmp_alias_set = -1;
896 1.1 mrg
897 1.1 mrg /* Construct the leading half of a __builtin_setjmp call. Control will
898 1.1 mrg return to RECEIVER_LABEL. This is also called directly by the SJLJ
899 1.1 mrg exception handling code. */
900 1.1 mrg
901 1.1 mrg void
902 1.1 mrg expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
903 1.1 mrg {
904 1.1 mrg machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
905 1.1 mrg rtx stack_save;
906 1.1 mrg rtx mem;
907 1.1 mrg
908 1.1 mrg if (setjmp_alias_set == -1)
909 1.1 mrg setjmp_alias_set = new_alias_set ();
910 1.1 mrg
911 1.1 mrg buf_addr = convert_memory_address (Pmode, buf_addr);
912 1.1 mrg
913 1.1 mrg buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
914 1.1 mrg
915 1.1 mrg /* We store the frame pointer and the address of receiver_label in
916 1.1 mrg the buffer and use the rest of it for the stack save area, which
917 1.1 mrg is machine-dependent. */
918 1.1 mrg
919 1.1 mrg mem = gen_rtx_MEM (Pmode, buf_addr);
920 1.1 mrg set_mem_alias_set (mem, setjmp_alias_set);
921 1.1 mrg emit_move_insn (mem, hard_frame_pointer_rtx);
922 1.1 mrg
923 1.1 mrg mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
924 1.1 mrg GET_MODE_SIZE (Pmode))),
925 1.1 mrg set_mem_alias_set (mem, setjmp_alias_set);
926 1.1 mrg
927 1.1 mrg emit_move_insn (validize_mem (mem),
928 1.1 mrg force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
929 1.1 mrg
930 1.1 mrg stack_save = gen_rtx_MEM (sa_mode,
931 1.1 mrg plus_constant (Pmode, buf_addr,
932 1.1 mrg 2 * GET_MODE_SIZE (Pmode)));
933 1.1 mrg set_mem_alias_set (stack_save, setjmp_alias_set);
934 1.1 mrg emit_stack_save (SAVE_NONLOCAL, &stack_save);
935 1.1 mrg
936 1.1 mrg /* If there is further processing to do, do it. */
937 1.1 mrg if (targetm.have_builtin_setjmp_setup ())
938 1.1 mrg emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
939 1.1 mrg
940 1.1 mrg /* We have a nonlocal label. */
941 1.1 mrg cfun->has_nonlocal_label = 1;
942 1.1 mrg }
943 1.1 mrg
944 1.1 mrg /* Construct the trailing part of a __builtin_setjmp call. This is
945 1.1 mrg also called directly by the SJLJ exception handling code.
946 1.1 mrg If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
947 1.1 mrg
948 1.1 mrg void
949 1.1 mrg expand_builtin_setjmp_receiver (rtx receiver_label)
950 1.1 mrg {
951 1.1 mrg rtx chain;
952 1.1 mrg
953 1.1 mrg /* Mark the FP as used when we get here, so we have to make sure it's
954 1.1 mrg marked as used by this function. */
955 1.1 mrg emit_use (hard_frame_pointer_rtx);
956 1.1 mrg
957 1.1 mrg /* Mark the static chain as clobbered here so life information
958 1.1 mrg doesn't get messed up for it. */
959 1.1 mrg chain = rtx_for_static_chain (current_function_decl, true);
960 1.1 mrg if (chain && REG_P (chain))
961 1.1 mrg emit_clobber (chain);
962 1.1 mrg
963 1.1 mrg if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
964 1.1 mrg {
965 1.1 mrg /* If the argument pointer can be eliminated in favor of the
966 1.1 mrg frame pointer, we don't need to restore it. We assume here
967 1.1 mrg that if such an elimination is present, it can always be used.
968 1.1 mrg This is the case on all known machines; if we don't make this
969 1.1 mrg assumption, we do unnecessary saving on many machines. */
970 1.1 mrg size_t i;
971 1.1 mrg static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
972 1.1 mrg
973 1.1 mrg for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
974 1.1 mrg if (elim_regs[i].from == ARG_POINTER_REGNUM
975 1.1 mrg && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
976 1.1 mrg break;
977 1.1 mrg
978 1.1 mrg if (i == ARRAY_SIZE (elim_regs))
979 1.1 mrg {
980 1.1 mrg /* Now restore our arg pointer from the address at which it
981 1.1 mrg was saved in our stack frame. */
982 1.1 mrg emit_move_insn (crtl->args.internal_arg_pointer,
983 1.1 mrg copy_to_reg (get_arg_pointer_save_area ()));
984 1.1 mrg }
985 1.1 mrg }
986 1.1 mrg
987 1.1 mrg if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
988 1.1 mrg emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
989 1.1 mrg else if (targetm.have_nonlocal_goto_receiver ())
990 1.1 mrg emit_insn (targetm.gen_nonlocal_goto_receiver ());
991 1.1 mrg else
992 1.1 mrg { /* Nothing */ }
993 1.1 mrg
994 1.1 mrg /* We must not allow the code we just generated to be reordered by
995 1.1 mrg scheduling. Specifically, the update of the frame pointer must
996 1.1 mrg happen immediately, not later. */
997 1.1 mrg emit_insn (gen_blockage ());
998 1.1 mrg }
999 1.1 mrg
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  /* Remember where this function's insns start so the scan at the end
     can assert we emitted something.  */
  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      /* The buffer layout mirrors expand_builtin_setjmp_setup: word 0 is
	 the frame pointer, word 1 the receiver label and the rest the
	 machine-dependent stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.cc; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      /* We emitted at least one insn above, so we cannot walk past the
	 point where this function started emitting.  */
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
1092 1.1 mrg
1093 1.1 mrg static inline bool
1094 1.1 mrg more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1095 1.1 mrg {
1096 1.1 mrg return (iter->i < iter->n);
1097 1.1 mrg }
1098 1.1 mrg
1099 1.1 mrg /* This function validates the types of a function call argument list
1100 1.1 mrg against a specified list of tree_codes. If the last specifier is a 0,
1101 1.1 mrg that represents an ellipsis, otherwise the last specifier must be a
1102 1.1 mrg VOID_TYPE. */
1103 1.1 mrg
1104 1.1 mrg static bool
1105 1.1 mrg validate_arglist (const_tree callexpr, ...)
1106 1.1 mrg {
1107 1.1 mrg enum tree_code code;
1108 1.1 mrg bool res = 0;
1109 1.1 mrg va_list ap;
1110 1.1 mrg const_call_expr_arg_iterator iter;
1111 1.1 mrg const_tree arg;
1112 1.1 mrg
1113 1.1 mrg va_start (ap, callexpr);
1114 1.1 mrg init_const_call_expr_arg_iterator (callexpr, &iter);
1115 1.1 mrg
1116 1.1 mrg /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1117 1.1 mrg tree fn = CALL_EXPR_FN (callexpr);
1118 1.1 mrg bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1119 1.1 mrg
1120 1.1 mrg for (unsigned argno = 1; ; ++argno)
1121 1.1 mrg {
1122 1.1 mrg code = (enum tree_code) va_arg (ap, int);
1123 1.1 mrg
1124 1.1 mrg switch (code)
1125 1.1 mrg {
1126 1.1 mrg case 0:
1127 1.1 mrg /* This signifies an ellipses, any further arguments are all ok. */
1128 1.1 mrg res = true;
1129 1.1 mrg goto end;
1130 1.1 mrg case VOID_TYPE:
1131 1.1 mrg /* This signifies an endlink, if no arguments remain, return
1132 1.1 mrg true, otherwise return false. */
1133 1.1 mrg res = !more_const_call_expr_args_p (&iter);
1134 1.1 mrg goto end;
1135 1.1 mrg case POINTER_TYPE:
1136 1.1 mrg /* The actual argument must be nonnull when either the whole
1137 1.1 mrg called function has been declared nonnull, or when the formal
1138 1.1 mrg argument corresponding to the actual argument has been. */
1139 1.1 mrg if (argmap
1140 1.1 mrg && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1141 1.1 mrg {
1142 1.1 mrg arg = next_const_call_expr_arg (&iter);
1143 1.1 mrg if (!validate_arg (arg, code) || integer_zerop (arg))
1144 1.1 mrg goto end;
1145 1.1 mrg break;
1146 1.1 mrg }
1147 1.1 mrg /* FALLTHRU */
1148 1.1 mrg default:
1149 1.1 mrg /* If no parameters remain or the parameter's code does not
1150 1.1 mrg match the specified code, return false. Otherwise continue
1151 1.1 mrg checking any remaining arguments. */
1152 1.1 mrg arg = next_const_call_expr_arg (&iter);
1153 1.1 mrg if (!validate_arg (arg, code))
1154 1.1 mrg goto end;
1155 1.1 mrg break;
1156 1.1 mrg }
1157 1.1 mrg }
1158 1.1 mrg
1159 1.1 mrg /* We need gotos here since we can only have one VA_CLOSE in a
1160 1.1 mrg function. */
1161 1.1 mrg end: ;
1162 1.1 mrg va_end (ap);
1163 1.1 mrg
1164 1.1 mrg BITMAP_FREE (argmap);
1165 1.1 mrg
1166 1.1 mrg return res;
1167 1.1 mrg }
1168 1.1 mrg
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  Returns const0_rtx (the call never
   returns normally), or NULL_RTX if the argument list is malformed.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* Word 0 of the save area holds the frame pointer, the following words
     the stack save area.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1256 1.1 mrg
1257 1.1 mrg /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1258 1.1 mrg (not all will be used on all machines) that was passed to __builtin_setjmp.
1259 1.1 mrg It updates the stack pointer in that block to the current value. This is
1260 1.1 mrg also called directly by the SJLJ exception handling code. */
1261 1.1 mrg
1262 1.1 mrg void
1263 1.1 mrg expand_builtin_update_setjmp_buf (rtx buf_addr)
1264 1.1 mrg {
1265 1.1 mrg machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1266 1.1 mrg buf_addr = convert_memory_address (Pmode, buf_addr);
1267 1.1 mrg rtx stack_save
1268 1.1 mrg = gen_rtx_MEM (sa_mode,
1269 1.1 mrg memory_address
1270 1.1 mrg (sa_mode,
1271 1.1 mrg plus_constant (Pmode, buf_addr,
1272 1.1 mrg 2 * GET_MODE_SIZE (Pmode))));
1273 1.1 mrg
1274 1.1 mrg emit_stack_save (SAVE_NONLOCAL, &stack_save);
1275 1.1 mrg }
1276 1.1 mrg
1277 1.1 mrg /* Expand a call to __builtin_prefetch. For a target that does not support
1278 1.1 mrg data prefetch, evaluate the memory address argument in case it has side
1279 1.1 mrg effects. */
1280 1.1 mrg
1281 1.1 mrg static void
1282 1.1 mrg expand_builtin_prefetch (tree exp)
1283 1.1 mrg {
1284 1.1 mrg tree arg0, arg1, arg2;
1285 1.1 mrg int nargs;
1286 1.1 mrg rtx op0, op1, op2;
1287 1.1 mrg
1288 1.1 mrg if (!validate_arglist (exp, POINTER_TYPE, 0))
1289 1.1 mrg return;
1290 1.1 mrg
1291 1.1 mrg arg0 = CALL_EXPR_ARG (exp, 0);
1292 1.1 mrg
1293 1.1 mrg /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1294 1.1 mrg zero (read) and argument 2 (locality) defaults to 3 (high degree of
1295 1.1 mrg locality). */
1296 1.1 mrg nargs = call_expr_nargs (exp);
1297 1.1 mrg if (nargs > 1)
1298 1.1 mrg arg1 = CALL_EXPR_ARG (exp, 1);
1299 1.1 mrg else
1300 1.1 mrg arg1 = integer_zero_node;
1301 1.1 mrg if (nargs > 2)
1302 1.1 mrg arg2 = CALL_EXPR_ARG (exp, 2);
1303 1.1 mrg else
1304 1.1 mrg arg2 = integer_three_node;
1305 1.1 mrg
1306 1.1 mrg /* Argument 0 is an address. */
1307 1.1 mrg op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1308 1.1 mrg
1309 1.1 mrg /* Argument 1 (read/write flag) must be a compile-time constant int. */
1310 1.1 mrg if (TREE_CODE (arg1) != INTEGER_CST)
1311 1.1 mrg {
1312 1.1 mrg error ("second argument to %<__builtin_prefetch%> must be a constant");
1313 1.1 mrg arg1 = integer_zero_node;
1314 1.1 mrg }
1315 1.1 mrg op1 = expand_normal (arg1);
1316 1.1 mrg /* Argument 1 must be either zero or one. */
1317 1.1 mrg if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1318 1.1 mrg {
1319 1.1 mrg warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1320 1.1 mrg " using zero");
1321 1.1 mrg op1 = const0_rtx;
1322 1.1 mrg }
1323 1.1 mrg
1324 1.1 mrg /* Argument 2 (locality) must be a compile-time constant int. */
1325 1.1 mrg if (TREE_CODE (arg2) != INTEGER_CST)
1326 1.1 mrg {
1327 1.1 mrg error ("third argument to %<__builtin_prefetch%> must be a constant");
1328 1.1 mrg arg2 = integer_zero_node;
1329 1.1 mrg }
1330 1.1 mrg op2 = expand_normal (arg2);
1331 1.1 mrg /* Argument 2 must be 0, 1, 2, or 3. */
1332 1.1 mrg if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1333 1.1 mrg {
1334 1.1 mrg warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1335 1.1 mrg op2 = const0_rtx;
1336 1.1 mrg }
1337 1.1 mrg
1338 1.1 mrg if (targetm.have_prefetch ())
1339 1.1 mrg {
1340 1.1 mrg class expand_operand ops[3];
1341 1.1 mrg
1342 1.1 mrg create_address_operand (&ops[0], op0);
1343 1.1 mrg create_integer_operand (&ops[1], INTVAL (op1));
1344 1.1 mrg create_integer_operand (&ops[2], INTVAL (op2));
1345 1.1 mrg if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1346 1.1 mrg return;
1347 1.1 mrg }
1348 1.1 mrg
1349 1.1 mrg /* Don't do anything with direct references to volatile memory, but
1350 1.1 mrg generate code to handle other side effects. */
1351 1.1 mrg if (!MEM_P (op0) && side_effects_p (op0))
1352 1.1 mrg emit_insn (op0);
1353 1.1 mrg }
1354 1.1 mrg
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp, base;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Expand the original expression (so any SAVE_EXPR is resolved) and
     wrap the resulting address in a BLKmode MEM.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						      0))))
    {
      /* Record the alignment implied by the original address before EXP
	 is replaced with an access to the base object.  */
      unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
      exp = build_fold_addr_expr (base);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
      /* Since we stripped parts make sure the offset is unknown and the
	 alignment is computed from the original address.  */
      clear_mem_offset (mem);
      set_mem_align (mem, align);
    }
  /* Alias set 0: string builtins may alias anything.  */
  set_mem_alias_set (mem, 0);
  return mem;
}
1414 1.1 mrg
/* Built-in functions to perform an untyped call and return.  */

/* Per-target-struct arrays giving, for each hard register, the mode in
   which it is saved/restored by the untyped call machinery (VOIDmode for
   registers that are not saved); filled in lazily by apply_args_size and
   apply_result_size below.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
1422 1.1 mrg
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  The result is computed once and cached
   in a function-local static.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      /* Then one suitably aligned slot for every register that can carry
	 a function argument, recording each register's save mode.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    /* VOIDmode marks a register that need not be saved.  */
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
1464 1.1 mrg
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  The result is computed once and
   cached in a function-local static.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      /* One suitably aligned slot for every register that can hold a
	 function return value, recording each register's save mode.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  /* VOIDmode marks a register that need not be saved.  */
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1503 1.1 mrg
1504 1.1 mrg /* Create a vector describing the result block RESULT. If SAVEP is true,
1505 1.1 mrg the result block is used to save the values; otherwise it is used to
1506 1.1 mrg restore the values. */
1507 1.1 mrg
1508 1.1 mrg static rtx
1509 1.1 mrg result_vector (int savep, rtx result)
1510 1.1 mrg {
1511 1.1 mrg int regno, size, align, nelts;
1512 1.1 mrg fixed_size_mode mode;
1513 1.1 mrg rtx reg, mem;
1514 1.1 mrg rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1515 1.1 mrg
1516 1.1 mrg size = nelts = 0;
1517 1.1 mrg for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1518 1.1 mrg if ((mode = apply_result_mode[regno]) != VOIDmode)
1519 1.1 mrg {
1520 1.1 mrg align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1521 1.1 mrg if (size % align != 0)
1522 1.1 mrg size = CEIL (size, align) * align;
1523 1.1 mrg reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1524 1.1 mrg mem = adjust_address (result, mode, size);
1525 1.1 mrg savevec[nelts++] = (savep
1526 1.1 mrg ? gen_rtx_SET (mem, reg)
1527 1.1 mrg : gen_rtx_SET (reg, mem));
1528 1.1 mrg size += GET_MODE_SIZE (mode);
1529 1.1 mrg }
1530 1.1 mrg return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1531 1.1 mrg }
1532 1.1 mrg
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  The state is
   stored in a freshly allocated stack block laid out as: arg pointer,
   optional structure value address, then every incoming argument
   register recorded in apply_args_mode.  Returns a pseudo holding the
   address of that block.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  /* NOTE(review): the second argument 1 appears to request the incoming
     location of the structure value rtx — confirm against the
     TARGET_STRUCT_VALUE_RTX hook documentation.  */
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block,
     aligning each slot to its mode's alignment.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
		    copy_to_reg (struct_incoming_value));

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1590 1.1 mrg
1591 1.1 mrg /* __builtin_apply_args returns block of memory allocated on
1592 1.1 mrg the stack into which is stored the arg pointer, structure
1593 1.1 mrg value address, static chain, and all the registers that might
1594 1.1 mrg possibly be used in performing a function call. The code is
1595 1.1 mrg moved to the start of the function so the incoming values are
1596 1.1 mrg saved. */
1597 1.1 mrg
1598 1.1 mrg static rtx
1599 1.1 mrg expand_builtin_apply_args (void)
1600 1.1 mrg {
1601 1.1 mrg /* Don't do __builtin_apply_args more than once in a function.
1602 1.1 mrg Save the result of the first call and reuse it. */
1603 1.1 mrg if (apply_args_value != 0)
1604 1.1 mrg return apply_args_value;
1605 1.1 mrg {
1606 1.1 mrg /* When this function is called, it means that registers must be
1607 1.1 mrg saved on entry to this function. So we migrate the
1608 1.1 mrg call to the first insn of this function. */
1609 1.1 mrg rtx temp;
1610 1.1 mrg
1611 1.1 mrg start_sequence ();
1612 1.1 mrg temp = expand_builtin_apply_args_1 ();
1613 1.1 mrg rtx_insn *seq = get_insns ();
1614 1.1 mrg end_sequence ();
1615 1.1 mrg
1616 1.1 mrg apply_args_value = temp;
1617 1.1 mrg
1618 1.1 mrg /* Put the insns after the NOTE that starts the function.
1619 1.1 mrg If this is inside a start_sequence, make the outer-level insn
1620 1.1 mrg chain current, so the code is placed at the start of the
1621 1.1 mrg function. If internal_arg_pointer is a non-virtual pseudo,
1622 1.1 mrg it needs to be placed after the function that initializes
1623 1.1 mrg that pseudo. */
1624 1.1 mrg push_topmost_sequence ();
1625 1.1 mrg if (REG_P (crtl->args.internal_arg_pointer)
1626 1.1 mrg && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1627 1.1 mrg emit_insn_before (seq, parm_birth_insn);
1628 1.1 mrg else
1629 1.1 mrg emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1630 1.1 mrg pop_topmost_sequence ();
1631 1.1 mrg return temp;
1632 1.1 mrg }
1633 1.1 mrg }
1634 1.1 mrg
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   ARGUMENTS is the address of a block in the layout produced by
   expand_builtin_apply_args_1; ARGSIZE is the number of bytes of stack
   arguments to copy.  Returns the address (in ptr_mode) of a block
   holding the callee's return registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  /* On upward-growing stacks the saved pointer is past the arguments;
     step back by ARGSIZE to reach their start.  */
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  The call ensures apply_args_mode is
     initialized before the register-restore loop below reads it.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      rtx_insn *seq = targetm.gen_untyped_call (mem, result,
						result_vector (1, result));
      for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
      emit_insn (seq);
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1799 1.1 mrg
1800 1.1 mrg /* Perform an untyped return. */
1801 1.1 mrg
1802 1.1 mrg static void
1803 1.1 mrg expand_builtin_return (rtx result)
1804 1.1 mrg {
1805 1.1 mrg int size, align, regno;
1806 1.1 mrg fixed_size_mode mode;
1807 1.1 mrg rtx reg;
1808 1.1 mrg rtx_insn *call_fusage = 0;
1809 1.1 mrg
1810 1.1 mrg result = convert_memory_address (Pmode, result);
1811 1.1 mrg
1812 1.1 mrg apply_result_size ();
1813 1.1 mrg result = gen_rtx_MEM (BLKmode, result);
1814 1.1 mrg
1815 1.1 mrg if (targetm.have_untyped_return ())
1816 1.1 mrg {
1817 1.1 mrg rtx vector = result_vector (0, result);
1818 1.1 mrg emit_jump_insn (targetm.gen_untyped_return (result, vector));
1819 1.1 mrg emit_barrier ();
1820 1.1 mrg return;
1821 1.1 mrg }
1822 1.1 mrg
1823 1.1 mrg /* Restore the return value and note that each value is used. */
1824 1.1 mrg size = 0;
1825 1.1 mrg for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1826 1.1 mrg if ((mode = apply_result_mode[regno]) != VOIDmode)
1827 1.1 mrg {
1828 1.1 mrg align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1829 1.1 mrg if (size % align != 0)
1830 1.1 mrg size = CEIL (size, align) * align;
1831 1.1 mrg reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1832 1.1 mrg emit_move_insn (reg, adjust_address (result, mode, size));
1833 1.1 mrg
1834 1.1 mrg push_to_sequence (call_fusage);
1835 1.1 mrg emit_use (reg);
1836 1.1 mrg call_fusage = get_insns ();
1837 1.1 mrg end_sequence ();
1838 1.1 mrg size += GET_MODE_SIZE (mode);
1839 1.1 mrg }
1840 1.1 mrg
1841 1.1 mrg /* Put the USE insns before the return. */
1842 1.1 mrg emit_insn (call_fusage);
1843 1.1 mrg
1844 1.1 mrg /* Return whatever values was restored by jumping directly to the end
1845 1.1 mrg of the function. */
1846 1.1 mrg expand_naked_return ();
1847 1.1 mrg }
1848 1.1 mrg
1849 1.1 mrg /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1850 1.1 mrg
1851 1.1 mrg static enum type_class
1852 1.1 mrg type_to_class (tree type)
1853 1.1 mrg {
1854 1.1 mrg switch (TREE_CODE (type))
1855 1.1 mrg {
1856 1.1 mrg case VOID_TYPE: return void_type_class;
1857 1.1 mrg case INTEGER_TYPE: return integer_type_class;
1858 1.1 mrg case ENUMERAL_TYPE: return enumeral_type_class;
1859 1.1 mrg case BOOLEAN_TYPE: return boolean_type_class;
1860 1.1 mrg case POINTER_TYPE: return pointer_type_class;
1861 1.1 mrg case REFERENCE_TYPE: return reference_type_class;
1862 1.1 mrg case OFFSET_TYPE: return offset_type_class;
1863 1.1 mrg case REAL_TYPE: return real_type_class;
1864 1.1 mrg case COMPLEX_TYPE: return complex_type_class;
1865 1.1 mrg case FUNCTION_TYPE: return function_type_class;
1866 1.1 mrg case METHOD_TYPE: return method_type_class;
1867 1.1 mrg case RECORD_TYPE: return record_type_class;
1868 1.1 mrg case UNION_TYPE:
1869 1.1 mrg case QUAL_UNION_TYPE: return union_type_class;
1870 1.1 mrg case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1871 1.1 mrg ? string_type_class : array_type_class);
1872 1.1 mrg case LANG_TYPE: return lang_type_class;
1873 1.1 mrg case OPAQUE_TYPE: return opaque_type_class;
1874 1.1 mrg default: return no_type_class;
1875 1.1 mrg }
1876 1.1 mrg }
1877 1.1 mrg
1878 1.1 mrg /* Expand a call EXP to __builtin_classify_type. */
1879 1.1 mrg
1880 1.1 mrg static rtx
1881 1.1 mrg expand_builtin_classify_type (tree exp)
1882 1.1 mrg {
1883 1.1 mrg if (call_expr_nargs (exp))
1884 1.1 mrg return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1885 1.1 mrg return GEN_INT (no_type_class);
1886 1.1 mrg }
1887 1.1 mrg
1888 1.1 mrg /* This helper macro, meant to be used in mathfn_built_in below, determines
1889 1.1 mrg which among a set of builtin math functions is appropriate for a given type
1890 1.1 mrg mode. The `F' (float) and `L' (long double) are automatically generated
1891 1.1 mrg from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1892 1.1 mrg types, there are additional types that are considered with 'F32', 'F64',
1893 1.1 mrg 'F128', etc. suffixes. */
1894 1.1 mrg #define CASE_MATHFN(MATHFN) \
1895 1.1 mrg CASE_CFN_##MATHFN: \
1896 1.1 mrg fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1897 1.1 mrg fcodel = BUILT_IN_##MATHFN##L ; break;
1898 1.1 mrg /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1899 1.1 mrg types. */
1900 1.1 mrg #define CASE_MATHFN_FLOATN(MATHFN) \
1901 1.1 mrg CASE_CFN_##MATHFN: \
1902 1.1 mrg fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1903 1.1 mrg fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1904 1.1 mrg fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1905 1.1 mrg fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1906 1.1 mrg fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1907 1.1 mrg break;
1908 1.1 mrg /* Similar to above, but appends _R after any F/L suffix. */
1909 1.1 mrg #define CASE_MATHFN_REENT(MATHFN) \
1910 1.1 mrg case CFN_BUILT_IN_##MATHFN##_R: \
1911 1.1 mrg case CFN_BUILT_IN_##MATHFN##F_R: \
1912 1.1 mrg case CFN_BUILT_IN_##MATHFN##L_R: \
1913 1.1 mrg fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1914 1.1 mrg fcodel = BUILT_IN_##MATHFN##L_R ; break;
1915 1.1 mrg
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  /* _Float<N>/_Float<N>x variants; these remain END_BUILTINS unless FN
     is handled by a CASE_MATHFN_FLOATN entry below.  */
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  /* The CASE_MATHFN* macros (defined above) fill in the fcode* locals
     for each supported math built-in; the macro sequence is named so it
     can be reused by mathfn_built_in_type below.  */
  switch (fn)
    {
#define SEQ_OF_CASE_MATHFN			\
  CASE_MATHFN (ACOS)				\
  CASE_MATHFN (ACOSH)				\
  CASE_MATHFN (ASIN)				\
  CASE_MATHFN (ASINH)				\
  CASE_MATHFN (ATAN)				\
  CASE_MATHFN (ATAN2)				\
  CASE_MATHFN (ATANH)				\
  CASE_MATHFN (CBRT)				\
  CASE_MATHFN_FLOATN (CEIL)			\
  CASE_MATHFN (CEXPI)				\
  CASE_MATHFN_FLOATN (COPYSIGN)			\
  CASE_MATHFN (COS)				\
  CASE_MATHFN (COSH)				\
  CASE_MATHFN (DREM)				\
  CASE_MATHFN (ERF)				\
  CASE_MATHFN (ERFC)				\
  CASE_MATHFN (EXP)				\
  CASE_MATHFN (EXP10)				\
  CASE_MATHFN (EXP2)				\
  CASE_MATHFN (EXPM1)				\
  CASE_MATHFN (FABS)				\
  CASE_MATHFN (FDIM)				\
  CASE_MATHFN_FLOATN (FLOOR)			\
  CASE_MATHFN_FLOATN (FMA)			\
  CASE_MATHFN_FLOATN (FMAX)			\
  CASE_MATHFN_FLOATN (FMIN)			\
  CASE_MATHFN (FMOD)				\
  CASE_MATHFN (FREXP)				\
  CASE_MATHFN (GAMMA)				\
  CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */	\
  CASE_MATHFN (HUGE_VAL)			\
  CASE_MATHFN (HYPOT)				\
  CASE_MATHFN (ILOGB)				\
  CASE_MATHFN (ICEIL)				\
  CASE_MATHFN (IFLOOR)				\
  CASE_MATHFN (INF)				\
  CASE_MATHFN (IRINT)				\
  CASE_MATHFN (IROUND)				\
  CASE_MATHFN (ISINF)				\
  CASE_MATHFN (J0)				\
  CASE_MATHFN (J1)				\
  CASE_MATHFN (JN)				\
  CASE_MATHFN (LCEIL)				\
  CASE_MATHFN (LDEXP)				\
  CASE_MATHFN (LFLOOR)				\
  CASE_MATHFN (LGAMMA)				\
  CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */	\
  CASE_MATHFN (LLCEIL)				\
  CASE_MATHFN (LLFLOOR)				\
  CASE_MATHFN (LLRINT)				\
  CASE_MATHFN (LLROUND)				\
  CASE_MATHFN (LOG)				\
  CASE_MATHFN (LOG10)				\
  CASE_MATHFN (LOG1P)				\
  CASE_MATHFN (LOG2)				\
  CASE_MATHFN (LOGB)				\
  CASE_MATHFN (LRINT)				\
  CASE_MATHFN (LROUND)				\
  CASE_MATHFN (MODF)				\
  CASE_MATHFN (NAN)				\
  CASE_MATHFN (NANS)				\
  CASE_MATHFN_FLOATN (NEARBYINT)		\
  CASE_MATHFN (NEXTAFTER)			\
  CASE_MATHFN (NEXTTOWARD)			\
  CASE_MATHFN (POW)				\
  CASE_MATHFN (POWI)				\
  CASE_MATHFN (POW10)				\
  CASE_MATHFN (REMAINDER)			\
  CASE_MATHFN (REMQUO)				\
  CASE_MATHFN_FLOATN (RINT)			\
  CASE_MATHFN_FLOATN (ROUND)			\
  CASE_MATHFN_FLOATN (ROUNDEVEN)		\
  CASE_MATHFN (SCALB)				\
  CASE_MATHFN (SCALBLN)				\
  CASE_MATHFN (SCALBN)				\
  CASE_MATHFN (SIGNBIT)				\
  CASE_MATHFN (SIGNIFICAND)			\
  CASE_MATHFN (SIN)				\
  CASE_MATHFN (SINCOS)				\
  CASE_MATHFN (SINH)				\
  CASE_MATHFN_FLOATN (SQRT)			\
  CASE_MATHFN (TAN)				\
  CASE_MATHFN (TANH)				\
  CASE_MATHFN (TGAMMA)				\
  CASE_MATHFN_FLOATN (TRUNC)			\
  CASE_MATHFN (Y0)				\
  CASE_MATHFN (Y1)				\
  CASE_MATHFN (YN)

    SEQ_OF_CASE_MATHFN

    default:
      return END_BUILTINS;
    }

  /* Select the fcode matching TYPE's main variant.  */
  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}

#undef CASE_MATHFN
#undef CASE_MATHFN_FLOATN
#undef CASE_MATHFN_REENT
2060 1.1 mrg
2061 1.1 mrg /* Return mathematic function equivalent to FN but operating directly on TYPE,
2062 1.1 mrg if available. If IMPLICIT_P is true use the implicit builtin declaration,
2063 1.1 mrg otherwise use the explicit declaration. If we can't do the conversion,
2064 1.1 mrg return null. */
2065 1.1 mrg
2066 1.1 mrg static tree
2067 1.1 mrg mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2068 1.1 mrg {
2069 1.1 mrg built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2070 1.1 mrg if (fcode2 == END_BUILTINS)
2071 1.1 mrg return NULL_TREE;
2072 1.1 mrg
2073 1.1 mrg if (implicit_p && !builtin_decl_implicit_p (fcode2))
2074 1.1 mrg return NULL_TREE;
2075 1.1 mrg
2076 1.1 mrg return builtin_decl_explicit (fcode2);
2077 1.1 mrg }
2078 1.1 mrg
2079 1.1 mrg /* Like mathfn_built_in_1, but always use the implicit array. */
2080 1.1 mrg
2081 1.1 mrg tree
2082 1.1 mrg mathfn_built_in (tree type, combined_fn fn)
2083 1.1 mrg {
2084 1.1 mrg return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2085 1.1 mrg }
2086 1.1 mrg
2087 1.1 mrg /* Like mathfn_built_in_1, but take a built_in_function and
2088 1.1 mrg always use the implicit array. */
2089 1.1 mrg
2090 1.1 mrg tree
2091 1.1 mrg mathfn_built_in (tree type, enum built_in_function fn)
2092 1.1 mrg {
2093 1.1 mrg return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2094 1.1 mrg }
2095 1.1 mrg
/* Return the type associated with a built in function, i.e., the one
   to be passed to mathfn_built_in to get the type-specific
   function.  Returns NULL_TREE for functions outside the
   SEQ_OF_CASE_MATHFN set.  */

tree
mathfn_built_in_type (combined_fn fn)
{
/* Map each suffix of a math built-in back to the floating-point type it
   operates on; these macros are expanded by SEQ_OF_CASE_MATHFN below.  */
#define CASE_MATHFN(MATHFN)			\
  case CFN_BUILT_IN_##MATHFN:			\
    return double_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F:		\
    return float_type_node;			\
  case CFN_BUILT_IN_##MATHFN##L:		\
    return long_double_type_node;

#define CASE_MATHFN_FLOATN(MATHFN)		\
  CASE_MATHFN(MATHFN)				\
  case CFN_BUILT_IN_##MATHFN##F16:		\
    return float16_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F32:		\
    return float32_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F64:		\
    return float64_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F128:		\
    return float128_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F32X:		\
    return float32x_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F64X:		\
    return float64x_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F128X:		\
    return float128x_type_node;

/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN)		\
  case CFN_BUILT_IN_##MATHFN##_R:		\
    return double_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F_R:		\
    return float_type_node;			\
  case CFN_BUILT_IN_##MATHFN##L_R:		\
    return long_double_type_node;

  switch (fn)
    {
    SEQ_OF_CASE_MATHFN

    default:
      return NULL_TREE;
    }

#undef CASE_MATHFN
#undef CASE_MATHFN_FLOATN
#undef CASE_MATHFN_REENT
#undef SEQ_OF_CASE_MATHFN
}
2150 1.1 mrg
/* Check whether there is an internal function associated with function FN
   and return type RETURN_TYPE.  Return the function if so, otherwise return
   IFN_LAST.

   Note that this function only tests whether the function is defined in
   internals.def, not whether it is actually available on the target.  */

static internal_fn
associated_internal_fn (built_in_function fn, tree return_type)
{
  switch (fn)
    {
    /* Generate a case for every float/int built-in that internal-fn.def
       declares a direct internal-function counterpart for.  */
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      /* ldexp is only equivalent when the floating-point format's radix
	 is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
2188 1.1 mrg
2189 1.1 mrg /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2190 1.1 mrg return its code, otherwise return IFN_LAST. Note that this function
2191 1.1 mrg only tests whether the function is defined in internals.def, not whether
2192 1.1 mrg it is actually available on the target. */
2193 1.1 mrg
2194 1.1 mrg internal_fn
2195 1.1 mrg associated_internal_fn (tree fndecl)
2196 1.1 mrg {
2197 1.1 mrg gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2198 1.1 mrg return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2199 1.1 mrg TREE_TYPE (TREE_TYPE (fndecl)));
2200 1.1 mrg }
2201 1.1 mrg
2202 1.1 mrg /* Check whether there is an internal function associated with function CFN
2203 1.1 mrg and return type RETURN_TYPE. Return the function if so, otherwise return
2204 1.1 mrg IFN_LAST.
2205 1.1 mrg
2206 1.1 mrg Note that this function only tests whether the function is defined in
2207 1.1 mrg internals.def, not whether it is actually available on the target. */
2208 1.1 mrg
2209 1.1 mrg internal_fn
2210 1.1 mrg associated_internal_fn (combined_fn cfn, tree return_type)
2211 1.1 mrg {
2212 1.1 mrg if (internal_fn_p (cfn))
2213 1.1 mrg return as_internal_fn (cfn);
2214 1.1 mrg return associated_internal_fn (as_builtin_fn (cfn), return_type);
2215 1.1 mrg }
2216 1.1 mrg
2217 1.1 mrg /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2218 1.1 mrg on the current target by a call to an internal function, return the
2219 1.1 mrg code of that internal function, otherwise return IFN_LAST. The caller
2220 1.1 mrg is responsible for ensuring that any side-effects of the built-in
2221 1.1 mrg call are dealt with correctly. E.g. if CALL sets errno, the caller
2222 1.1 mrg must decide that the errno result isn't needed or make it available
2223 1.1 mrg in some other way. */
2224 1.1 mrg
2225 1.1 mrg internal_fn
2226 1.1 mrg replacement_internal_fn (gcall *call)
2227 1.1 mrg {
2228 1.1 mrg if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2229 1.1 mrg {
2230 1.1 mrg internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2231 1.1 mrg if (ifn != IFN_LAST)
2232 1.1 mrg {
2233 1.1 mrg tree_pair types = direct_internal_fn_types (ifn, call);
2234 1.1 mrg optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2235 1.1 mrg if (direct_internal_fn_supported_p (ifn, types, opt_type))
2236 1.1 mrg return ifn;
2237 1.1 mrg }
2238 1.1 mrg }
2239 1.1 mrg return IFN_LAST;
2240 1.1 mrg }
2241 1.1 mrg
/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  /* Only the fma family is routed through this expander.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list, so that a possible fallback
     library call below does not evaluate side effects twice.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2316 1.1 mrg
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos insn for both sin and cos; the unwanted
     half of the result is simply discarded below.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* The sincos insn produces both values at once; request only
	     the one we need and pass 0 for the other output.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2417 1.1 mrg
2418 1.1 mrg /* Given an interclass math builtin decl FNDECL and it's argument ARG
2419 1.1 mrg return an RTL instruction code that implements the functionality.
2420 1.1 mrg If that isn't possible or available return CODE_FOR_nothing. */
2421 1.1 mrg
2422 1.1 mrg static enum insn_code
2423 1.1 mrg interclass_mathfn_icode (tree arg, tree fndecl)
2424 1.1 mrg {
2425 1.1 mrg bool errno_set = false;
2426 1.1 mrg optab builtin_optab = unknown_optab;
2427 1.1 mrg machine_mode mode;
2428 1.1 mrg
2429 1.1 mrg switch (DECL_FUNCTION_CODE (fndecl))
2430 1.1 mrg {
2431 1.1 mrg CASE_FLT_FN (BUILT_IN_ILOGB):
2432 1.1 mrg errno_set = true; builtin_optab = ilogb_optab; break;
2433 1.1 mrg CASE_FLT_FN (BUILT_IN_ISINF):
2434 1.1 mrg builtin_optab = isinf_optab; break;
2435 1.1 mrg case BUILT_IN_ISNORMAL:
2436 1.1 mrg case BUILT_IN_ISFINITE:
2437 1.1 mrg CASE_FLT_FN (BUILT_IN_FINITE):
2438 1.1 mrg case BUILT_IN_FINITED32:
2439 1.1 mrg case BUILT_IN_FINITED64:
2440 1.1 mrg case BUILT_IN_FINITED128:
2441 1.1 mrg case BUILT_IN_ISINFD32:
2442 1.1 mrg case BUILT_IN_ISINFD64:
2443 1.1 mrg case BUILT_IN_ISINFD128:
2444 1.1 mrg /* These builtins have no optabs (yet). */
2445 1.1 mrg break;
2446 1.1 mrg default:
2447 1.1 mrg gcc_unreachable ();
2448 1.1 mrg }
2449 1.1 mrg
2450 1.1 mrg /* There's no easy way to detect the case we need to set EDOM. */
2451 1.1 mrg if (flag_errno_math && errno_set)
2452 1.1 mrg return CODE_FOR_nothing;
2453 1.1 mrg
2454 1.1 mrg /* Optab mode depends on the mode of the input argument. */
2455 1.1 mrg mode = TYPE_MODE (TREE_TYPE (arg));
2456 1.1 mrg
2457 1.1 mrg if (builtin_optab)
2458 1.1 mrg return optab_handler (builtin_optab, mode);
2459 1.1 mrg return CODE_FOR_nothing;
2460 1.1 mrg }
2461 1.1 mrg
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      class expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      /* Remember the original argument so it can be restored if the
	 expansion is abandoned below.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* The insn could not be emitted; delete anything generated above
	 and fall through to a normal call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2512 1.1 mrg
2513 1.1 mrg /* Expand a call to the builtin sincos math function.
2514 1.1 mrg Return NULL_RTX if a normal call should be emitted rather than expanding the
2515 1.1 mrg function in-line. EXP is the expression that is a call to the builtin
2516 1.1 mrg function. */
2517 1.1 mrg
2518 1.1 mrg static rtx
2519 1.1 mrg expand_builtin_sincos (tree exp)
2520 1.1 mrg {
2521 1.1 mrg rtx op0, op1, op2, target1, target2;
2522 1.1 mrg machine_mode mode;
2523 1.1 mrg tree arg, sinp, cosp;
2524 1.1 mrg int result;
2525 1.1 mrg location_t loc = EXPR_LOCATION (exp);
2526 1.1 mrg tree alias_type, alias_off;
2527 1.1 mrg
2528 1.1 mrg if (!validate_arglist (exp, REAL_TYPE,
2529 1.1 mrg POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2530 1.1 mrg return NULL_RTX;
2531 1.1 mrg
2532 1.1 mrg arg = CALL_EXPR_ARG (exp, 0);
2533 1.1 mrg sinp = CALL_EXPR_ARG (exp, 1);
2534 1.1 mrg cosp = CALL_EXPR_ARG (exp, 2);
2535 1.1 mrg
2536 1.1 mrg /* Make a suitable register to place result in. */
2537 1.1 mrg mode = TYPE_MODE (TREE_TYPE (arg));
2538 1.1 mrg
2539 1.1 mrg /* Check if sincos insn is available, otherwise emit the call. */
2540 1.1 mrg if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2541 1.1 mrg return NULL_RTX;
2542 1.1 mrg
2543 1.1 mrg target1 = gen_reg_rtx (mode);
2544 1.1 mrg target2 = gen_reg_rtx (mode);
2545 1.1 mrg
2546 1.1 mrg op0 = expand_normal (arg);
2547 1.1 mrg alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2548 1.1 mrg alias_off = build_int_cst (alias_type, 0);
2549 1.1 mrg op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2550 1.1 mrg sinp, alias_off));
2551 1.1 mrg op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2552 1.1 mrg cosp, alias_off));
2553 1.1 mrg
2554 1.1 mrg /* Compute into target1 and target2.
2555 1.1 mrg Set TARGET to wherever the result comes back. */
2556 1.1 mrg result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2557 1.1 mrg gcc_assert (result);
2558 1.1 mrg
2559 1.1 mrg /* Move target1 and target2 to the memory locations indicated
2560 1.1 mrg by op1 and op2. */
2561 1.1 mrg emit_move_insn (op1, target1);
2562 1.1 mrg emit_move_insn (op2, target2);
2563 1.1 mrg
2564 1.1 mrg return const0_rtx;
2565 1.1 mrg }
2566 1.1 mrg
2567 1.1 mrg /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2568 1.1 mrg result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2569 1.1 mrg static rtx
2570 1.1 mrg expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2571 1.1 mrg {
2572 1.1 mrg if (!validate_arglist (exp, VOID_TYPE))
2573 1.1 mrg return NULL_RTX;
2574 1.1 mrg
2575 1.1 mrg insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2576 1.1 mrg if (icode == CODE_FOR_nothing)
2577 1.1 mrg return NULL_RTX;
2578 1.1 mrg
2579 1.1 mrg if (target == 0
2580 1.1 mrg || GET_MODE (target) != target_mode
2581 1.1 mrg || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2582 1.1 mrg target = gen_reg_rtx (target_mode);
2583 1.1 mrg
2584 1.1 mrg rtx pat = GEN_FCN (icode) (target);
2585 1.1 mrg if (!pat)
2586 1.1 mrg return NULL_RTX;
2587 1.1 mrg emit_insn (pat);
2588 1.1 mrg
2589 1.1 mrg return target;
2590 1.1 mrg }
2591 1.1 mrg
2592 1.1 mrg /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2593 1.1 mrg fenv.h), returning the result and setting it in TARGET. Otherwise return
2594 1.1 mrg NULL_RTX on failure. */
2595 1.1 mrg static rtx
2596 1.1 mrg expand_builtin_feclear_feraise_except (tree exp, rtx target,
2597 1.1 mrg machine_mode target_mode, optab op_optab)
2598 1.1 mrg {
2599 1.1 mrg if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2600 1.1 mrg return NULL_RTX;
2601 1.1 mrg rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2602 1.1 mrg
2603 1.1 mrg insn_code icode = direct_optab_handler (op_optab, SImode);
2604 1.1 mrg if (icode == CODE_FOR_nothing)
2605 1.1 mrg return NULL_RTX;
2606 1.1 mrg
2607 1.1 mrg if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2608 1.1 mrg return NULL_RTX;
2609 1.1 mrg
2610 1.1 mrg if (target == 0
2611 1.1 mrg || GET_MODE (target) != target_mode
2612 1.1 mrg || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2613 1.1 mrg target = gen_reg_rtx (target_mode);
2614 1.1 mrg
2615 1.1 mrg rtx pat = GEN_FCN (icode) (target, op0);
2616 1.1 mrg if (!pat)
2617 1.1 mrg return NULL_RTX;
2618 1.1 mrg emit_insn (pat);
2619 1.1 mrg
2620 1.1 mrg return target;
2621 1.1 mrg }
2622 1.1 mrg
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos, type))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* Allocate stack slots to receive the sin and cos results and
	 take their addresses to pass to the sincos libcall.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* Build the complex argument 0 + ARG*i so cexp computes cexpi.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos (op2) as the real part,
     sin (op1) as the imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2731 1.1 mrg
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
2750 1.1 mrg
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab and the plain floor/ceil builtin to
     fall back on if the optab is not supported.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Emit a call to the floor/ceil variant with the stabilized argument.  */
  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2888 1.1 mrg
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab; for the int-returning variants also
     record the long-returning builtin as a fallback.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.
	 As scalar float conversions with same mode are useless in GIMPLE,
	 we can end up e.g. with _Float32 argument passed to float builtin,
	 try to get the type from the builtin prototype first.  */
      tree fallback_fndecl = NULL_TREE;
      if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
	fallback_fndecl
	  = mathfn_built_in_1 (TREE_VALUE (argtypes),
			       as_combined_fn (fallback_fn), 0);
      if (fallback_fndecl == NULL_TREE)
	fallback_fndecl
	  = mathfn_built_in_1 (TREE_TYPE (arg),
			       as_combined_fn (fallback_fn), 0);
      if (fallback_fndecl)
	{
	  exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				       fallback_fndecl, 1, arg);

	  target = expand_call (exp, NULL_RTX, target == const0_rtx);
	  target = maybe_emit_group_store (target, TREE_TYPE (exp));
	  /* Narrow the long result back to the int result mode.  */
	  return convert_to_mode (mode, target, 0);
	}
    }

  return expand_call (exp, target, target == const0_rtx);
}
3002 1.1 mrg
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  /* powi takes a floating-point base and an integer exponent.  */
  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Force both operands into the modes the libgcc routine expects.  */
  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  /* LCT_CONST: the libgcc __powi* routines are pure functions.  */
  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode,
				    op0, mode, op1, mode2);

  return target;
}
3044 1.1 mrg
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);

  /* If the length can be computed at compile-time, return it.  */
  if (tree len = c_strlen (src, 0))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++;  x = 3;  */
  tree len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  Walk the
     modes from TARGET_MODE upward until one has a strlen insn.  */
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  rtx src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  rtx_insn *before_strlen = get_last_insn ();

  class expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  /* Splice the source expansion in ahead of the strlen insn emitted
     above.  */
  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
3144 1.1 mrg
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (src, 0, &lendata, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  /* With a constant bound and a known constant length, fold the call
     to the smaller of the two.  */
  if (TREE_CODE (bound) == INTEGER_CST)
    {
      if (!len)
	return NULL_RTX;

      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  /* Determine the range of the non-constant bound from global range
     info; give up unless it is a proper VR_RANGE.  */
  wide_int min, max;
  value_range r;
  get_global_range_query ()->range_of_expr (r, bound);
  if (r.kind () != VR_RANGE)
    return NULL_RTX;
  min = r.lower_bound ();
  max = r.upper_bound ();

  if (!len || TREE_CODE (len) != INTEGER_CST)
    {
      bool exact;
      lendata.decl = unterminated_array (src, &len, &exact);
      if (!lendata.decl)
	return NULL_RTX;
    }

  /* LENDATA.DECL set means the source is not known to be nul-terminated;
     don't fold in that case.  */
  if (lendata.decl)
    return NULL_RTX;

  /* If the bound is provably greater than the string length, the result
     is just the length.  */
  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
3208 1.1 mrg
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from bytes at DATA + OFFSET and return it reinterpreted as
   a target constant.  The second (unnamed) argument of the callback
   signature is unused here.  */

static rtx
builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
			 fixed_size_mode mode)
{
  /* The REPresentation pointed to by DATA need not be a nul-terminated
     string but the caller guarantees it's large enough for MODE.  */
  const char *rep = (const char *) data;

  /* The by-pieces infrastructure does not try to pick a vector mode
     for memcpy expansion.  */
  return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
		    /*nul_terminated=*/false);
}
3226 1.1 mrg
/* LEN specify length of the block of memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  /* A compile-time constant length pins all three outputs exactly.  */
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_kind range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine using global SSA range information when available.  */
      if (TREE_CODE (len) == SSA_NAME)
	{
	  value_range r;
	  get_global_range_query ()->range_of_expr (r, len);
	  range_type = r.kind ();
	  if (range_type != VR_UNDEFINED)
	    {
	      min = wi::to_wide (r.min ());
	      max = wi::to_wide (r.max ());
	    }
	}
      if (range_type == VR_RANGE)
	{
	  /* Tighten the type-derived bounds with the range info.  */
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     Produce anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
3296 1.1 mrg
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* memcpy returns DEST (RETURN_BEGIN) and the operands may not
     overlap (might_overlap == false).  */
  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
					  /*retmode=*/ RETURN_BEGIN, false);
}
3316 1.1 mrg
/* Check a call EXP to the memmove built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_memmove (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Same expansion as memcpy except the operands may overlap
     (might_overlap == true).  */
  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
					  /*retmode=*/ RETURN_BEGIN, true);
}
3334 1.1 mrg
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Policy does not generally allow using compute_objsize (which
     is used internally by check_memop_size) to change code generation
     or drive optimization decisions.

     In this instance it is safe because the code we generate has
     the same semantics regardless of the return value of
     check_memop_sizes.   Exactly the same amount of data is copied
     and the return value is exactly the same in both cases.

     Furthermore, check_memop_size always uses mode 0 for the call to
     compute_objsize, so the imprecise nature of compute_objsize is
     avoided.  */

  /* Avoid expanding mempcpy into memcpy when the call is determined
     to overflow the buffer.  This also prevents the same overflow
     from being diagnosed again when expanding memcpy.  */

  /* mempcpy returns DEST + LEN, i.e. one past the last byte written
     (RETURN_END).  */
  return expand_builtin_mempcpy_args (dest, src, len,
				      target, exp, /*retmode=*/ RETURN_END);
}
3371 1.1 mrg
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expanding should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.  Return
   value is based on RETMODE argument.  MIGHT_OVERLAP is true for memmove,
   where the source and destination regions may overlap.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, memop_ret retmode,
				 bool might_overlap)
{
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  bool is_move_done;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Use profile feedback (if any) to guess alignment and size.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);

  /* Try to get the byte representation of the constant SRC points to,
     with its byte size in NBYTES.  */
  unsigned HOST_WIDE_INT nbytes;
  const char *rep = getbyterep (src, &nbytes);

  /* If the function's constant bound LEN_RTX is less than or equal
     to the byte size of the representation of the constant argument,
     and if block move would be done by pieces, we can avoid loading
     the bytes from memory and only store the computed constant.
     This works in the overlap (memmove) case as well because
     store_by_pieces just generates a series of stores of constants
     from the representation returned by getbyterep().  */
  if (rep
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, rep),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, rep),
				  dest_align, false, retmode);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp)
      && (retmode == RETURN_BEGIN || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
			   && retmode == RETURN_END
			   && !might_overlap
			   && target != const0_rtx);
  if (use_mempcpy_call)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size,
				     use_mempcpy_call, &is_move_done,
				     might_overlap);

  /* Bail out when a mempcpy call would be expanded as libcall and when
     we have a target that provides a fast implementation
     of mempcpy routine.  */
  if (!is_move_done)
    return NULL_RTX;

  /* pc_rtx signals the move was emitted as a (tail) call.  */
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (retmode == RETURN_END_MINUS_ONE)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
3487 1.1 mrg
/* Expand DEST, SRC and LEN as a mempcpy-style copy (no overlap) with
   the return value selected by RETMODE.  Thin wrapper around
   expand_builtin_memory_copy_args; ORIG_EXP is the originating call.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, memop_ret retmode)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  retmode, false);
}
3495 1.1 mrg
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.
   Return value is based on RETMODE argument.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  class expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      /* Caller wants DEST back; keep its address in TARGET before the
	 insn potentially clobbers the destination operand.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0],
			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the
	 NUL terminator.  If the caller requested a mempcpy-like
	 return value, adjust it.  */
      if (retmode == RETURN_END)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3541 1.1 mrg
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  return expand_builtin_strcpy_args (exp, dest, src, target);
}
3558 1.1 mrg
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  The first (EXP) parameter is unused here.  */

static rtx
expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
{
  /* strcpy returns DEST, hence RETURN_BEGIN.  */
  return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
}
3570 1.1 mrg
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      c_strlen_data lendata = { };
      if (!c_getstr (src)
	  || !(len = c_strlen (src, 0, &lendata, 1)))
	return expand_movstr (dst, src, target,
			      /*retmode=*/ RETURN_END_MINUS_ONE);

      /* Known length: expand as mempcpy of LEN + 1 bytes (including
	 the terminating nul), asking for a pointer to the last byte
	 written, i.e. the nul -- exactly stpcpy's return value.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp,
					 /*retmode=*/ RETURN_END_MINUS_ONE);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* Fall back to strcpy and compute the return value as
		 DEST + LEN by hand.  */
	      ret = expand_builtin_strcpy_args (exp, dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target,
			    /*retmode=*/ RETURN_END_MINUS_ONE);
    }
}
3651 1.1 mrg
/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
   arguments while being careful to avoid duplicate warnings (which could
   be issued if the expander were to expand the call, resulting in it
   being emitted in expand_call()).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
    {
      /* The call has been successfully expanded.  Check for nonstring
	 arguments and issue warnings as appropriate.  */
      maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
      return ret;
    }

  return NULL_RTX;
}
3670 1.1 mrg
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  Offsets past the end of the string yield zero, matching
   strncpy's zero-padding semantics.  The second (unnamed) argument of
   the callback signature is unused here.  */

rtx
builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
			  fixed_size_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  /* The by-pieces infrastructure does not try to pick a vector mode
     for strncpy expansion.  */
  return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
}
3688 1.1 mrg
/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  Returns true on
   success (no overflow or invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  c_strlen_data lendata = { };
  get_range_strlen (src, &lendata, /* eltsize = */ 1);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  access_data data (nullptr, exp, access_read_write, maxread, true);
  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lendata.minlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = EXPR_LOCATION (exp);
      warning_at (loc, OPT_Wstringop_overflow_,
		  "%qD specified bound %E equals destination size",
		  get_callee_fndecl (exp), maxread);

      return false;
    }

  /* With an unknown source length, or a bound smaller than the length,
     the bound is what limits the copy.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
		       objsize, data.mode, &data);
}
3748 1.1 mrg
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* We must be passed a constant len and src parameter.  */
  if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
    return NULL_RTX;

  /* SLEN becomes strlen (SRC) + 1, i.e. the size including the nul.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We're required to pad with trailing zeros if the requested
     len is greater than strlen(s2)+1.  In that case try to
     use store_by_pieces, if it fails, punt.  */
  if (tree_int_cst_lt (slen, len))
    {
      unsigned int dest_align = get_pointer_alignment (dest);
      const char *p = c_getstr (src);
      rtx dest_mem;

      if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	  || !can_store_by_pieces (tree_to_uhwi (len),
				   builtin_strncpy_read_str,
				   CONST_CAST (char *, p),
				   dest_align, false))
	return NULL_RTX;

      dest_mem = get_memory_rtx (dest, len);
      store_by_pieces (dest_mem, tree_to_uhwi (len),
		       builtin_strncpy_read_str,
		       CONST_CAST (char *, p), dest_align, false,
		       RETURN_BEGIN);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  return NULL_RTX;
}
3802 1.1 mrg
/* Return the RTL of a register in MODE holding the same value that PREV
   produced in the previous store_by_pieces iteration, or nullptr if the
   previous value cannot (or should not) be reused for MODE.  PREV->data
   is the RTL from the previous iteration and PREV->mode its mode.  */

static rtx
gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
{
  rtx target = nullptr;
  /* Nothing to reuse on the first iteration.  */
  if (prev != nullptr && prev->data != nullptr)
    {
      /* Use the previous data in the same mode.  */
      if (prev->mode == mode)
	return prev->data;

      fixed_size_mode prev_mode = prev->mode;

      /* Don't use the previous data to write QImode if it is in a
	 vector mode.  */
      if (VECTOR_MODE_P (prev_mode) && mode == QImode)
	return target;

      rtx prev_rtx = prev->data;

      if (REG_P (prev_rtx)
	  && HARD_REGISTER_P (prev_rtx)
	  && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
	{
	  /* This case occurs when PREV_MODE is a vector and when
	     MODE is too small to store using vector operations.
	     After register allocation, the code will need to move the
	     lowpart of the vector register into a non-vector register.

	     Also, the target has chosen to use a hard register
	     instead of going with the default choice of using a
	     pseudo register.  We should respect that choice and try to
	     avoid creating a pseudo register with the same mode as the
	     current hard register.

	     In principle, we could just use a lowpart MODE subreg of
	     the vector register.  However, the vector register mode might
	     be too wide for non-vector registers, and we already know
	     that the non-vector mode is too small for vector registers.
	     It's therefore likely that we'd need to spill to memory in
	     the vector mode and reload the non-vector value from there.

	     Try to avoid that by reducing the vector register to the
	     smallest size that it can hold.  This should increase the
	     chances that non-vector registers can hold both the inner
	     and outer modes of the subreg that we generate later.  */
	  machine_mode m;
	  fixed_size_mode candidate;
	  /* Walk candidate modes from narrowest upward; stop once we
	     reach PREV_MODE's size without finding a usable subreg.  */
	  FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
	    if (is_a<fixed_size_mode> (m, &candidate))
	      {
		if (GET_MODE_SIZE (candidate)
		    >= GET_MODE_SIZE (prev_mode))
		  break;
		if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
		    && lowpart_subreg_regno (REGNO (prev_rtx),
					     prev_mode, candidate) >= 0)
		  {
		    target = lowpart_subreg (candidate, prev_rtx,
					     prev_mode);
		    prev_rtx = target;
		    prev_mode = candidate;
		    break;
		  }
	      }
	  /* No narrower candidate worked: copy the hard register into
	     a fresh pseudo so the final subreg below is valid.  */
	  if (target == nullptr)
	    prev_rtx = copy_to_reg (prev_rtx);
	}

      target = lowpart_subreg (mode, prev_rtx, prev_mode);
    }
  return target;
}
3878 1.1 mrg
/* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  DATA points at the single fill byte to replicate.  If
   PREV isn't nullptr, it has the RTL info from the previous
   iteration.  */

rtx
builtin_memset_read_str (void *data, void *prev,
			 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 fixed_size_mode mode)
{
  const char *c = (const char *) data;
  unsigned int size = GET_MODE_SIZE (mode);

  /* Reuse the value computed in the previous iteration when we can.  */
  rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
					   mode);
  if (target != nullptr)
    return target;
  rtx src = gen_int_mode (*c, QImode);

  if (VECTOR_MODE_P (mode))
    {
      gcc_assert (GET_MODE_INNER (mode) == QImode);

      rtx const_vec = gen_const_vec_duplicate (mode, src);
      if (prev == NULL)
	/* Return CONST_VECTOR when called by a query function.  */
	return const_vec;

      /* Use the move expander with CONST_VECTOR.  */
      target = targetm.gen_memset_scratch_rtx (mode);
      emit_move_insn (target, const_vec);
      return target;
    }

  /* Scalar case: build a SIZE-byte buffer filled with the byte and
     read it back as an integer constant in MODE.  */
  char *p = XALLOCAVEC (char, size);

  memset (p, *c, size);

  /* Vector modes should be handled above.  */
  return c_readstr (p, as_a <scalar_int_mode> (mode));
}
3920 1.1 mrg
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register DATA.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  If PREV isn't
   nullptr, it has the RTL info from the previous iteration.  */

static rtx
builtin_memset_gen_str (void *data, void *prev,
			HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			fixed_size_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  /* A single byte is just the value itself.  */
  if (size == 1)
    return (rtx) data;

  /* Reuse the value computed in the previous iteration when we can.  */
  target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
  if (target != nullptr)
    return target;

  if (VECTOR_MODE_P (mode))
    {
      gcc_assert (GET_MODE_INNER (mode) == QImode);

      /* vec_duplicate_optab is a precondition to pick a vector mode for
	 the memset expander.  */
      insn_code icode = optab_handler (vec_duplicate_optab, mode);

      /* Broadcast the byte into every lane of a scratch register.  */
      target = targetm.gen_memset_scratch_rtx (mode);
      class expand_operand ops[2];
      create_output_operand (&ops[0], target, mode);
      create_input_operand (&ops[1], (rtx) data, QImode);
      expand_insn (icode, 2, ops);
      if (!rtx_equal_p (target, ops[0].value))
	emit_move_insn (target, ops[0].value);

      return target;
    }

  /* Scalar case: multiply the zero-extended byte by 0x0101...01 to
     replicate it into every byte of the wider mode.  */
  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  /* Vector modes should be handled above.  */
  coeff = c_readstr (p, as_a <scalar_int_mode> (mode));

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
3972 1.1 mrg
3973 1.1 mrg /* Expand expression EXP, which is a call to the memset builtin. Return
3974 1.1 mrg NULL_RTX if we failed the caller should emit a normal call, otherwise
3975 1.1 mrg try to get the result in TARGET, if convenient (and in mode MODE if that's
3976 1.1 mrg convenient). */
3977 1.1 mrg
3978 1.1 mrg rtx
3979 1.1 mrg expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3980 1.1 mrg {
3981 1.1 mrg if (!validate_arglist (exp,
3982 1.1 mrg POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3983 1.1 mrg return NULL_RTX;
3984 1.1 mrg
3985 1.1 mrg tree dest = CALL_EXPR_ARG (exp, 0);
3986 1.1 mrg tree val = CALL_EXPR_ARG (exp, 1);
3987 1.1 mrg tree len = CALL_EXPR_ARG (exp, 2);
3988 1.1 mrg
3989 1.1 mrg return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3990 1.1 mrg }
3991 1.1 mrg
/* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
   Return TRUE if successful, FALSE otherwise.  TO is assumed to be
   aligned at an ALIGN-bits boundary.  LEN must be a multiple of
   1<<CTZ_LEN between MIN_LEN and MAX_LEN.

   The strategy is to issue one store_by_pieces for each power of two,
   from most to least significant, guarded by a test on whether there
   are at least that many bytes left to copy in LEN.

   ??? Should we skip some powers of two in favor of loops?  Maybe start
   at the max of TO/LEN/word alignment, at least when optimizing for
   size, instead of ensuring O(log len) dynamic compares?  */

bool
try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
			      unsigned HOST_WIDE_INT min_len,
			      unsigned HOST_WIDE_INT max_len,
			      rtx val, char valc, unsigned int align)
{
  int max_bits = floor_log2 (max_len);
  int min_bits = floor_log2 (min_len);
  int sctz_len = ctz_len;

  gcc_checking_assert (sctz_len >= 0);

  /* With a dynamic VAL, the can_store_by_pieces probe below uses a
     nonzero placeholder byte instead.  */
  if (val)
    valc = 1;

  /* Bits more significant than TST_BITS are part of the shared prefix
     in the binary representation of both min_len and max_len.  Since
     they're identical, we don't need to test them in the loop.  */
  int tst_bits = (max_bits != min_bits ? max_bits
		  : floor_log2 (max_len ^ min_len));

  /* Check whether it's profitable to start by storing a fixed BLKSIZE
     bytes, to lower max_bits.  In the unlikely case of a constant LEN
     (implied by identical MAX_LEN and MIN_LEN), we want to issue a
     single store_by_pieces, but otherwise, select the minimum multiple
     of the ALIGN (in bytes) and of the MCD of the possible LENs, that
     brings MAX_LEN below TST_BITS, if that's lower than min_len.  */
  unsigned HOST_WIDE_INT blksize;
  if (max_len > min_len)
    {
      unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
					  align / BITS_PER_UNIT);
      blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
      blksize &= ~(alrng - 1);
    }
  else if (max_len == min_len)
    blksize = max_len;
  else
    /* Huh, max_len < min_len?  Punt.  See pr100843.c.  */
    return false;
  if (min_len >= blksize)
    {
      /* The initial block is unconditional, so account for it by
	 shrinking the [MIN_LEN, MAX_LEN] range and recomputing the
	 derived bit counts.  */
      min_len -= blksize;
      min_bits = floor_log2 (min_len);
      max_len -= blksize;
      max_bits = floor_log2 (max_len);

      tst_bits = (max_bits != min_bits ? max_bits
		 : floor_log2 (max_len ^ min_len));
    }
  else
    blksize = 0;

  /* Check that we can use store by pieces for the maximum store count
     we may issue (initial fixed-size block, plus conditional
     power-of-two-sized from max_bits to ctz_len).  */
  unsigned HOST_WIDE_INT xlenest = blksize;
  /* NOTE: max_bits < 0 here means max_len was 0 after the adjustment
     above (floor_log2 of zero is negative), so only the initial block
     remains to be costed.  */
  if (max_bits >= 0)
    xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
		- (HOST_WIDE_INT_1U << ctz_len));
  if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
			    &valc, align, true))
    return false;

  /* Pick the constfn matching dynamic VAL vs constant VALC.  */
  by_pieces_constfn constfun;
  void *constfundata;
  if (val)
    {
      constfun = builtin_memset_gen_str;
      constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
				     val);
    }
  else
    {
      constfun = builtin_memset_read_str;
      constfundata = &valc;
    }

  /* PTR tracks the current store address, REM the dynamic byte count
     remaining; both are updated after every issued block.  */
  rtx ptr = copy_addr_to_reg (XEXP (to, 0));
  rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
  to = replace_equiv_address (to, ptr);
  set_mem_align (to, align);

  if (blksize)
    {
      to = store_by_pieces (to, blksize,
			    constfun, constfundata,
			    align, true,
			    max_len != 0 ? RETURN_END : RETURN_BEGIN);
      if (max_len == 0)
	return true;

      /* Adjust PTR, TO and REM.  Since TO's address is likely
	 PTR+offset, we have to replace it.  */
      emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
      to = replace_equiv_address (to, ptr);
      rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
      emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
    }

  /* Iterate over power-of-two block sizes from the maximum length to
     the least significant bit possibly set in the length.  */
  for (int i = max_bits; i >= sctz_len; i--)
    {
      rtx_code_label *label = NULL;
      blksize = HOST_WIDE_INT_1U << i;

      /* If we're past the bits shared between min_ and max_len, expand
	 a test on the dynamic length, comparing it with the
	 BLKSIZE.  */
      if (i <= tst_bits)
	{
	  label = gen_label_rtx ();
	  emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
				   ptr_mode, 1, label,
				   profile_probability::even ());
	}
      /* If we are at a bit that is in the prefix shared by min_ and
	 max_len, skip this BLKSIZE if the bit is clear.  */
      else if ((max_len & blksize) == 0)
	continue;

      /* Issue a store of BLKSIZE bytes.  */
      to = store_by_pieces (to, blksize,
			    constfun, constfundata,
			    align, true,
			    i != sctz_len ? RETURN_END : RETURN_BEGIN);

      /* Adjust REM and PTR, unless this is the last iteration.  */
      if (i != sctz_len)
	{
	  emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
	  to = replace_equiv_address (to, ptr);
	  rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
	  emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
	}

      if (label)
	{
	  emit_label (label);

	  /* Given conditional stores, the offset can no longer be
	     known, so clear it.  */
	  clear_mem_offset (to);
	}
    }

  return true;
}
4154 1.1 mrg
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Query profile feedback for the expected block size/alignment,
     when available.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Case 1: VAL is not a compile-time constant byte.  */
  if (TREE_CODE (val) != INTEGER_CST
      || target_char_cast (val, &c))
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, RETURN_BEGIN);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size)
	       && !try_store_by_multiple_pieces (dest_mem, len_rtx,
						 tree_ctz (len),
						 min_size, max_size,
						 val_rtx, 0,
						 dest_align))
	goto do_libcall;

      /* memset returns DEST; hand back its address in ptr_mode.  */
      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 2: VAL is a known nonzero byte C.  */
  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true,
			 RETURN_BEGIN);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size)
	       && !try_store_by_multiple_pieces (dest_mem, len_rtx,
						 tree_ctz (len),
						 min_size, max_size,
						 NULL_RTX, c,
						 dest_align))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 3: VAL is the constant zero; use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size, tree_ctz (len));

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

  /* Inline expansion failed: emit an explicit call to the original
     builtin (memset or bzero), with the stabilized arguments.  */
 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4304 1.1 mrg
4305 1.1 mrg /* Expand expression EXP, which is a call to the bzero builtin. Return
4306 1.1 mrg NULL_RTX if we failed the caller should emit a normal call. */
4307 1.1 mrg
4308 1.1 mrg static rtx
4309 1.1 mrg expand_builtin_bzero (tree exp)
4310 1.1 mrg {
4311 1.1 mrg if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4312 1.1 mrg return NULL_RTX;
4313 1.1 mrg
4314 1.1 mrg tree dest = CALL_EXPR_ARG (exp, 0);
4315 1.1 mrg tree size = CALL_EXPR_ARG (exp, 1);
4316 1.1 mrg
4317 1.1 mrg /* New argument list transforming bzero(ptr x, int y) to
4318 1.1 mrg memset(ptr x, int 0, size_t y). This is done this way
4319 1.1 mrg so that if it isn't expanded inline, we fallback to
4320 1.1 mrg calling bzero instead of memset. */
4321 1.1 mrg
4322 1.1 mrg location_t loc = EXPR_LOCATION (exp);
4323 1.1 mrg
4324 1.1 mrg return expand_builtin_memset_args (dest, integer_zero_node,
4325 1.1 mrg fold_convert_loc (loc,
4326 1.1 mrg size_type_node, size),
4327 1.1 mrg const0_rtx, VOIDmode, exp);
4328 1.1 mrg }
4329 1.1 mrg
4330 1.1 mrg /* Try to expand cmpstr operation ICODE with the given operands.
4331 1.1 mrg Return the result rtx on success, otherwise return null. */
4332 1.1 mrg
4333 1.1 mrg static rtx
4334 1.1 mrg expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4335 1.1 mrg HOST_WIDE_INT align)
4336 1.1 mrg {
4337 1.1 mrg machine_mode insn_mode = insn_data[icode].operand[0].mode;
4338 1.1 mrg
4339 1.1 mrg if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4340 1.1 mrg target = NULL_RTX;
4341 1.1 mrg
4342 1.1 mrg class expand_operand ops[4];
4343 1.1 mrg create_output_operand (&ops[0], target, insn_mode);
4344 1.1 mrg create_fixed_operand (&ops[1], arg1_rtx);
4345 1.1 mrg create_fixed_operand (&ops[2], arg2_rtx);
4346 1.1 mrg create_integer_operand (&ops[3], align);
4347 1.1 mrg if (maybe_expand_insn (icode, 4, ops))
4348 1.1 mrg return ops[0].value;
4349 1.1 mrg return NULL_RTX;
4350 1.1 mrg }
4351 1.1 mrg
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Due to the performance benefit, always inline the calls first
     when result_eq is false.  */
  rtx result = NULL_RTX;
  enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
  if (!result_eq && fcode != BUILT_IN_BCMP)
    {
      result = inline_expand_builtin_bytecmp (exp, target);
      if (result)
	return result;
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  /* Try to get the byte representation of the constant ARG2 (or, only
     when the function's result is used for equality to zero, ARG1)
     points to, with its byte size in NBYTES.  */
  unsigned HOST_WIDE_INT nbytes;
  const char *rep = getbyterep (arg2, &nbytes);
  if (result_eq && rep == NULL)
    {
      /* For equality to zero the arguments are interchangeable.  */
      rep = getbyterep (arg1, &nbytes);
      if (rep != NULL)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If the function's constant bound LEN_RTX is less than or equal
     to the byte size of the representation of the constant argument,
     and if block move would be done by pieces, we can avoid loading
     the bytes from memory and only store the computed constant result.  */
  if (rep
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
    constfn = builtin_memcpy_read_str;

  result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				 TREE_TYPE (len), target,
				 result_eq, constfn,
				 CONST_CAST (char *, rep));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
4447 1.1 mrg
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_bytecmp (exp, target);
  if (result)
    return result;

  /* Without either a cmpstr or cmpstrn pattern there is nothing to
     expand to; fall back to a library call.  */
  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
			    MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      /* Include the terminating NUL in each known length.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
	{
	  arg3_rtx = expand_normal (len);
	  result = expand_cmpstrn_or_cmpmem
	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
	     arg3_rtx, MIN (arg1_align, arg2_align));
	}
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  copy_warning (fn, exp);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
4560 1.1 mrg
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  location_t loc = EXPR_LOCATION (exp);
  /* Compile-time known lengths of the two strings, if determinable.  */
  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_bytecmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* Account for the terminating NUL byte in each known length.  */
  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  /* The caller-supplied maximum count, converted to sizetype.  */
  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    {
      len = fold_convert_loc (loc, sizetype, len);
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
    }
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
				     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  copy_warning (call, exp);
  gcc_assert (TREE_CODE (call) == CALL_EXPR);
  CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
  return expand_call (call, target, target == const0_rtx);
}
4668 1.1 mrg
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  /* Cache the result; the check above returns it on any later call.  */
  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4706 1.1 mrg
4707 1.1 mrg /* Expand a call to __builtin_next_arg. */
4708 1.1 mrg
4709 1.1 mrg static rtx
4710 1.1 mrg expand_builtin_next_arg (void)
4711 1.1 mrg {
4712 1.1 mrg /* Checking arguments is already done in fold_builtin_next_arg
4713 1.1 mrg that must be called before this function. */
4714 1.1 mrg return expand_binop (ptr_mode, add_optab,
4715 1.1 mrg crtl->args.internal_arg_pointer,
4716 1.1 mrg crtl->args.arg_offset_rtx,
4717 1.1 mrg NULL_RTX, 0, OPTAB_LIB_WIDEN);
4718 1.1 mrg }
4719 1.1 mrg
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for any trees
   built here, VALIST is the va_list expression, and NEEDS_LVALUE is
   nonzero when the caller must be able to assign through the result.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  /* Take the address, and mark it as having side effects so
	     the SAVE_EXPR below will evaluate it only once.  */
	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Dereference the (now stabilized) pointer back to VATYPE.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4769 1.1 mrg
4770 1.1 mrg /* The "standard" definition of va_list is void*. */
4771 1.1 mrg
4772 1.1 mrg tree
4773 1.1 mrg std_build_builtin_va_list (void)
4774 1.1 mrg {
4775 1.1 mrg return ptr_type_node;
4776 1.1 mrg }
4777 1.1 mrg
4778 1.1 mrg /* The "standard" abi va_list is va_list_type_node. */
4779 1.1 mrg
4780 1.1 mrg tree
4781 1.1 mrg std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4782 1.1 mrg {
4783 1.1 mrg return va_list_type_node;
4784 1.1 mrg }
4785 1.1 mrg
4786 1.1 mrg /* The "standard" type of va_list is va_list_type_node. */
4787 1.1 mrg
4788 1.1 mrg tree
4789 1.1 mrg std_canonical_va_list_type (tree type)
4790 1.1 mrg {
4791 1.1 mrg tree wtype, htype;
4792 1.1 mrg
4793 1.1 mrg wtype = va_list_type_node;
4794 1.1 mrg htype = type;
4795 1.1 mrg
4796 1.1 mrg if (TREE_CODE (wtype) == ARRAY_TYPE)
4797 1.1 mrg {
4798 1.1 mrg /* If va_list is an array type, the argument may have decayed
4799 1.1 mrg to a pointer type, e.g. by being passed to another function.
4800 1.1 mrg In that case, unwrap both types so that we can compare the
4801 1.1 mrg underlying records. */
4802 1.1 mrg if (TREE_CODE (htype) == ARRAY_TYPE
4803 1.1 mrg || POINTER_TYPE_P (htype))
4804 1.1 mrg {
4805 1.1 mrg wtype = TREE_TYPE (wtype);
4806 1.1 mrg htype = TREE_TYPE (htype);
4807 1.1 mrg }
4808 1.1 mrg }
4809 1.1 mrg if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4810 1.1 mrg return va_list_type_node;
4811 1.1 mrg
4812 1.1 mrg return NULL_TREE;
4813 1.1 mrg }
4814 1.1 mrg
4815 1.1 mrg /* The "standard" implementation of va_start: just assign `nextarg' to
4816 1.1 mrg the variable. */
4817 1.1 mrg
4818 1.1 mrg void
4819 1.1 mrg std_expand_builtin_va_start (tree valist, rtx nextarg)
4820 1.1 mrg {
4821 1.1 mrg rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4822 1.1 mrg convert_move (va_r, nextarg, 0);
4823 1.1 mrg }
4824 1.1 mrg
/* Expand EXP, a call to __builtin_va_start.  Always yields const0_rtx;
   errors are diagnosed rather than propagated.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses a bad second argument; expand to
     nothing if it reported a problem.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  /* Stabilize the va_list as an lvalue (third argument 1) so the
     expanders below can store into it.  */
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Prefer the target's own va_start expander when one is provided.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4853 1.1 mrg
4854 1.1 mrg /* Expand EXP, a call to __builtin_va_end. */
4855 1.1 mrg
4856 1.1 mrg static rtx
4857 1.1 mrg expand_builtin_va_end (tree exp)
4858 1.1 mrg {
4859 1.1 mrg tree valist = CALL_EXPR_ARG (exp, 0);
4860 1.1 mrg
4861 1.1 mrg /* Evaluate for side effects, if needed. I hate macros that don't
4862 1.1 mrg do that. */
4863 1.1 mrg if (TREE_SIDE_EFFECTS (valist))
4864 1.1 mrg expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4865 1.1 mrg
4866 1.1 mrg return const0_rtx;
4867 1.1 mrg }
4868 1.1 mrg
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination must be assignable (lvalue); the source need not.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a simple assignment copies it.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the underlying storage as a memory block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4920 1.1 mrg
4921 1.1 mrg /* Expand a call to one of the builtin functions __builtin_frame_address or
4922 1.1 mrg __builtin_return_address. */
4923 1.1 mrg
4924 1.1 mrg static rtx
4925 1.1 mrg expand_builtin_frame_address (tree fndecl, tree exp)
4926 1.1 mrg {
4927 1.1 mrg /* The argument must be a nonnegative integer constant.
4928 1.1 mrg It counts the number of frames to scan up the stack.
4929 1.1 mrg The value is either the frame pointer value or the return
4930 1.1 mrg address saved in that frame. */
4931 1.1 mrg if (call_expr_nargs (exp) == 0)
4932 1.1 mrg /* Warning about missing arg was already issued. */
4933 1.1 mrg return const0_rtx;
4934 1.1 mrg else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4935 1.1 mrg {
4936 1.1 mrg error ("invalid argument to %qD", fndecl);
4937 1.1 mrg return const0_rtx;
4938 1.1 mrg }
4939 1.1 mrg else
4940 1.1 mrg {
4941 1.1 mrg /* Number of frames to scan up the stack. */
4942 1.1 mrg unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4943 1.1 mrg
4944 1.1 mrg rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4945 1.1 mrg
4946 1.1 mrg /* Some ports cannot access arbitrary stack frames. */
4947 1.1 mrg if (tem == NULL)
4948 1.1 mrg {
4949 1.1 mrg warning (0, "unsupported argument to %qD", fndecl);
4950 1.1 mrg return const0_rtx;
4951 1.1 mrg }
4952 1.1 mrg
4953 1.1 mrg if (count)
4954 1.1 mrg {
4955 1.1 mrg /* Warn since no effort is made to ensure that any frame
4956 1.1 mrg beyond the current one exists or can be safely reached. */
4957 1.1 mrg warning (OPT_Wframe_address, "calling %qD with "
4958 1.1 mrg "a nonzero argument is unsafe", fndecl);
4959 1.1 mrg }
4960 1.1 mrg
4961 1.1 mrg /* For __builtin_frame_address, return what we've got. */
4962 1.1 mrg if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4963 1.1 mrg return tem;
4964 1.1 mrg
4965 1.1 mrg if (!REG_P (tem)
4966 1.1 mrg && ! CONSTANT_P (tem))
4967 1.1 mrg tem = copy_addr_to_reg (tem);
4968 1.1 mrg return tem;
4969 1.1 mrg }
4970 1.1 mrg }
4971 1.1 mrg
4972 1.1 mrg /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4973 1.1 mrg failed and the caller should emit a normal call. */
4974 1.1 mrg
4975 1.1 mrg static rtx
4976 1.1 mrg expand_builtin_alloca (tree exp)
4977 1.1 mrg {
4978 1.1 mrg rtx op0;
4979 1.1 mrg rtx result;
4980 1.1 mrg unsigned int align;
4981 1.1 mrg tree fndecl = get_callee_fndecl (exp);
4982 1.1 mrg HOST_WIDE_INT max_size;
4983 1.1 mrg enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4984 1.1 mrg bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4985 1.1 mrg bool valid_arglist
4986 1.1 mrg = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4987 1.1 mrg ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4988 1.1 mrg VOID_TYPE)
4989 1.1 mrg : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4990 1.1 mrg ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4991 1.1 mrg : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4992 1.1 mrg
4993 1.1 mrg if (!valid_arglist)
4994 1.1 mrg return NULL_RTX;
4995 1.1 mrg
4996 1.1 mrg /* Compute the argument. */
4997 1.1 mrg op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4998 1.1 mrg
4999 1.1 mrg /* Compute the alignment. */
5000 1.1 mrg align = (fcode == BUILT_IN_ALLOCA
5001 1.1 mrg ? BIGGEST_ALIGNMENT
5002 1.1 mrg : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5003 1.1 mrg
5004 1.1 mrg /* Compute the maximum size. */
5005 1.1 mrg max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5006 1.1 mrg ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5007 1.1 mrg : -1);
5008 1.1 mrg
5009 1.1 mrg /* Allocate the desired space. If the allocation stems from the declaration
5010 1.1 mrg of a variable-sized object, it cannot accumulate. */
5011 1.1 mrg result
5012 1.1 mrg = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5013 1.1 mrg result = convert_memory_address (ptr_mode, result);
5014 1.1 mrg
5015 1.1 mrg /* Dynamic allocations for variables are recorded during gimplification. */
5016 1.1 mrg if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5017 1.1 mrg record_dynamic_alloc (exp);
5018 1.1 mrg
5019 1.1 mrg return result;
5020 1.1 mrg }
5021 1.1 mrg
/* Emit a call to __asan_allocas_unpoison call in EXP.  Add to second argument
   of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
   STACK_DYNAMIC_OFFSET value.  See motivation for this in comment to
   handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  /* Compute the stack-dynamic offset in Pmode...  */
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
				 stack_pointer_rtx, NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
  /* ...then convert it to ptr_mode and add it to the bottom address.  */
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
			     OPTAB_LIB_WIDEN);
  /* Call __asan_allocas_unpoison (top, bot) in libasan.  */
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
  return ret;
}
5045 1.1 mrg
5046 1.1 mrg /* Expand a call to bswap builtin in EXP.
5047 1.1 mrg Return NULL_RTX if a normal call should be emitted rather than expanding the
5048 1.1 mrg function in-line. If convenient, the result should be placed in TARGET.
5049 1.1 mrg SUBTARGET may be used as the target for computing one of EXP's operands. */
5050 1.1 mrg
5051 1.1 mrg static rtx
5052 1.1 mrg expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5053 1.1 mrg rtx subtarget)
5054 1.1 mrg {
5055 1.1 mrg tree arg;
5056 1.1 mrg rtx op0;
5057 1.1 mrg
5058 1.1 mrg if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5059 1.1 mrg return NULL_RTX;
5060 1.1 mrg
5061 1.1 mrg arg = CALL_EXPR_ARG (exp, 0);
5062 1.1 mrg op0 = expand_expr (arg,
5063 1.1 mrg subtarget && GET_MODE (subtarget) == target_mode
5064 1.1 mrg ? subtarget : NULL_RTX,
5065 1.1 mrg target_mode, EXPAND_NORMAL);
5066 1.1 mrg if (GET_MODE (op0) != target_mode)
5067 1.1 mrg op0 = convert_to_mode (target_mode, op0, 1);
5068 1.1 mrg
5069 1.1 mrg target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5070 1.1 mrg
5071 1.1 mrg gcc_assert (target);
5072 1.1 mrg
5073 1.1 mrg return convert_to_mode (target_mode, target, 1);
5074 1.1 mrg }
5075 1.1 mrg
5076 1.1 mrg /* Expand a call to a unary builtin in EXP.
5077 1.1 mrg Return NULL_RTX if a normal call should be emitted rather than expanding the
5078 1.1 mrg function in-line. If convenient, the result should be placed in TARGET.
5079 1.1 mrg SUBTARGET may be used as the target for computing one of EXP's operands. */
5080 1.1 mrg
5081 1.1 mrg static rtx
5082 1.1 mrg expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5083 1.1 mrg rtx subtarget, optab op_optab)
5084 1.1 mrg {
5085 1.1 mrg rtx op0;
5086 1.1 mrg
5087 1.1 mrg if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5088 1.1 mrg return NULL_RTX;
5089 1.1 mrg
5090 1.1 mrg /* Compute the argument. */
5091 1.1 mrg op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5092 1.1 mrg (subtarget
5093 1.1 mrg && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5094 1.1 mrg == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5095 1.1 mrg VOIDmode, EXPAND_NORMAL);
5096 1.1 mrg /* Compute op, into TARGET if possible.
5097 1.1 mrg Set TARGET to wherever the result comes back. */
5098 1.1 mrg target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5099 1.1 mrg op_optab, op0, target, op_optab != clrsb_optab);
5100 1.1 mrg gcc_assert (target);
5101 1.1 mrg
5102 1.1 mrg return convert_to_mode (target_mode, target, 0);
5103 1.1 mrg }
5104 1.1 mrg
5105 1.1 mrg /* Expand a call to __builtin_expect. We just return our argument
5106 1.1 mrg as the builtin_expect semantic should've been already executed by
5107 1.1 mrg tree branch prediction pass. */
5108 1.1 mrg
5109 1.1 mrg static rtx
5110 1.1 mrg expand_builtin_expect (tree exp, rtx target)
5111 1.1 mrg {
5112 1.1 mrg tree arg;
5113 1.1 mrg
5114 1.1 mrg if (call_expr_nargs (exp) < 2)
5115 1.1 mrg return const0_rtx;
5116 1.1 mrg arg = CALL_EXPR_ARG (exp, 0);
5117 1.1 mrg
5118 1.1 mrg target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5119 1.1 mrg /* When guessing was done, the hints should be already stripped away. */
5120 1.1 mrg gcc_assert (!flag_guess_branch_prob
5121 1.1 mrg || optimize == 0 || seen_error ());
5122 1.1 mrg return target;
5123 1.1 mrg }
5124 1.1 mrg
5125 1.1 mrg /* Expand a call to __builtin_expect_with_probability. We just return our
5126 1.1 mrg argument as the builtin_expect semantic should've been already executed by
5127 1.1 mrg tree branch prediction pass. */
5128 1.1 mrg
5129 1.1 mrg static rtx
5130 1.1 mrg expand_builtin_expect_with_probability (tree exp, rtx target)
5131 1.1 mrg {
5132 1.1 mrg tree arg;
5133 1.1 mrg
5134 1.1 mrg if (call_expr_nargs (exp) < 3)
5135 1.1 mrg return const0_rtx;
5136 1.1 mrg arg = CALL_EXPR_ARG (exp, 0);
5137 1.1 mrg
5138 1.1 mrg target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5139 1.1 mrg /* When guessing was done, the hints should be already stripped away. */
5140 1.1 mrg gcc_assert (!flag_guess_branch_prob
5141 1.1 mrg || optimize == 0 || seen_error ());
5142 1.1 mrg return target;
5143 1.1 mrg }
5144 1.1 mrg
5145 1.1 mrg
5146 1.1 mrg /* Expand a call to __builtin_assume_aligned. We just return our first
5147 1.1 mrg argument as the builtin_assume_aligned semantic should've been already
5148 1.1 mrg executed by CCP. */
5149 1.1 mrg
5150 1.1 mrg static rtx
5151 1.1 mrg expand_builtin_assume_aligned (tree exp, rtx target)
5152 1.1 mrg {
5153 1.1 mrg if (call_expr_nargs (exp) < 2)
5154 1.1 mrg return const0_rtx;
5155 1.1 mrg target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5156 1.1 mrg EXPAND_NORMAL);
5157 1.1 mrg gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5158 1.1 mrg && (call_expr_nargs (exp) < 3
5159 1.1 mrg || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5160 1.1 mrg return target;
5161 1.1 mrg }
5162 1.1 mrg
5163 1.1 mrg void
5164 1.1 mrg expand_builtin_trap (void)
5165 1.1 mrg {
5166 1.1 mrg if (targetm.have_trap ())
5167 1.1 mrg {
5168 1.1 mrg rtx_insn *insn = emit_insn (targetm.gen_trap ());
5169 1.1 mrg /* For trap insns when not accumulating outgoing args force
5170 1.1 mrg REG_ARGS_SIZE note to prevent crossjumping of calls with
5171 1.1 mrg different args sizes. */
5172 1.1 mrg if (!ACCUMULATE_OUTGOING_ARGS)
5173 1.1 mrg add_args_size_note (insn, stack_pointer_delta);
5174 1.1 mrg }
5175 1.1 mrg else
5176 1.1 mrg {
5177 1.1 mrg tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5178 1.1 mrg tree call_expr = build_call_expr (fn, 0);
5179 1.1 mrg expand_call (call_expr, NULL_RTX, false);
5180 1.1 mrg }
5181 1.1 mrg
5182 1.1 mrg emit_barrier ();
5183 1.1 mrg }
5184 1.1 mrg
5185 1.1 mrg /* Expand a call to __builtin_unreachable. We do nothing except emit
5186 1.1 mrg a barrier saying that control flow will not pass here.
5187 1.1 mrg
5188 1.1 mrg It is the responsibility of the program being compiled to ensure
5189 1.1 mrg that control flow does never reach __builtin_unreachable. */
5190 1.1 mrg static void
5191 1.1 mrg expand_builtin_unreachable (void)
5192 1.1 mrg {
5193 1.1 mrg emit_barrier ();
5194 1.1 mrg }
5195 1.1 mrg
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Wrap the argument in a SAVE_EXPR and store it back into the call
     so the safe_from_p query below sees the same (saved) tree that was
     expanded into OP0.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
5218 1.1 mrg
5219 1.1 mrg /* Expand EXP, a call to copysign, copysignf, or copysignl.
5220 1.1 mrg Return NULL is a normal call should be emitted rather than expanding the
5221 1.1 mrg function inline. If convenient, the result should be placed in TARGET.
5222 1.1 mrg SUBTARGET may be used as the target for computing the operand. */
5223 1.1 mrg
5224 1.1 mrg static rtx
5225 1.1 mrg expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5226 1.1 mrg {
5227 1.1 mrg rtx op0, op1;
5228 1.1 mrg tree arg;
5229 1.1 mrg
5230 1.1 mrg if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5231 1.1 mrg return NULL_RTX;
5232 1.1 mrg
5233 1.1 mrg arg = CALL_EXPR_ARG (exp, 0);
5234 1.1 mrg op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5235 1.1 mrg
5236 1.1 mrg arg = CALL_EXPR_ARG (exp, 1);
5237 1.1 mrg op1 = expand_normal (arg);
5238 1.1 mrg
5239 1.1 mrg return expand_copysign (op0, op1, target);
5240 1.1 mrg }
5241 1.1 mrg
5242 1.1 mrg /* Emit a call to __builtin___clear_cache. */
5243 1.1 mrg
5244 1.1 mrg void
5245 1.1 mrg default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5246 1.1 mrg {
5247 1.1 mrg rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5248 1.1 mrg BUILTIN_ASM_NAME_PTR
5249 1.1 mrg (BUILT_IN_CLEAR_CACHE));
5250 1.1 mrg
5251 1.1 mrg emit_library_call (callee,
5252 1.1 mrg LCT_NORMAL, VOIDmode,
5253 1.1 mrg convert_memory_address (ptr_mode, begin), ptr_mode,
5254 1.1 mrg convert_memory_address (ptr_mode, end), ptr_mode);
5255 1.1 mrg }
5256 1.1 mrg
/* Emit a call to __builtin___clear_cache, unless the target specifies
   it as do-nothing.  This function can be used by trampoline
   finalizers to duplicate the effects of expanding a call to the
   clear_cache builtin.  */

void
maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
{
  /* Both bounds must be addresses: ptr_mode, Pmode, or constants.  */
  gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
	       || CONST_INT_P (begin))
	      && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
		  || CONST_INT_P (end)));

  if (targetm.have_clear_cache ())
    {
      /* We have a "clear_cache" insn, and it will handle everything.  */
      class expand_operand ops[2];

      create_address_operand (&ops[0], begin);
      create_address_operand (&ops[1], end);

      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return;
      /* The insn did not match these operands; fall through to the
	 library call below.  */
    }
  else
    {
#ifndef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return;
#endif /* CLEAR_INSN_CACHE */
    }

  targetm.calls.emit_call_builtin___clear_cache (begin, end);
}
5292 1.1 mrg
5293 1.1 mrg /* Expand a call to __builtin___clear_cache. */
5294 1.1 mrg
5295 1.1 mrg static void
5296 1.1 mrg expand_builtin___clear_cache (tree exp)
5297 1.1 mrg {
5298 1.1 mrg tree begin, end;
5299 1.1 mrg rtx begin_rtx, end_rtx;
5300 1.1 mrg
5301 1.1 mrg /* We must not expand to a library call. If we did, any
5302 1.1 mrg fallback library function in libgcc that might contain a call to
5303 1.1 mrg __builtin___clear_cache() would recurse infinitely. */
5304 1.1 mrg if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5305 1.1 mrg {
5306 1.1 mrg error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5307 1.1 mrg return;
5308 1.1 mrg }
5309 1.1 mrg
5310 1.1 mrg begin = CALL_EXPR_ARG (exp, 0);
5311 1.1 mrg begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5312 1.1 mrg
5313 1.1 mrg end = CALL_EXPR_ARG (exp, 1);
5314 1.1 mrg end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5315 1.1 mrg
5316 1.1 mrg maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
5317 1.1 mrg }
5318 1.1 mrg
5319 1.1 mrg /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5320 1.1 mrg
5321 1.1 mrg static rtx
5322 1.1 mrg round_trampoline_addr (rtx tramp)
5323 1.1 mrg {
5324 1.1 mrg rtx temp, addend, mask;
5325 1.1 mrg
5326 1.1 mrg /* If we don't need too much alignment, we'll have been guaranteed
5327 1.1 mrg proper alignment by get_trampoline_type. */
5328 1.1 mrg if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5329 1.1 mrg return tramp;
5330 1.1 mrg
5331 1.1 mrg /* Round address up to desired boundary. */
5332 1.1 mrg temp = gen_reg_rtx (Pmode);
5333 1.1 mrg addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5334 1.1 mrg mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5335 1.1 mrg
5336 1.1 mrg temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5337 1.1 mrg temp, 0, OPTAB_LIB_WIDEN);
5338 1.1 mrg tramp = expand_simple_binop (Pmode, AND, temp, mask,
5339 1.1 mrg temp, 0, OPTAB_LIB_WIDEN);
5340 1.1 mrg
5341 1.1 mrg return tramp;
5342 1.1 mrg }
5343 1.1 mrg
/* Expand a call to __builtin_init_trampoline (ONSTACK true) or the
   heap-trampoline variant (ONSTACK false).  EXP is the CALL_EXPR with
   three pointer arguments: the trampoline storage, the address of the
   nested function, and the static chain value.  Emits the insns that
   initialize the trampoline via the target hook and returns const0_rtx
   (the builtin itself yields no useful value), or NULL_RTX if the
   argument list does not match.  */

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  /* Model the trampoline storage as a BLKmode MEM that cannot trap.  */
  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      /* Rounding moved the address, so rebuild the MEM and record the
	 now-known alignment and size.  */
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      /* Warn only when the target hook for custom function descriptors
	 is nonzero, i.e. a descriptor-based alternative exists.  */
      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
5401 1.1 mrg
5402 1.1 mrg static rtx
5403 1.1 mrg expand_builtin_adjust_trampoline (tree exp)
5404 1.1 mrg {
5405 1.1 mrg rtx tramp;
5406 1.1 mrg
5407 1.1 mrg if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5408 1.1 mrg return NULL_RTX;
5409 1.1 mrg
5410 1.1 mrg tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5411 1.1 mrg tramp = round_trampoline_addr (tramp);
5412 1.1 mrg if (targetm.calls.trampoline_adjust_address)
5413 1.1 mrg tramp = targetm.calls.trampoline_adjust_address (tramp);
5414 1.1 mrg
5415 1.1 mrg return tramp;
5416 1.1 mrg }
5417 1.1 mrg
5418 1.1 mrg /* Expand a call to the builtin descriptor initialization routine.
5419 1.1 mrg A descriptor is made up of a couple of pointers to the static
5420 1.1 mrg chain and the code entry in this order. */
5421 1.1 mrg
5422 1.1 mrg static rtx
5423 1.1 mrg expand_builtin_init_descriptor (tree exp)
5424 1.1 mrg {
5425 1.1 mrg tree t_descr, t_func, t_chain;
5426 1.1 mrg rtx m_descr, r_descr, r_func, r_chain;
5427 1.1 mrg
5428 1.1 mrg if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5429 1.1 mrg VOID_TYPE))
5430 1.1 mrg return NULL_RTX;
5431 1.1 mrg
5432 1.1 mrg t_descr = CALL_EXPR_ARG (exp, 0);
5433 1.1 mrg t_func = CALL_EXPR_ARG (exp, 1);
5434 1.1 mrg t_chain = CALL_EXPR_ARG (exp, 2);
5435 1.1 mrg
5436 1.1 mrg r_descr = expand_normal (t_descr);
5437 1.1 mrg m_descr = gen_rtx_MEM (BLKmode, r_descr);
5438 1.1 mrg MEM_NOTRAP_P (m_descr) = 1;
5439 1.1 mrg set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5440 1.1 mrg
5441 1.1 mrg r_func = expand_normal (t_func);
5442 1.1 mrg r_chain = expand_normal (t_chain);
5443 1.1 mrg
5444 1.1 mrg /* Generate insns to initialize the descriptor. */
5445 1.1 mrg emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5446 1.1 mrg emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5447 1.1 mrg POINTER_SIZE / BITS_PER_UNIT), r_func);
5448 1.1 mrg
5449 1.1 mrg return const0_rtx;
5450 1.1 mrg }
5451 1.1 mrg
5452 1.1 mrg /* Expand a call to the builtin descriptor adjustment routine. */
5453 1.1 mrg
5454 1.1 mrg static rtx
5455 1.1 mrg expand_builtin_adjust_descriptor (tree exp)
5456 1.1 mrg {
5457 1.1 mrg rtx tramp;
5458 1.1 mrg
5459 1.1 mrg if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5460 1.1 mrg return NULL_RTX;
5461 1.1 mrg
5462 1.1 mrg tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5463 1.1 mrg
5464 1.1 mrg /* Unalign the descriptor to allow runtime identification. */
5465 1.1 mrg tramp = plus_constant (ptr_mode, tramp,
5466 1.1 mrg targetm.calls.custom_function_descriptors);
5467 1.1 mrg
5468 1.1 mrg return force_operand (tramp, NULL_RTX);
5469 1.1 mrg }
5470 1.1 mrg
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* FMODE is the float mode of the argument, RMODE the integer mode of
     the (int-typed) result, FMT the format describing the bit layout.  */
  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn failed to match; discard any partially emitted RTL
	 and fall back to manual bit extraction.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The whole value fits in one word: reinterpret it as the
	 equally-sized integer mode.  */
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: pick out just the word containing the sign
	 bit and rebase BITPOS relative to that word.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5577 1.1 mrg
5578 1.1 mrg /* Expand fork or exec calls. TARGET is the desired target of the
5579 1.1 mrg call. EXP is the call. FN is the
5580 1.1 mrg identificator of the actual function. IGNORE is nonzero if the
5581 1.1 mrg value is to be ignored. */
5582 1.1 mrg
5583 1.1 mrg static rtx
5584 1.1 mrg expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5585 1.1 mrg {
5586 1.1 mrg tree id, decl;
5587 1.1 mrg tree call;
5588 1.1 mrg
5589 1.1 mrg /* If we are not profiling, just call the function. */
5590 1.1 mrg if (!profile_arc_flag)
5591 1.1 mrg return NULL_RTX;
5592 1.1 mrg
5593 1.1 mrg /* Otherwise call the wrapper. This should be equivalent for the rest of
5594 1.1 mrg compiler, so the code does not diverge, and the wrapper may run the
5595 1.1 mrg code necessary for keeping the profiling sane. */
5596 1.1 mrg
5597 1.1 mrg switch (DECL_FUNCTION_CODE (fn))
5598 1.1 mrg {
5599 1.1 mrg case BUILT_IN_FORK:
5600 1.1 mrg id = get_identifier ("__gcov_fork");
5601 1.1 mrg break;
5602 1.1 mrg
5603 1.1 mrg case BUILT_IN_EXECL:
5604 1.1 mrg id = get_identifier ("__gcov_execl");
5605 1.1 mrg break;
5606 1.1 mrg
5607 1.1 mrg case BUILT_IN_EXECV:
5608 1.1 mrg id = get_identifier ("__gcov_execv");
5609 1.1 mrg break;
5610 1.1 mrg
5611 1.1 mrg case BUILT_IN_EXECLP:
5612 1.1 mrg id = get_identifier ("__gcov_execlp");
5613 1.1 mrg break;
5614 1.1 mrg
5615 1.1 mrg case BUILT_IN_EXECLE:
5616 1.1 mrg id = get_identifier ("__gcov_execle");
5617 1.1 mrg break;
5618 1.1 mrg
5619 1.1 mrg case BUILT_IN_EXECVP:
5620 1.1 mrg id = get_identifier ("__gcov_execvp");
5621 1.1 mrg break;
5622 1.1 mrg
5623 1.1 mrg case BUILT_IN_EXECVE:
5624 1.1 mrg id = get_identifier ("__gcov_execve");
5625 1.1 mrg break;
5626 1.1 mrg
5627 1.1 mrg default:
5628 1.1 mrg gcc_unreachable ();
5629 1.1 mrg }
5630 1.1 mrg
5631 1.1 mrg decl = build_decl (DECL_SOURCE_LOCATION (fn),
5632 1.1 mrg FUNCTION_DECL, id, TREE_TYPE (fn));
5633 1.1 mrg DECL_EXTERNAL (decl) = 1;
5634 1.1 mrg TREE_PUBLIC (decl) = 1;
5635 1.1 mrg DECL_ARTIFICIAL (decl) = 1;
5636 1.1 mrg TREE_NOTHROW (decl) = 1;
5637 1.1 mrg DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5638 1.1 mrg DECL_VISIBILITY_SPECIFIED (decl) = 1;
5639 1.1 mrg call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5640 1.1 mrg return expand_call (call, target, ignore);
5641 1.1 mrg }
5642 1.1 mrg
5643 1.1 mrg
5644 1.1 mrg
5645 1.1 mrg /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5647 1.1 mrg the pointer in these functions is void*, the tree optimizers may remove
5648 1.1 mrg casts. The mode computed in expand_builtin isn't reliable either, due
5649 1.1 mrg to __sync_bool_compare_and_swap.
5650 1.1 mrg
5651 1.1 mrg FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5652 1.1 mrg group of builtins. This gives us log2 of the mode size. */
5653 1.1 mrg
5654 1.1 mrg static inline machine_mode
5655 1.1 mrg get_builtin_sync_mode (int fcode_diff)
5656 1.1 mrg {
5657 1.1 mrg /* The size is not negotiable, so ask not to get BLKmode in return
5658 1.1 mrg if the target indicates that a smaller size would be better. */
5659 1.1 mrg return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5660 1.1 mrg }
5661 1.1 mrg
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  The returned MEM is volatile, carries
   the barrier alias set, and is aligned at least to MODE's alignment.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;
  /* LOC may be either the pointer itself or (when the optimizers
     stripped the cast) the pointed-to object; derive the address space
     from whichever type actually carries it.  */
  int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
				    ? TREE_TYPE (TREE_TYPE (loc))
				    : TREE_TYPE (loc));
  scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);

  addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
  addr = convert_memory_address (addr_mode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = gen_rtx_MEM (mode, addr);

  set_mem_addr_space (mem, addr_space);

  mem = validize_mem (mem);

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
5694 1.1 mrg
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  Returns an rtx for EXP's value
   converted to MODE.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  if (TREE_CODE (exp) == SSA_NAME
      && TYPE_MODE (TREE_TYPE (exp)) != mode)
    {
      /* Undo argument promotion if possible, as combine might not
	 be able to do it later due to MEM_VOLATILE_P uses in the
	 patterns.  */
      gimple *g = get_gimple_for_ssa_name (exp);
      if (g && gimple_assign_cast_p (g))
	{
	  tree rhs = gimple_assign_rhs1 (g);
	  tree_code code = gimple_assign_rhs_code (g);
	  /* Only peel a widening integral conversion whose source is
	     already in MODE; anything else must be expanded as-is.  */
	  if (CONVERT_EXPR_CODE_P (code)
	      && TYPE_MODE (TREE_TYPE (rhs)) == mode
	      && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	      && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
	      && (TYPE_PRECISION (TREE_TYPE (exp))
		  > TYPE_PRECISION (TREE_TYPE (rhs))))
	    exp = rhs;
	}
    }

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
5736 1.1 mrg
5737 1.1 mrg
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  /* __sync_fetch_and_nand changed meaning in GCC 4.4; tell the user
     once per direction (fetch-and-nand vs. nand-and-fetch).  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Function-local statics: each note is emitted at most once per
	 compilation.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
5800 1.1 mrg
5801 1.1 mrg /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5802 1.1 mrg intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5803 1.1 mrg true if this is the boolean form. TARGET is a place for us to store the
5804 1.1 mrg results; this is NOT optional if IS_BOOL is true. */
5805 1.1 mrg
5806 1.1 mrg static rtx
5807 1.1 mrg expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5808 1.1 mrg bool is_bool, rtx target)
5809 1.1 mrg {
5810 1.1 mrg rtx old_val, new_val, mem;
5811 1.1 mrg rtx *pbool, *poval;
5812 1.1 mrg
5813 1.1 mrg /* Expand the operands. */
5814 1.1 mrg mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5815 1.1 mrg old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5816 1.1 mrg new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5817 1.1 mrg
5818 1.1 mrg pbool = poval = NULL;
5819 1.1 mrg if (target != const0_rtx)
5820 1.1 mrg {
5821 1.1 mrg if (is_bool)
5822 1.1 mrg pbool = ⌖
5823 1.1 mrg else
5824 1.1 mrg poval = ⌖
5825 1.1 mrg }
5826 1.1 mrg if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5827 1.1 mrg false, MEMMODEL_SYNC_SEQ_CST,
5828 1.1 mrg MEMMODEL_SYNC_SEQ_CST))
5829 1.1 mrg return NULL_RTX;
5830 1.1 mrg
5831 1.1 mrg return target;
5832 1.1 mrg }
5833 1.1 mrg
5834 1.1 mrg /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5835 1.1 mrg general form is actually an atomic exchange, and some targets only
5836 1.1 mrg support a reduced form with the second argument being a constant 1.
5837 1.1 mrg EXP is the CALL_EXPR; TARGET is an optional place for us to store
5838 1.1 mrg the results. */
5839 1.1 mrg
5840 1.1 mrg static rtx
5841 1.1 mrg expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5842 1.1 mrg rtx target)
5843 1.1 mrg {
5844 1.1 mrg rtx val, mem;
5845 1.1 mrg
5846 1.1 mrg /* Expand the operands. */
5847 1.1 mrg mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5848 1.1 mrg val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5849 1.1 mrg
5850 1.1 mrg return expand_sync_lock_test_and_set (target, mem, val);
5851 1.1 mrg }
5852 1.1 mrg
5853 1.1 mrg /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5854 1.1 mrg
5855 1.1 mrg static void
5856 1.1 mrg expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5857 1.1 mrg {
5858 1.1 mrg rtx mem;
5859 1.1 mrg
5860 1.1 mrg /* Expand the operands. */
5861 1.1 mrg mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5862 1.1 mrg
5863 1.1 mrg expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5864 1.1 mrg }
5865 1.1 mrg
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  Invalid or
   non-constant values fall back to MEMMODEL_SEQ_CST, the strongest
   ordering.  */

static enum memmodel
get_memmodel (tree exp)
{
  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  rtx op = expand_normal (exp);

  unsigned HOST_WIDE_INT val = INTVAL (op);
  /* Let the target canonicalize the value if it provides a hook;
     otherwise reject any bits outside the model mask.  */
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    return MEMMODEL_SEQ_CST;

  /* Should never see a user explicit SYNC memmodel, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    return MEMMODEL_SEQ_CST;

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
5896 1.1 mrg
5897 1.1 mrg /* Expand the __atomic_exchange intrinsic:
5898 1.1 mrg TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5899 1.1 mrg EXP is the CALL_EXPR.
5900 1.1 mrg TARGET is an optional place for us to store the results. */
5901 1.1 mrg
5902 1.1 mrg static rtx
5903 1.1 mrg expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5904 1.1 mrg {
5905 1.1 mrg rtx val, mem;
5906 1.1 mrg enum memmodel model;
5907 1.1 mrg
5908 1.1 mrg model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5909 1.1 mrg
5910 1.1 mrg if (!flag_inline_atomics)
5911 1.1 mrg return NULL_RTX;
5912 1.1 mrg
5913 1.1 mrg /* Expand the operands. */
5914 1.1 mrg mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5915 1.1 mrg val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5916 1.1 mrg
5917 1.1 mrg return expand_atomic_exchange (target, mem, val, model);
5918 1.1 mrg }
5919 1.1 mrg
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  tree weak;
  bool is_weak;

  memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* The failure ordering may not be stronger than the success
     ordering; strengthen success rather than reject.  */
  if (failure > success)
    success = MEMMODEL_SEQ_CST;

  /* Release/acq_rel are invalid failure orderings; degrade both
     orderings to seq_cst.  */
  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }


  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer argument; build a MEM for the pointed-to slot
     so we can both read the expected value and write back on failure.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
5990 1.1 mrg
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   slot.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  /* The library function takes 5 arguments: ptr, expected-ptr, desired,
     success model, failure model.  */
  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  /* Spill the expected value into a stack slot so we can pass its
     address, as the library interface requires.  */
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      /* The internal function's result is a complex pair: the success
	 flag in one part, the (possibly updated) expected value in the
	 other.  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
6040 1.1 mrg
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  /* Argument 3 packs the byte size in the low 8 bits (and the weak flag
     in bit 8, extracted below).  */
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();

  memmodel success = get_memmodel (gimple_call_arg (call, 4));
  memmodel failure = get_memmodel (gimple_call_arg (call, 5));

  /* The failure ordering may not be stronger than the success one;
     strengthen success rather than reject.  */
  if (failure > success)
    success = MEMMODEL_SEQ_CST;

  /* Release/acq_rel are invalid failure orderings; degrade both.  */
  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      /* Fall back to a call to __atomic_compare_exchange_N.  */
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  rtx boolret = NULL;
  rtx oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      /* Inline expansion failed; use the library call instead.  */
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  tree lhs = gimple_call_lhs (call);
  if (lhs)
    {
      /* The result is a complex pair: success flag plus old value.  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
6096 1.1 mrg
6097 1.1 mrg /* Expand the __atomic_load intrinsic:
6098 1.1 mrg TYPE __atomic_load (TYPE *object, enum memmodel)
6099 1.1 mrg EXP is the CALL_EXPR.
6100 1.1 mrg TARGET is an optional place for us to store the results. */
6101 1.1 mrg
6102 1.1 mrg static rtx
6103 1.1 mrg expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6104 1.1 mrg {
6105 1.1 mrg memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6106 1.1 mrg if (is_mm_release (model) || is_mm_acq_rel (model))
6107 1.1 mrg model = MEMMODEL_SEQ_CST;
6108 1.1 mrg
6109 1.1 mrg if (!flag_inline_atomics)
6110 1.1 mrg return NULL_RTX;
6111 1.1 mrg
6112 1.1 mrg /* Expand the operand. */
6113 1.1 mrg rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6114 1.1 mrg
6115 1.1 mrg return expand_atomic_load (target, mem, model);
6116 1.1 mrg }
6117 1.1 mrg
6118 1.1 mrg
6119 1.1 mrg /* Expand the __atomic_store intrinsic:
6120 1.1 mrg void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6121 1.1 mrg EXP is the CALL_EXPR.
6122 1.1 mrg TARGET is an optional place for us to store the results. */
6123 1.1 mrg
6124 1.1 mrg static rtx
6125 1.1 mrg expand_builtin_atomic_store (machine_mode mode, tree exp)
6126 1.1 mrg {
6127 1.1 mrg memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6128 1.1 mrg if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6129 1.1 mrg || is_mm_release (model)))
6130 1.1 mrg model = MEMMODEL_SEQ_CST;
6131 1.1 mrg
6132 1.1 mrg if (!flag_inline_atomics)
6133 1.1 mrg return NULL_RTX;
6134 1.1 mrg
6135 1.1 mrg /* Expand the operands. */
6136 1.1 mrg rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6137 1.1 mrg rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6138 1.1 mrg
6139 1.1 mrg return expand_atomic_store (mem, val, model, false);
6140 1.1 mrg }
6141 1.1 mrg
/* Expand the __atomic_fetch_XXX intrinsic:
   TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (for nand).
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  Temporarily redirect the
     CALL_EXPR's callee to EXT_CALL's decl; the original is restored after
     expansion below.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call cannot be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.
     EXT_CALL returns the opposite flavor (value before vs. after the
     operation), so apply the operation once more to convert.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  /* NOT encodes nand: the corrected result is ~(ret & val),
	     which takes two insns rather than a single binop.  */
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
6216 1.1 mrg
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.

   CALL's first three arguments are the object pointer, the bit number and
   a flag selecting whether the result is the tested bit (nonzero flag) or
   the bit within the masked word (zero flag).  A 5-argument call is the
   __atomic_ flavor and carries the memory model in argument 3.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  class expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* The __atomic_ flavor (5 args) supplies an explicit memory model;
     the __sync_ flavor defaults to SYNC_SEQ_CST above.  */
  if (gimple_call_num_args (call) == 5)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  /* Map the internal function to the RTL operation and the target optab.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  /* If the result is unused, a plain atomic fetch-op on the computed
     mask (inverted for AND/reset) suffices; try that first.  */
  if (lhs == NULL_TREE)
    {
      rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
				      val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
      if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
	return;
    }

  /* Next try the target's dedicated bit-test-and pattern, if any.  */
  rtx target;
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = gen_reg_rtx (mode);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* Fall back to a generic atomic fetch-op on the mask, keeping the
     original bit number (BITVAL) and mask (MASKVAL) for the fixup of
     the result below.  */
  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (!result)
    {
      /* No inline expansion possible: build and expand an explicit call
	 to the library routine passed as the call's trailing argument.  */
      bool is_atomic = gimple_call_num_args (call) == 5;
      tree tcall = gimple_call_arg (call, 3 + is_atomic);
      tree fndecl = gimple_call_addr_fndecl (tcall);
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree exp = build_call_nary (type, tcall, 2 + is_atomic, ptr,
				  make_tree (type, val),
				  is_atomic
				  ? gimple_call_arg (call, 3)
				  : integer_zero_node);
      result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
			       mode, !lhs);
    }
  if (!lhs)
    return;
  /* Extract the requested bit from the fetched value: either shift it
     down to bit 0 (flag set) or just mask it in place (flag clear).  */
  if (integer_onep (flag))
    {
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
6320 1.1 mrg
/* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function.

   CALL's arguments are a constant encoding the comparison against zero
   (one of the ATOMIC_OP_FETCH_CMP_0_* codes), the object pointer and the
   operand.  A 5-argument call is the __atomic_ flavor and carries the
   memory model in argument 3.  The boolean comparison result is stored
   into the call's LHS, if any.  */

void
expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
{
  tree cmp = gimple_call_arg (call, 0);
  tree ptr = gimple_call_arg (call, 1);
  tree arg = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
  optab optab;
  rtx_code code;
  class expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* The __atomic_ flavor (5 args) supplies an explicit memory model.  */
  if (gimple_call_num_args (call) == 5)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx op = expand_expr_force_mode (arg, mode);

  /* Map the internal function to the RTL operation and target optab.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_ADD_FETCH_CMP_0:
      code = PLUS;
      optab = atomic_add_fetch_cmp_0_optab;
      break;
    case IFN_ATOMIC_SUB_FETCH_CMP_0:
      code = MINUS;
      optab = atomic_sub_fetch_cmp_0_optab;
      break;
    case IFN_ATOMIC_AND_FETCH_CMP_0:
      code = AND;
      optab = atomic_and_fetch_cmp_0_optab;
      break;
    case IFN_ATOMIC_OR_FETCH_CMP_0:
      code = IOR;
      optab = atomic_or_fetch_cmp_0_optab;
      break;
    case IFN_ATOMIC_XOR_FETCH_CMP_0:
      code = XOR;
      optab = atomic_xor_fetch_cmp_0_optab;
      break;
    default:
      gcc_unreachable ();
    }

  /* Decode the comparison selector into an RTL comparison code.  */
  enum rtx_code comp = UNKNOWN;
  switch (tree_to_uhwi (cmp))
    {
    case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
    case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
    case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
    case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
    case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
    case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
    default: gcc_unreachable ();
    }

  /* First try the target's combined op-fetch-and-compare pattern.  */
  rtx target;
  if (lhs == NULL_TREE)
    target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
  else
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], op, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], comp);
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* Otherwise perform a generic atomic op-fetch (value after the
     operation) and compare it against zero separately below.  */
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
				       code, model, true);
  if (!result)
    {
      /* No inline expansion possible: build and expand an explicit call
	 to the library routine passed as the call's trailing argument.  */
      bool is_atomic = gimple_call_num_args (call) == 5;
      tree tcall = gimple_call_arg (call, 3 + is_atomic);
      tree fndecl = gimple_call_addr_fndecl (tcall);
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree exp = build_call_nary (type, tcall,
				  2 + is_atomic, ptr, arg,
				  is_atomic
				  ? gimple_call_arg (call, 3)
				  : integer_zero_node);
      result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
			       mode, !lhs);
    }

  if (lhs)
    {
      result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
				      0, 1);
      if (result != target)
	emit_move_insn (target, result);
    }
}
6422 1.1 mrg
6423 1.1 mrg /* Expand an atomic clear operation.
6424 1.1 mrg void _atomic_clear (BOOL *obj, enum memmodel)
6425 1.1 mrg EXP is the call expression. */
6426 1.1 mrg
6427 1.1 mrg static rtx
6428 1.1 mrg expand_builtin_atomic_clear (tree exp)
6429 1.1 mrg {
6430 1.1 mrg machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6431 1.1 mrg rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6432 1.1 mrg memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6433 1.1 mrg
6434 1.1 mrg if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6435 1.1 mrg model = MEMMODEL_SEQ_CST;
6436 1.1 mrg
6437 1.1 mrg /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6438 1.1 mrg Failing that, a store is issued by __atomic_store. The only way this can
6439 1.1 mrg fail is if the bool type is larger than a word size. Unlikely, but
6440 1.1 mrg handle it anyway for completeness. Assume a single threaded model since
6441 1.1 mrg there is no atomic support in this case, and no barriers are required. */
6442 1.1 mrg rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
6443 1.1 mrg if (!ret)
6444 1.1 mrg emit_move_insn (mem, const0_rtx);
6445 1.1 mrg return const0_rtx;
6446 1.1 mrg }
6447 1.1 mrg
6448 1.1 mrg /* Expand an atomic test_and_set operation.
6449 1.1 mrg bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6450 1.1 mrg EXP is the call expression. */
6451 1.1 mrg
6452 1.1 mrg static rtx
6453 1.1 mrg expand_builtin_atomic_test_and_set (tree exp, rtx target)
6454 1.1 mrg {
6455 1.1 mrg rtx mem;
6456 1.1 mrg enum memmodel model;
6457 1.1 mrg machine_mode mode;
6458 1.1 mrg
6459 1.1 mrg mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6460 1.1 mrg mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6461 1.1 mrg model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6462 1.1 mrg
6463 1.1 mrg return expand_atomic_test_and_set (target, mem, model);
6464 1.1 mrg }
6465 1.1 mrg
6466 1.1 mrg
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.
   Returns boolean_true_node/boolean_false_node on a definite answer, or
   NULL_TREE when the size is not a compile-time constant.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  /* Without a constant size no compile-time answer is possible.  */
  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
         the alignment of the object.  The lowest set bit of the value
	 bounds the object's alignment from below.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      /* Clamp to the mode's own alignment; a zero value (null pointer)
	 means "assume typical alignment for this mode".  */
      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
	  && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
6537 1.1 mrg
6538 1.1 mrg /* Return true if the parameters to call EXP represent an object which will
6539 1.1 mrg always generate lock free instructions. The first argument represents the
6540 1.1 mrg size of the object, and the second parameter is a pointer to the object
6541 1.1 mrg itself. If NULL is passed for the object, then the result is based on
6542 1.1 mrg typical alignment for an object of the specified size. Otherwise return
6543 1.1 mrg false. */
6544 1.1 mrg
6545 1.1 mrg static rtx
6546 1.1 mrg expand_builtin_atomic_always_lock_free (tree exp)
6547 1.1 mrg {
6548 1.1 mrg tree size;
6549 1.1 mrg tree arg0 = CALL_EXPR_ARG (exp, 0);
6550 1.1 mrg tree arg1 = CALL_EXPR_ARG (exp, 1);
6551 1.1 mrg
6552 1.1 mrg if (TREE_CODE (arg0) != INTEGER_CST)
6553 1.1 mrg {
6554 1.1 mrg error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6555 1.1 mrg return const0_rtx;
6556 1.1 mrg }
6557 1.1 mrg
6558 1.1 mrg size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6559 1.1 mrg if (size == boolean_true_node)
6560 1.1 mrg return const1_rtx;
6561 1.1 mrg return const0_rtx;
6562 1.1 mrg }
6563 1.1 mrg
6564 1.1 mrg /* Return a one or zero if it can be determined that object ARG1 of size ARG
6565 1.1 mrg is lock free on this architecture. */
6566 1.1 mrg
6567 1.1 mrg static tree
6568 1.1 mrg fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6569 1.1 mrg {
6570 1.1 mrg if (!flag_inline_atomics)
6571 1.1 mrg return NULL_TREE;
6572 1.1 mrg
6573 1.1 mrg /* If it isn't always lock free, don't generate a result. */
6574 1.1 mrg if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6575 1.1 mrg return boolean_true_node;
6576 1.1 mrg
6577 1.1 mrg return NULL_TREE;
6578 1.1 mrg }
6579 1.1 mrg
6580 1.1 mrg /* Return true if the parameters to call EXP represent an object which will
6581 1.1 mrg always generate lock free instructions. The first argument represents the
6582 1.1 mrg size of the object, and the second parameter is a pointer to the object
6583 1.1 mrg itself. If NULL is passed for the object, then the result is based on
6584 1.1 mrg typical alignment for an object of the specified size. Otherwise return
6585 1.1 mrg NULL*/
6586 1.1 mrg
6587 1.1 mrg static rtx
6588 1.1 mrg expand_builtin_atomic_is_lock_free (tree exp)
6589 1.1 mrg {
6590 1.1 mrg tree size;
6591 1.1 mrg tree arg0 = CALL_EXPR_ARG (exp, 0);
6592 1.1 mrg tree arg1 = CALL_EXPR_ARG (exp, 1);
6593 1.1 mrg
6594 1.1 mrg if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6595 1.1 mrg {
6596 1.1 mrg error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6597 1.1 mrg return NULL_RTX;
6598 1.1 mrg }
6599 1.1 mrg
6600 1.1 mrg if (!flag_inline_atomics)
6601 1.1 mrg return NULL_RTX;
6602 1.1 mrg
6603 1.1 mrg /* If the value is known at compile time, return the RTX for it. */
6604 1.1 mrg size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6605 1.1 mrg if (size == boolean_true_node)
6606 1.1 mrg return const1_rtx;
6607 1.1 mrg
6608 1.1 mrg return NULL_RTX;
6609 1.1 mrg }
6610 1.1 mrg
6611 1.1 mrg /* Expand the __atomic_thread_fence intrinsic:
6612 1.1 mrg void __atomic_thread_fence (enum memmodel)
6613 1.1 mrg EXP is the CALL_EXPR. */
6614 1.1 mrg
6615 1.1 mrg static void
6616 1.1 mrg expand_builtin_atomic_thread_fence (tree exp)
6617 1.1 mrg {
6618 1.1 mrg enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6619 1.1 mrg expand_mem_thread_fence (model);
6620 1.1 mrg }
6621 1.1 mrg
6622 1.1 mrg /* Expand the __atomic_signal_fence intrinsic:
6623 1.1 mrg void __atomic_signal_fence (enum memmodel)
6624 1.1 mrg EXP is the CALL_EXPR. */
6625 1.1 mrg
6626 1.1 mrg static void
6627 1.1 mrg expand_builtin_atomic_signal_fence (tree exp)
6628 1.1 mrg {
6629 1.1 mrg enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6630 1.1 mrg expand_mem_signal_fence (model);
6631 1.1 mrg }
6632 1.1 mrg
/* Expand the __sync_synchronize intrinsic.  The legacy __sync builtins
   always use the strongest (SYNC_SEQ_CST) ordering.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
6640 1.1 mrg
6641 1.1 mrg static rtx
6642 1.1 mrg expand_builtin_thread_pointer (tree exp, rtx target)
6643 1.1 mrg {
6644 1.1 mrg enum insn_code icode;
6645 1.1 mrg if (!validate_arglist (exp, VOID_TYPE))
6646 1.1 mrg return const0_rtx;
6647 1.1 mrg icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6648 1.1 mrg if (icode != CODE_FOR_nothing)
6649 1.1 mrg {
6650 1.1 mrg class expand_operand op;
6651 1.1 mrg /* If the target is not sutitable then create a new target. */
6652 1.1 mrg if (target == NULL_RTX
6653 1.1 mrg || !REG_P (target)
6654 1.1 mrg || GET_MODE (target) != Pmode)
6655 1.1 mrg target = gen_reg_rtx (Pmode);
6656 1.1 mrg create_output_operand (&op, target, Pmode);
6657 1.1 mrg expand_insn (icode, 1, &op);
6658 1.1 mrg return target;
6659 1.1 mrg }
6660 1.1 mrg error ("%<__builtin_thread_pointer%> is not supported on this target");
6661 1.1 mrg return const0_rtx;
6662 1.1 mrg }
6663 1.1 mrg
6664 1.1 mrg static void
6665 1.1 mrg expand_builtin_set_thread_pointer (tree exp)
6666 1.1 mrg {
6667 1.1 mrg enum insn_code icode;
6668 1.1 mrg if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6669 1.1 mrg return;
6670 1.1 mrg icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6671 1.1 mrg if (icode != CODE_FOR_nothing)
6672 1.1 mrg {
6673 1.1 mrg class expand_operand op;
6674 1.1 mrg rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6675 1.1 mrg Pmode, EXPAND_NORMAL);
6676 1.1 mrg create_input_operand (&op, val, Pmode);
6677 1.1 mrg expand_insn (icode, 1, &op);
6678 1.1 mrg return;
6679 1.1 mrg }
6680 1.1 mrg error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6681 1.1 mrg }
6682 1.1 mrg
6683 1.1 mrg
/* Emit code to restore the current value of stack.  VAR holds the saved
   stack pointer produced by a matching expand_stack_save.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  /* The saved value may be in a narrower pointer mode; widen/convert it
     to Pmode before restoring.  */
  sa = convert_memory_address (Pmode, sa);

  /* Remember the last insn before the restore so that the args-size
     notes on everything the restore emits can be fixed up below.  */
  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}
6702 1.1 mrg
6703 1.1 mrg /* Emit code to save the current value of stack. */
6704 1.1 mrg
6705 1.1 mrg static rtx
6706 1.1 mrg expand_stack_save (void)
6707 1.1 mrg {
6708 1.1 mrg rtx ret = NULL_RTX;
6709 1.1 mrg
6710 1.1 mrg emit_stack_save (SAVE_BLOCK, &ret);
6711 1.1 mrg return ret;
6712 1.1 mrg }
6713 1.1 mrg
/* Emit code to get the openacc gang, worker or vector id or size.
   EXP is the CALL_EXPR, TARGET an optional place for the result, and
   IGNORE is nonzero when the result is unused.  */

static rtx
expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
{
  const char *name;
  rtx fallback_retval;
  rtx_insn *(*gen_fn) (rtx, rtx);
  /* Select the diagnostic name, the value to use when the target has no
     oacc_dim insns, and the generator for the target insn.  */
  switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
    {
    case BUILT_IN_GOACC_PARLEVEL_ID:
      name = "__builtin_goacc_parlevel_id";
      fallback_retval = const0_rtx;
      gen_fn = targetm.gen_oacc_dim_pos;
      break;
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      name = "__builtin_goacc_parlevel_size";
      fallback_retval = const1_rtx;
      gen_fn = targetm.gen_oacc_dim_size;
      break;
    default:
      gcc_unreachable ();
    }

  /* These builtins are only meaningful inside OpenACC offloaded code.  */
  if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
    {
      error ("%qs only supported in OpenACC code", name);
      return const0_rtx;
    }

  /* The dimension argument must be a constant...  */
  tree arg = CALL_EXPR_ARG (exp, 0);
  if (TREE_CODE (arg) != INTEGER_CST)
    {
      error ("non-constant argument 0 to %qs", name);
      return const0_rtx;
    }

  /* ... and must name one of the known parallelism dimensions.  */
  int dim = TREE_INT_CST_LOW (arg);
  switch (dim)
    {
    case GOMP_DIM_GANG:
    case GOMP_DIM_WORKER:
    case GOMP_DIM_VECTOR:
      break;
    default:
      error ("illegal argument 0 to %qs", name);
      return const0_rtx;
    }

  if (ignore)
    return target;

  if (target == NULL_RTX)
    target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  /* Host-side (or non-offloading) compilation: use the trivial value.  */
  if (!targetm.have_oacc_dim_size ())
    {
      emit_move_insn (target, fallback_retval);
      return target;
    }

  /* The insn needs a register output; go through a temporary when the
     requested TARGET is a memory location.  */
  rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
  emit_insn (gen_fn (reg, GEN_INT (dim)));
  if (reg != target)
    emit_move_insn (target, reg);

  return target;
}
6782 1.1 mrg
/* Expand a string compare operation using a sequence of char comparison
   to get rid of the calling overhead, with result going to TARGET if
   that's convenient.

   VAR_STR is the variable string source;
   CONST_STR is the constant string source;
   LENGTH is the number of chars to compare;
   CONST_STR_N indicates which source string is the constant string;
   IS_MEMCMP indicates whether it's a memcmp or strcmp.

   to: (assume const_str_n is 2, i.e., arg2 is a constant string)

   target = (int) (unsigned char) var_str[0]
	    - (int) (unsigned char) const_str[0];
   if (target != 0)
     goto ne_label;
     ...
   target = (int) (unsigned char) var_str[length - 2]
	    - (int) (unsigned char) const_str[length - 2];
   if (target != 0)
     goto ne_label;
   target = (int) (unsigned char) var_str[length - 1]
	    - (int) (unsigned char) const_str[length - 1];
   ne_label:
  */

static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
		   unsigned HOST_WIDE_INT length,
		   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  rtx result = target ? target : gen_reg_rtx (mode);
  rtx_code_label *ne_label = gen_label_rtx ();
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  start_sequence ();

  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      /* Load the I-th variable byte and materialize the I-th constant
	 byte, then order them according to which argument was the
	 constant so the subtraction has the right sign.  */
      var_rtx
	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      const_rtx = c_readstr (const_str + offset, unit_mode);
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      /* Zero-extend both bytes to MODE and compute their difference.  */
      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      result = expand_simple_binop (mode, MINUS, op0, op1,
				    result, 1, OPTAB_WIDEN);
      /* The last byte's difference IS the result, so no branch is needed
	 after it; every earlier nonzero difference exits early.  */
      if (i < length - 1)
	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
				 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  rtx_insn *insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
6852 1.1 mrg
/* Inline expansion of a call to str(n)cmp and memcmp, with result going
   to TARGET if that's convenient.
   If the call cannot be inlined, return NULL_RTX.  */
6856 1.1 mrg
6857 1.1 mrg static rtx
6858 1.1 mrg inline_expand_builtin_bytecmp (tree exp, rtx target)
6859 1.1 mrg {
6860 1.1 mrg tree fndecl = get_callee_fndecl (exp);
6861 1.1 mrg enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6862 1.1 mrg bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6863 1.1 mrg
6864 1.1 mrg /* Do NOT apply this inlining expansion when optimizing for size or
6865 1.1 mrg optimization level below 2. */
6866 1.1 mrg if (optimize < 2 || optimize_insn_for_size_p ())
6867 1.1 mrg return NULL_RTX;
6868 1.1 mrg
6869 1.1 mrg gcc_checking_assert (fcode == BUILT_IN_STRCMP
6870 1.1 mrg || fcode == BUILT_IN_STRNCMP
6871 1.1 mrg || fcode == BUILT_IN_MEMCMP);
6872 1.1 mrg
  /* On a target where the type of the call (int) has same or narrower precision
     than unsigned char, give up the inlining expansion.  */
6875 1.1 mrg if (TYPE_PRECISION (unsigned_char_type_node)
6876 1.1 mrg >= TYPE_PRECISION (TREE_TYPE (exp)))
6877 1.1 mrg return NULL_RTX;
6878 1.1 mrg
6879 1.1 mrg tree arg1 = CALL_EXPR_ARG (exp, 0);
6880 1.1 mrg tree arg2 = CALL_EXPR_ARG (exp, 1);
6881 1.1 mrg tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6882 1.1 mrg
6883 1.1 mrg unsigned HOST_WIDE_INT len1 = 0;
6884 1.1 mrg unsigned HOST_WIDE_INT len2 = 0;
6885 1.1 mrg unsigned HOST_WIDE_INT len3 = 0;
6886 1.1 mrg
6887 1.1 mrg /* Get the object representation of the initializers of ARG1 and ARG2
6888 1.1 mrg as strings, provided they refer to constant objects, with their byte
6889 1.1 mrg sizes in LEN1 and LEN2, respectively. */
6890 1.1 mrg const char *bytes1 = getbyterep (arg1, &len1);
6891 1.1 mrg const char *bytes2 = getbyterep (arg2, &len2);
6892 1.1 mrg
6893 1.1 mrg /* Fail if neither argument refers to an initialized constant. */
6894 1.1 mrg if (!bytes1 && !bytes2)
6895 1.1 mrg return NULL_RTX;
6896 1.1 mrg
6897 1.1 mrg if (is_ncmp)
6898 1.1 mrg {
6899 1.1 mrg /* Fail if the memcmp/strncmp bound is not a constant. */
6900 1.1 mrg if (!tree_fits_uhwi_p (len3_tree))
6901 1.1 mrg return NULL_RTX;
6902 1.1 mrg
6903 1.1 mrg len3 = tree_to_uhwi (len3_tree);
6904 1.1 mrg
6905 1.1 mrg if (fcode == BUILT_IN_MEMCMP)
6906 1.1 mrg {
6907 1.1 mrg /* Fail if the memcmp bound is greater than the size of either
6908 1.1 mrg of the two constant objects. */
6909 1.1 mrg if ((bytes1 && len1 < len3)
6910 1.1 mrg || (bytes2 && len2 < len3))
6911 1.1 mrg return NULL_RTX;
6912 1.1 mrg }
6913 1.1 mrg }
6914 1.1 mrg
6915 1.1 mrg if (fcode != BUILT_IN_MEMCMP)
6916 1.1 mrg {
6917 1.1 mrg /* For string functions (i.e., strcmp and strncmp) reduce LEN1
6918 1.1 mrg and LEN2 to the length of the nul-terminated string stored
6919 1.1 mrg in each. */
6920 1.1 mrg if (bytes1 != NULL)
6921 1.1 mrg len1 = strnlen (bytes1, len1) + 1;
6922 1.1 mrg if (bytes2 != NULL)
6923 1.1 mrg len2 = strnlen (bytes2, len2) + 1;
6924 1.1 mrg }
6925 1.1 mrg
6926 1.1 mrg /* See inline_string_cmp. */
6927 1.1 mrg int const_str_n;
6928 1.1 mrg if (!len1)
6929 1.1 mrg const_str_n = 2;
6930 1.1 mrg else if (!len2)
6931 1.1 mrg const_str_n = 1;
6932 1.1 mrg else if (len2 > len1)
6933 1.1 mrg const_str_n = 1;
6934 1.1 mrg else
6935 1.1 mrg const_str_n = 2;
6936 1.1 mrg
6937 1.1 mrg /* For strncmp only, compute the new bound as the smallest of
6938 1.1 mrg the lengths of the two strings (plus 1) and the bound provided
6939 1.1 mrg to the function. */
6940 1.1 mrg unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
6941 1.1 mrg if (is_ncmp && len3 < bound)
6942 1.1 mrg bound = len3;
6943 1.1 mrg
6944 1.1 mrg /* If the bound of the comparison is larger than the threshold,
6945 1.1 mrg do nothing. */
6946 1.1 mrg if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
6947 1.1 mrg return NULL_RTX;
6948 1.1 mrg
6949 1.1 mrg machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6950 1.1 mrg
6951 1.1 mrg /* Now, start inline expansion the call. */
6952 1.1 mrg return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6953 1.1 mrg (const_str_n == 1) ? bytes1 : bytes2, bound,
6954 1.1 mrg const_str_n, mode);
6955 1.1 mrg }
6956 1.1 mrg
6957 1.1 mrg /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6958 1.1 mrg represents the size of the first argument to that call, or VOIDmode
6959 1.1 mrg if the argument is a pointer. IGNORE will be true if the result
6960 1.1 mrg isn't used. */
6961 1.1 mrg static rtx
6962 1.1 mrg expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6963 1.1 mrg bool ignore)
6964 1.1 mrg {
6965 1.1 mrg rtx val, failsafe;
6966 1.1 mrg unsigned nargs = call_expr_nargs (exp);
6967 1.1 mrg
6968 1.1 mrg tree arg0 = CALL_EXPR_ARG (exp, 0);
6969 1.1 mrg
6970 1.1 mrg if (mode == VOIDmode)
6971 1.1 mrg {
6972 1.1 mrg mode = TYPE_MODE (TREE_TYPE (arg0));
6973 1.1 mrg gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6974 1.1 mrg }
6975 1.1 mrg
6976 1.1 mrg val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6977 1.1 mrg
6978 1.1 mrg /* An optional second argument can be used as a failsafe value on
6979 1.1 mrg some machines. If it isn't present, then the failsafe value is
6980 1.1 mrg assumed to be 0. */
6981 1.1 mrg if (nargs > 1)
6982 1.1 mrg {
6983 1.1 mrg tree arg1 = CALL_EXPR_ARG (exp, 1);
6984 1.1 mrg failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6985 1.1 mrg }
6986 1.1 mrg else
6987 1.1 mrg failsafe = const0_rtx;
6988 1.1 mrg
6989 1.1 mrg /* If the result isn't used, the behavior is undefined. It would be
6990 1.1 mrg nice to emit a warning here, but path splitting means this might
6991 1.1 mrg happen with legitimate code. So simply drop the builtin
6992 1.1 mrg expansion in that case; we've handled any side-effects above. */
6993 1.1 mrg if (ignore)
6994 1.1 mrg return const0_rtx;
6995 1.1 mrg
6996 1.1 mrg /* If we don't have a suitable target, create one to hold the result. */
6997 1.1 mrg if (target == NULL || GET_MODE (target) != mode)
6998 1.1 mrg target = gen_reg_rtx (mode);
6999 1.1 mrg
7000 1.1 mrg if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7001 1.1 mrg val = convert_modes (mode, VOIDmode, val, false);
7002 1.1 mrg
7003 1.1 mrg return targetm.speculation_safe_value (mode, target, val, failsafe);
7004 1.1 mrg }
7005 1.1 mrg
7006 1.1 mrg /* Expand an expression EXP that calls a built-in function,
7007 1.1 mrg with result going to TARGET if that's convenient
7008 1.1 mrg (and in mode MODE if that's convenient).
7009 1.1 mrg SUBTARGET may be used as the target for computing one of EXP's operands.
7010 1.1 mrg IGNORE is nonzero if the value is to be ignored. */
7011 1.1 mrg
7012 1.1 mrg rtx
7013 1.1 mrg expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7014 1.1 mrg int ignore)
7015 1.1 mrg {
7016 1.1 mrg tree fndecl = get_callee_fndecl (exp);
7017 1.1 mrg machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7018 1.1 mrg int flags;
7019 1.1 mrg
7020 1.1 mrg if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7021 1.1 mrg return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7022 1.1 mrg
7023 1.1 mrg /* When ASan is enabled, we don't want to expand some memory/string
7024 1.1 mrg builtins and rely on libsanitizer's hooks. This allows us to avoid
7025 1.1 mrg redundant checks and be sure, that possible overflow will be detected
7026 1.1 mrg by ASan. */
7027 1.1 mrg
7028 1.1 mrg enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7029 1.1 mrg if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7030 1.1 mrg return expand_call (exp, target, ignore);
7031 1.1 mrg
7032 1.1 mrg /* When not optimizing, generate calls to library functions for a certain
7033 1.1 mrg set of builtins. */
7034 1.1 mrg if (!optimize
7035 1.1 mrg && !called_as_built_in (fndecl)
7036 1.1 mrg && fcode != BUILT_IN_FORK
7037 1.1 mrg && fcode != BUILT_IN_EXECL
7038 1.1 mrg && fcode != BUILT_IN_EXECV
7039 1.1 mrg && fcode != BUILT_IN_EXECLP
7040 1.1 mrg && fcode != BUILT_IN_EXECLE
7041 1.1 mrg && fcode != BUILT_IN_EXECVP
7042 1.1 mrg && fcode != BUILT_IN_EXECVE
7043 1.1 mrg && fcode != BUILT_IN_CLEAR_CACHE
7044 1.1 mrg && !ALLOCA_FUNCTION_CODE_P (fcode)
7045 1.1 mrg && fcode != BUILT_IN_FREE)
7046 1.1 mrg return expand_call (exp, target, ignore);
7047 1.1 mrg
7048 1.1 mrg /* The built-in function expanders test for target == const0_rtx
7049 1.1 mrg to determine whether the function's result will be ignored. */
7050 1.1 mrg if (ignore)
7051 1.1 mrg target = const0_rtx;
7052 1.1 mrg
7053 1.1 mrg /* If the result of a pure or const built-in function is ignored, and
7054 1.1 mrg none of its arguments are volatile, we can avoid expanding the
7055 1.1 mrg built-in call and just evaluate the arguments for side-effects. */
7056 1.1 mrg if (target == const0_rtx
7057 1.1 mrg && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7058 1.1 mrg && !(flags & ECF_LOOPING_CONST_OR_PURE))
7059 1.1 mrg {
7060 1.1 mrg bool volatilep = false;
7061 1.1 mrg tree arg;
7062 1.1 mrg call_expr_arg_iterator iter;
7063 1.1 mrg
7064 1.1 mrg FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7065 1.1 mrg if (TREE_THIS_VOLATILE (arg))
7066 1.1 mrg {
7067 1.1 mrg volatilep = true;
7068 1.1 mrg break;
7069 1.1 mrg }
7070 1.1 mrg
7071 1.1 mrg if (! volatilep)
7072 1.1 mrg {
7073 1.1 mrg FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7074 1.1 mrg expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7075 1.1 mrg return const0_rtx;
7076 1.1 mrg }
7077 1.1 mrg }
7078 1.1 mrg
7079 1.1 mrg switch (fcode)
7080 1.1 mrg {
7081 1.1 mrg CASE_FLT_FN (BUILT_IN_FABS):
7082 1.1 mrg CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7083 1.1 mrg case BUILT_IN_FABSD32:
7084 1.1 mrg case BUILT_IN_FABSD64:
7085 1.1 mrg case BUILT_IN_FABSD128:
7086 1.1 mrg target = expand_builtin_fabs (exp, target, subtarget);
7087 1.1 mrg if (target)
7088 1.1 mrg return target;
7089 1.1 mrg break;
7090 1.1 mrg
7091 1.1 mrg CASE_FLT_FN (BUILT_IN_COPYSIGN):
7092 1.1 mrg CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7093 1.1 mrg target = expand_builtin_copysign (exp, target, subtarget);
7094 1.1 mrg if (target)
7095 1.1 mrg return target;
7096 1.1 mrg break;
7097 1.1 mrg
7098 1.1 mrg /* Just do a normal library call if we were unable to fold
7099 1.1 mrg the values. */
7100 1.1 mrg CASE_FLT_FN (BUILT_IN_CABS):
7101 1.1 mrg break;
7102 1.1 mrg
7103 1.1 mrg CASE_FLT_FN (BUILT_IN_FMA):
7104 1.1 mrg CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7105 1.1 mrg target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7106 1.1 mrg if (target)
7107 1.1 mrg return target;
7108 1.1 mrg break;
7109 1.1 mrg
7110 1.1 mrg CASE_FLT_FN (BUILT_IN_ILOGB):
7111 1.1 mrg if (! flag_unsafe_math_optimizations)
7112 1.1 mrg break;
7113 1.1 mrg gcc_fallthrough ();
7114 1.1 mrg CASE_FLT_FN (BUILT_IN_ISINF):
7115 1.1 mrg CASE_FLT_FN (BUILT_IN_FINITE):
7116 1.1 mrg case BUILT_IN_ISFINITE:
7117 1.1 mrg case BUILT_IN_ISNORMAL:
7118 1.1 mrg target = expand_builtin_interclass_mathfn (exp, target);
7119 1.1 mrg if (target)
7120 1.1 mrg return target;
7121 1.1 mrg break;
7122 1.1 mrg
7123 1.1 mrg CASE_FLT_FN (BUILT_IN_ICEIL):
7124 1.1 mrg CASE_FLT_FN (BUILT_IN_LCEIL):
7125 1.1 mrg CASE_FLT_FN (BUILT_IN_LLCEIL):
7126 1.1 mrg CASE_FLT_FN (BUILT_IN_LFLOOR):
7127 1.1 mrg CASE_FLT_FN (BUILT_IN_IFLOOR):
7128 1.1 mrg CASE_FLT_FN (BUILT_IN_LLFLOOR):
7129 1.1 mrg target = expand_builtin_int_roundingfn (exp, target);
7130 1.1 mrg if (target)
7131 1.1 mrg return target;
7132 1.1 mrg break;
7133 1.1 mrg
7134 1.1 mrg CASE_FLT_FN (BUILT_IN_IRINT):
7135 1.1 mrg CASE_FLT_FN (BUILT_IN_LRINT):
7136 1.1 mrg CASE_FLT_FN (BUILT_IN_LLRINT):
7137 1.1 mrg CASE_FLT_FN (BUILT_IN_IROUND):
7138 1.1 mrg CASE_FLT_FN (BUILT_IN_LROUND):
7139 1.1 mrg CASE_FLT_FN (BUILT_IN_LLROUND):
7140 1.1 mrg target = expand_builtin_int_roundingfn_2 (exp, target);
7141 1.1 mrg if (target)
7142 1.1 mrg return target;
7143 1.1 mrg break;
7144 1.1 mrg
7145 1.1 mrg CASE_FLT_FN (BUILT_IN_POWI):
7146 1.1 mrg target = expand_builtin_powi (exp, target);
7147 1.1 mrg if (target)
7148 1.1 mrg return target;
7149 1.1 mrg break;
7150 1.1 mrg
7151 1.1 mrg CASE_FLT_FN (BUILT_IN_CEXPI):
7152 1.1 mrg target = expand_builtin_cexpi (exp, target);
7153 1.1 mrg gcc_assert (target);
7154 1.1 mrg return target;
7155 1.1 mrg
7156 1.1 mrg CASE_FLT_FN (BUILT_IN_SIN):
7157 1.1 mrg CASE_FLT_FN (BUILT_IN_COS):
7158 1.1 mrg if (! flag_unsafe_math_optimizations)
7159 1.1 mrg break;
7160 1.1 mrg target = expand_builtin_mathfn_3 (exp, target, subtarget);
7161 1.1 mrg if (target)
7162 1.1 mrg return target;
7163 1.1 mrg break;
7164 1.1 mrg
7165 1.1 mrg CASE_FLT_FN (BUILT_IN_SINCOS):
7166 1.1 mrg if (! flag_unsafe_math_optimizations)
7167 1.1 mrg break;
7168 1.1 mrg target = expand_builtin_sincos (exp);
7169 1.1 mrg if (target)
7170 1.1 mrg return target;
7171 1.1 mrg break;
7172 1.1 mrg
7173 1.1 mrg case BUILT_IN_FEGETROUND:
7174 1.1 mrg target = expand_builtin_fegetround (exp, target, target_mode);
7175 1.1 mrg if (target)
7176 1.1 mrg return target;
7177 1.1 mrg break;
7178 1.1 mrg
7179 1.1 mrg case BUILT_IN_FECLEAREXCEPT:
7180 1.1 mrg target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7181 1.1 mrg feclearexcept_optab);
7182 1.1 mrg if (target)
7183 1.1 mrg return target;
7184 1.1 mrg break;
7185 1.1 mrg
7186 1.1 mrg case BUILT_IN_FERAISEEXCEPT:
7187 1.1 mrg target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7188 1.1 mrg feraiseexcept_optab);
7189 1.1 mrg if (target)
7190 1.1 mrg return target;
7191 1.1 mrg break;
7192 1.1 mrg
7193 1.1 mrg case BUILT_IN_APPLY_ARGS:
7194 1.1 mrg return expand_builtin_apply_args ();
7195 1.1 mrg
7196 1.1 mrg /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7197 1.1 mrg FUNCTION with a copy of the parameters described by
7198 1.1 mrg ARGUMENTS, and ARGSIZE. It returns a block of memory
7199 1.1 mrg allocated on the stack into which is stored all the registers
7200 1.1 mrg that might possibly be used for returning the result of a
7201 1.1 mrg function. ARGUMENTS is the value returned by
7202 1.1 mrg __builtin_apply_args. ARGSIZE is the number of bytes of
7203 1.1 mrg arguments that must be copied. ??? How should this value be
7204 1.1 mrg computed? We'll also need a safe worst case value for varargs
7205 1.1 mrg functions. */
7206 1.1 mrg case BUILT_IN_APPLY:
7207 1.1 mrg if (!validate_arglist (exp, POINTER_TYPE,
7208 1.1 mrg POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7209 1.1 mrg && !validate_arglist (exp, REFERENCE_TYPE,
7210 1.1 mrg POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7211 1.1 mrg return const0_rtx;
7212 1.1 mrg else
7213 1.1 mrg {
7214 1.1 mrg rtx ops[3];
7215 1.1 mrg
7216 1.1 mrg ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7217 1.1 mrg ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7218 1.1 mrg ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7219 1.1 mrg
7220 1.1 mrg return expand_builtin_apply (ops[0], ops[1], ops[2]);
7221 1.1 mrg }
7222 1.1 mrg
7223 1.1 mrg /* __builtin_return (RESULT) causes the function to return the
7224 1.1 mrg value described by RESULT. RESULT is address of the block of
7225 1.1 mrg memory returned by __builtin_apply. */
7226 1.1 mrg case BUILT_IN_RETURN:
7227 1.1 mrg if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7228 1.1 mrg expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7229 1.1 mrg return const0_rtx;
7230 1.1 mrg
7231 1.1 mrg case BUILT_IN_SAVEREGS:
7232 1.1 mrg return expand_builtin_saveregs ();
7233 1.1 mrg
7234 1.1 mrg case BUILT_IN_VA_ARG_PACK:
7235 1.1 mrg /* All valid uses of __builtin_va_arg_pack () are removed during
7236 1.1 mrg inlining. */
7237 1.1 mrg error ("invalid use of %<__builtin_va_arg_pack ()%>");
7238 1.1 mrg return const0_rtx;
7239 1.1 mrg
7240 1.1 mrg case BUILT_IN_VA_ARG_PACK_LEN:
7241 1.1 mrg /* All valid uses of __builtin_va_arg_pack_len () are removed during
7242 1.1 mrg inlining. */
7243 1.1 mrg error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7244 1.1 mrg return const0_rtx;
7245 1.1 mrg
7246 1.1 mrg /* Return the address of the first anonymous stack arg. */
7247 1.1 mrg case BUILT_IN_NEXT_ARG:
7248 1.1 mrg if (fold_builtin_next_arg (exp, false))
7249 1.1 mrg return const0_rtx;
7250 1.1 mrg return expand_builtin_next_arg ();
7251 1.1 mrg
7252 1.1 mrg case BUILT_IN_CLEAR_CACHE:
7253 1.1 mrg expand_builtin___clear_cache (exp);
7254 1.1 mrg return const0_rtx;
7255 1.1 mrg
7256 1.1 mrg case BUILT_IN_CLASSIFY_TYPE:
7257 1.1 mrg return expand_builtin_classify_type (exp);
7258 1.1 mrg
7259 1.1 mrg case BUILT_IN_CONSTANT_P:
7260 1.1 mrg return const0_rtx;
7261 1.1 mrg
7262 1.1 mrg case BUILT_IN_FRAME_ADDRESS:
7263 1.1 mrg case BUILT_IN_RETURN_ADDRESS:
7264 1.1 mrg return expand_builtin_frame_address (fndecl, exp);
7265 1.1 mrg
7266 1.1 mrg /* Returns the address of the area where the structure is returned.
7267 1.1 mrg 0 otherwise. */
7268 1.1 mrg case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7269 1.1 mrg if (call_expr_nargs (exp) != 0
7270 1.1 mrg || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7271 1.1 mrg || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7272 1.1 mrg return const0_rtx;
7273 1.1 mrg else
7274 1.1 mrg return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7275 1.1 mrg
7276 1.1 mrg CASE_BUILT_IN_ALLOCA:
7277 1.1 mrg target = expand_builtin_alloca (exp);
7278 1.1 mrg if (target)
7279 1.1 mrg return target;
7280 1.1 mrg break;
7281 1.1 mrg
7282 1.1 mrg case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7283 1.1 mrg return expand_asan_emit_allocas_unpoison (exp);
7284 1.1 mrg
7285 1.1 mrg case BUILT_IN_STACK_SAVE:
7286 1.1 mrg return expand_stack_save ();
7287 1.1 mrg
7288 1.1 mrg case BUILT_IN_STACK_RESTORE:
7289 1.1 mrg expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7290 1.1 mrg return const0_rtx;
7291 1.1 mrg
7292 1.1 mrg case BUILT_IN_BSWAP16:
7293 1.1 mrg case BUILT_IN_BSWAP32:
7294 1.1 mrg case BUILT_IN_BSWAP64:
7295 1.1 mrg case BUILT_IN_BSWAP128:
7296 1.1 mrg target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7297 1.1 mrg if (target)
7298 1.1 mrg return target;
7299 1.1 mrg break;
7300 1.1 mrg
7301 1.1 mrg CASE_INT_FN (BUILT_IN_FFS):
7302 1.1 mrg target = expand_builtin_unop (target_mode, exp, target,
7303 1.1 mrg subtarget, ffs_optab);
7304 1.1 mrg if (target)
7305 1.1 mrg return target;
7306 1.1 mrg break;
7307 1.1 mrg
7308 1.1 mrg CASE_INT_FN (BUILT_IN_CLZ):
7309 1.1 mrg target = expand_builtin_unop (target_mode, exp, target,
7310 1.1 mrg subtarget, clz_optab);
7311 1.1 mrg if (target)
7312 1.1 mrg return target;
7313 1.1 mrg break;
7314 1.1 mrg
7315 1.1 mrg CASE_INT_FN (BUILT_IN_CTZ):
7316 1.1 mrg target = expand_builtin_unop (target_mode, exp, target,
7317 1.1 mrg subtarget, ctz_optab);
7318 1.1 mrg if (target)
7319 1.1 mrg return target;
7320 1.1 mrg break;
7321 1.1 mrg
7322 1.1 mrg CASE_INT_FN (BUILT_IN_CLRSB):
7323 1.1 mrg target = expand_builtin_unop (target_mode, exp, target,
7324 1.1 mrg subtarget, clrsb_optab);
7325 1.1 mrg if (target)
7326 1.1 mrg return target;
7327 1.1 mrg break;
7328 1.1 mrg
7329 1.1 mrg CASE_INT_FN (BUILT_IN_POPCOUNT):
7330 1.1 mrg target = expand_builtin_unop (target_mode, exp, target,
7331 1.1 mrg subtarget, popcount_optab);
7332 1.1 mrg if (target)
7333 1.1 mrg return target;
7334 1.1 mrg break;
7335 1.1 mrg
7336 1.1 mrg CASE_INT_FN (BUILT_IN_PARITY):
7337 1.1 mrg target = expand_builtin_unop (target_mode, exp, target,
7338 1.1 mrg subtarget, parity_optab);
7339 1.1 mrg if (target)
7340 1.1 mrg return target;
7341 1.1 mrg break;
7342 1.1 mrg
7343 1.1 mrg case BUILT_IN_STRLEN:
7344 1.1 mrg target = expand_builtin_strlen (exp, target, target_mode);
7345 1.1 mrg if (target)
7346 1.1 mrg return target;
7347 1.1 mrg break;
7348 1.1 mrg
7349 1.1 mrg case BUILT_IN_STRNLEN:
7350 1.1 mrg target = expand_builtin_strnlen (exp, target, target_mode);
7351 1.1 mrg if (target)
7352 1.1 mrg return target;
7353 1.1 mrg break;
7354 1.1 mrg
7355 1.1 mrg case BUILT_IN_STRCPY:
7356 1.1 mrg target = expand_builtin_strcpy (exp, target);
7357 1.1 mrg if (target)
7358 1.1 mrg return target;
7359 1.1 mrg break;
7360 1.1 mrg
7361 1.1 mrg case BUILT_IN_STRNCPY:
7362 1.1 mrg target = expand_builtin_strncpy (exp, target);
7363 1.1 mrg if (target)
7364 1.1 mrg return target;
7365 1.1 mrg break;
7366 1.1 mrg
7367 1.1 mrg case BUILT_IN_STPCPY:
7368 1.1 mrg target = expand_builtin_stpcpy (exp, target, mode);
7369 1.1 mrg if (target)
7370 1.1 mrg return target;
7371 1.1 mrg break;
7372 1.1 mrg
7373 1.1 mrg case BUILT_IN_MEMCPY:
7374 1.1 mrg target = expand_builtin_memcpy (exp, target);
7375 1.1 mrg if (target)
7376 1.1 mrg return target;
7377 1.1 mrg break;
7378 1.1 mrg
7379 1.1 mrg case BUILT_IN_MEMMOVE:
7380 1.1 mrg target = expand_builtin_memmove (exp, target);
7381 1.1 mrg if (target)
7382 1.1 mrg return target;
7383 1.1 mrg break;
7384 1.1 mrg
7385 1.1 mrg case BUILT_IN_MEMPCPY:
7386 1.1 mrg target = expand_builtin_mempcpy (exp, target);
7387 1.1 mrg if (target)
7388 1.1 mrg return target;
7389 1.1 mrg break;
7390 1.1 mrg
7391 1.1 mrg case BUILT_IN_MEMSET:
7392 1.1 mrg target = expand_builtin_memset (exp, target, mode);
7393 1.1 mrg if (target)
7394 1.1 mrg return target;
7395 1.1 mrg break;
7396 1.1 mrg
7397 1.1 mrg case BUILT_IN_BZERO:
7398 1.1 mrg target = expand_builtin_bzero (exp);
7399 1.1 mrg if (target)
7400 1.1 mrg return target;
7401 1.1 mrg break;
7402 1.1 mrg
7403 1.1 mrg /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7404 1.1 mrg back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7405 1.1 mrg when changing it to a strcmp call. */
7406 1.1 mrg case BUILT_IN_STRCMP_EQ:
7407 1.1 mrg target = expand_builtin_memcmp (exp, target, true);
7408 1.1 mrg if (target)
7409 1.1 mrg return target;
7410 1.1 mrg
7411 1.1 mrg /* Change this call back to a BUILT_IN_STRCMP. */
7412 1.1 mrg TREE_OPERAND (exp, 1)
7413 1.1 mrg = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7414 1.1 mrg
7415 1.1 mrg /* Delete the last parameter. */
7416 1.1 mrg unsigned int i;
7417 1.1 mrg vec<tree, va_gc> *arg_vec;
7418 1.1 mrg vec_alloc (arg_vec, 2);
7419 1.1 mrg for (i = 0; i < 2; i++)
7420 1.1 mrg arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7421 1.1 mrg exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7422 1.1 mrg /* FALLTHROUGH */
7423 1.1 mrg
7424 1.1 mrg case BUILT_IN_STRCMP:
7425 1.1 mrg target = expand_builtin_strcmp (exp, target);
7426 1.1 mrg if (target)
7427 1.1 mrg return target;
7428 1.1 mrg break;
7429 1.1 mrg
7430 1.1 mrg /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7431 1.1 mrg back to a BUILT_IN_STRNCMP. */
7432 1.1 mrg case BUILT_IN_STRNCMP_EQ:
7433 1.1 mrg target = expand_builtin_memcmp (exp, target, true);
7434 1.1 mrg if (target)
7435 1.1 mrg return target;
7436 1.1 mrg
7437 1.1 mrg /* Change it back to a BUILT_IN_STRNCMP. */
7438 1.1 mrg TREE_OPERAND (exp, 1)
7439 1.1 mrg = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7440 1.1 mrg /* FALLTHROUGH */
7441 1.1 mrg
7442 1.1 mrg case BUILT_IN_STRNCMP:
7443 1.1 mrg target = expand_builtin_strncmp (exp, target, mode);
7444 1.1 mrg if (target)
7445 1.1 mrg return target;
7446 1.1 mrg break;
7447 1.1 mrg
7448 1.1 mrg case BUILT_IN_BCMP:
7449 1.1 mrg case BUILT_IN_MEMCMP:
7450 1.1 mrg case BUILT_IN_MEMCMP_EQ:
7451 1.1 mrg target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7452 1.1 mrg if (target)
7453 1.1 mrg return target;
7454 1.1 mrg if (fcode == BUILT_IN_MEMCMP_EQ)
7455 1.1 mrg {
7456 1.1 mrg tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7457 1.1 mrg TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7458 1.1 mrg }
7459 1.1 mrg break;
7460 1.1 mrg
7461 1.1 mrg case BUILT_IN_SETJMP:
7462 1.1 mrg /* This should have been lowered to the builtins below. */
7463 1.1 mrg gcc_unreachable ();
7464 1.1 mrg
7465 1.1 mrg case BUILT_IN_SETJMP_SETUP:
7466 1.1 mrg /* __builtin_setjmp_setup is passed a pointer to an array of five words
7467 1.1 mrg and the receiver label. */
7468 1.1 mrg if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7469 1.1 mrg {
7470 1.1 mrg rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7471 1.1 mrg VOIDmode, EXPAND_NORMAL);
7472 1.1 mrg tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7473 1.1 mrg rtx_insn *label_r = label_rtx (label);
7474 1.1 mrg
7475 1.1 mrg /* This is copied from the handling of non-local gotos. */
7476 1.1 mrg expand_builtin_setjmp_setup (buf_addr, label_r);
7477 1.1 mrg nonlocal_goto_handler_labels
7478 1.1 mrg = gen_rtx_INSN_LIST (VOIDmode, label_r,
7479 1.1 mrg nonlocal_goto_handler_labels);
7480 1.1 mrg /* ??? Do not let expand_label treat us as such since we would
7481 1.1 mrg not want to be both on the list of non-local labels and on
7482 1.1 mrg the list of forced labels. */
7483 1.1 mrg FORCED_LABEL (label) = 0;
7484 1.1 mrg return const0_rtx;
7485 1.1 mrg }
7486 1.1 mrg break;
7487 1.1 mrg
7488 1.1 mrg case BUILT_IN_SETJMP_RECEIVER:
7489 1.1 mrg /* __builtin_setjmp_receiver is passed the receiver label. */
7490 1.1 mrg if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7491 1.1 mrg {
7492 1.1 mrg tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7493 1.1 mrg rtx_insn *label_r = label_rtx (label);
7494 1.1 mrg
7495 1.1 mrg expand_builtin_setjmp_receiver (label_r);
7496 1.1 mrg return const0_rtx;
7497 1.1 mrg }
7498 1.1 mrg break;
7499 1.1 mrg
7500 1.1 mrg /* __builtin_longjmp is passed a pointer to an array of five words.
7501 1.1 mrg It's similar to the C library longjmp function but works with
7502 1.1 mrg __builtin_setjmp above. */
7503 1.1 mrg case BUILT_IN_LONGJMP:
7504 1.1 mrg if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7505 1.1 mrg {
7506 1.1 mrg rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7507 1.1 mrg VOIDmode, EXPAND_NORMAL);
7508 1.1 mrg rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7509 1.1 mrg
7510 1.1 mrg if (value != const1_rtx)
7511 1.1 mrg {
7512 1.1 mrg error ("%<__builtin_longjmp%> second argument must be 1");
7513 1.1 mrg return const0_rtx;
7514 1.1 mrg }
7515 1.1 mrg
7516 1.1 mrg expand_builtin_longjmp (buf_addr, value);
7517 1.1 mrg return const0_rtx;
7518 1.1 mrg }
7519 1.1 mrg break;
7520 1.1 mrg
7521 1.1 mrg case BUILT_IN_NONLOCAL_GOTO:
7522 1.1 mrg target = expand_builtin_nonlocal_goto (exp);
7523 1.1 mrg if (target)
7524 1.1 mrg return target;
7525 1.1 mrg break;
7526 1.1 mrg
7527 1.1 mrg /* This updates the setjmp buffer that is its argument with the value
7528 1.1 mrg of the current stack pointer. */
7529 1.1 mrg case BUILT_IN_UPDATE_SETJMP_BUF:
7530 1.1 mrg if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7531 1.1 mrg {
7532 1.1 mrg rtx buf_addr
7533 1.1 mrg = expand_normal (CALL_EXPR_ARG (exp, 0));
7534 1.1 mrg
7535 1.1 mrg expand_builtin_update_setjmp_buf (buf_addr);
7536 1.1 mrg return const0_rtx;
7537 1.1 mrg }
7538 1.1 mrg break;
7539 1.1 mrg
7540 1.1 mrg case BUILT_IN_TRAP:
7541 1.1 mrg expand_builtin_trap ();
7542 1.1 mrg return const0_rtx;
7543 1.1 mrg
7544 1.1 mrg case BUILT_IN_UNREACHABLE:
7545 1.1 mrg expand_builtin_unreachable ();
7546 1.1 mrg return const0_rtx;
7547 1.1 mrg
7548 1.1 mrg CASE_FLT_FN (BUILT_IN_SIGNBIT):
7549 1.1 mrg case BUILT_IN_SIGNBITD32:
7550 1.1 mrg case BUILT_IN_SIGNBITD64:
7551 1.1 mrg case BUILT_IN_SIGNBITD128:
7552 1.1 mrg target = expand_builtin_signbit (exp, target);
7553 1.1 mrg if (target)
7554 1.1 mrg return target;
7555 1.1 mrg break;
7556 1.1 mrg
7557 1.1 mrg /* Various hooks for the DWARF 2 __throw routine. */
7558 1.1 mrg case BUILT_IN_UNWIND_INIT:
7559 1.1 mrg expand_builtin_unwind_init ();
7560 1.1 mrg return const0_rtx;
7561 1.1 mrg case BUILT_IN_DWARF_CFA:
7562 1.1 mrg return virtual_cfa_rtx;
7563 1.1 mrg #ifdef DWARF2_UNWIND_INFO
7564 1.1 mrg case BUILT_IN_DWARF_SP_COLUMN:
7565 1.1 mrg return expand_builtin_dwarf_sp_column ();
7566 1.1 mrg case BUILT_IN_INIT_DWARF_REG_SIZES:
7567 1.1 mrg expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7568 1.1 mrg return const0_rtx;
7569 1.1 mrg #endif
7570 1.1 mrg case BUILT_IN_FROB_RETURN_ADDR:
7571 1.1 mrg return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7572 1.1 mrg case BUILT_IN_EXTRACT_RETURN_ADDR:
7573 1.1 mrg return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7574 1.1 mrg case BUILT_IN_EH_RETURN:
7575 1.1 mrg expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7576 1.1 mrg CALL_EXPR_ARG (exp, 1));
7577 1.1 mrg return const0_rtx;
7578 1.1 mrg case BUILT_IN_EH_RETURN_DATA_REGNO:
7579 1.1 mrg return expand_builtin_eh_return_data_regno (exp);
7580 1.1 mrg case BUILT_IN_EXTEND_POINTER:
7581 1.1 mrg return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7582 1.1 mrg case BUILT_IN_EH_POINTER:
7583 1.1 mrg return expand_builtin_eh_pointer (exp);
7584 1.1 mrg case BUILT_IN_EH_FILTER:
7585 1.1 mrg return expand_builtin_eh_filter (exp);
7586 1.1 mrg case BUILT_IN_EH_COPY_VALUES:
7587 1.1 mrg return expand_builtin_eh_copy_values (exp);
7588 1.1 mrg
7589 1.1 mrg case BUILT_IN_VA_START:
7590 1.1 mrg return expand_builtin_va_start (exp);
7591 1.1 mrg case BUILT_IN_VA_END:
7592 1.1 mrg return expand_builtin_va_end (exp);
7593 1.1 mrg case BUILT_IN_VA_COPY:
7594 1.1 mrg return expand_builtin_va_copy (exp);
7595 1.1 mrg case BUILT_IN_EXPECT:
7596 1.1 mrg return expand_builtin_expect (exp, target);
7597 1.1 mrg case BUILT_IN_EXPECT_WITH_PROBABILITY:
7598 1.1 mrg return expand_builtin_expect_with_probability (exp, target);
7599 1.1 mrg case BUILT_IN_ASSUME_ALIGNED:
7600 1.1 mrg return expand_builtin_assume_aligned (exp, target);
7601 1.1 mrg case BUILT_IN_PREFETCH:
7602 1.1 mrg expand_builtin_prefetch (exp);
7603 1.1 mrg return const0_rtx;
7604 1.1 mrg
7605 1.1 mrg case BUILT_IN_INIT_TRAMPOLINE:
7606 1.1 mrg return expand_builtin_init_trampoline (exp, true);
7607 1.1 mrg case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7608 1.1 mrg return expand_builtin_init_trampoline (exp, false);
7609 1.1 mrg case BUILT_IN_ADJUST_TRAMPOLINE:
7610 1.1 mrg return expand_builtin_adjust_trampoline (exp);
7611 1.1 mrg
7612 1.1 mrg case BUILT_IN_INIT_DESCRIPTOR:
7613 1.1 mrg return expand_builtin_init_descriptor (exp);
7614 1.1 mrg case BUILT_IN_ADJUST_DESCRIPTOR:
7615 1.1 mrg return expand_builtin_adjust_descriptor (exp);
7616 1.1 mrg
7617 1.1 mrg case BUILT_IN_FORK:
7618 1.1 mrg case BUILT_IN_EXECL:
7619 1.1 mrg case BUILT_IN_EXECV:
7620 1.1 mrg case BUILT_IN_EXECLP:
7621 1.1 mrg case BUILT_IN_EXECLE:
7622 1.1 mrg case BUILT_IN_EXECVP:
7623 1.1 mrg case BUILT_IN_EXECVE:
7624 1.1 mrg target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7625 1.1 mrg if (target)
7626 1.1 mrg return target;
7627 1.1 mrg break;
7628 1.1 mrg
7629 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7630 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7631 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7632 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7633 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7634 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7635 1.1 mrg target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7636 1.1 mrg if (target)
7637 1.1 mrg return target;
7638 1.1 mrg break;
7639 1.1 mrg
7640 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7641 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7642 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7643 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7644 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7645 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7646 1.1 mrg target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7647 1.1 mrg if (target)
7648 1.1 mrg return target;
7649 1.1 mrg break;
7650 1.1 mrg
7651 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_OR_1:
7652 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_OR_2:
7653 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_OR_4:
7654 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_OR_8:
7655 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_OR_16:
7656 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7657 1.1 mrg target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7658 1.1 mrg if (target)
7659 1.1 mrg return target;
7660 1.1 mrg break;
7661 1.1 mrg
7662 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_AND_1:
7663 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_AND_2:
7664 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_AND_4:
7665 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_AND_8:
7666 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_AND_16:
7667 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7668 1.1 mrg target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7669 1.1 mrg if (target)
7670 1.1 mrg return target;
7671 1.1 mrg break;
7672 1.1 mrg
7673 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7674 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7675 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7676 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7677 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7678 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7679 1.1 mrg target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7680 1.1 mrg if (target)
7681 1.1 mrg return target;
7682 1.1 mrg break;
7683 1.1 mrg
7684 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7685 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7686 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7687 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7688 1.1 mrg case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7689 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7690 1.1 mrg target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7691 1.1 mrg if (target)
7692 1.1 mrg return target;
7693 1.1 mrg break;
7694 1.1 mrg
7695 1.1 mrg case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7696 1.1 mrg case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7697 1.1 mrg case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7698 1.1 mrg case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7699 1.1 mrg case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7700 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7701 1.1 mrg target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7702 1.1 mrg if (target)
7703 1.1 mrg return target;
7704 1.1 mrg break;
7705 1.1 mrg
7706 1.1 mrg case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7707 1.1 mrg case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7708 1.1 mrg case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7709 1.1 mrg case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7710 1.1 mrg case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7711 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7712 1.1 mrg target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7713 1.1 mrg if (target)
7714 1.1 mrg return target;
7715 1.1 mrg break;
7716 1.1 mrg
7717 1.1 mrg case BUILT_IN_SYNC_OR_AND_FETCH_1:
7718 1.1 mrg case BUILT_IN_SYNC_OR_AND_FETCH_2:
7719 1.1 mrg case BUILT_IN_SYNC_OR_AND_FETCH_4:
7720 1.1 mrg case BUILT_IN_SYNC_OR_AND_FETCH_8:
7721 1.1 mrg case BUILT_IN_SYNC_OR_AND_FETCH_16:
7722 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7723 1.1 mrg target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7724 1.1 mrg if (target)
7725 1.1 mrg return target;
7726 1.1 mrg break;
7727 1.1 mrg
7728 1.1 mrg case BUILT_IN_SYNC_AND_AND_FETCH_1:
7729 1.1 mrg case BUILT_IN_SYNC_AND_AND_FETCH_2:
7730 1.1 mrg case BUILT_IN_SYNC_AND_AND_FETCH_4:
7731 1.1 mrg case BUILT_IN_SYNC_AND_AND_FETCH_8:
7732 1.1 mrg case BUILT_IN_SYNC_AND_AND_FETCH_16:
7733 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7734 1.1 mrg target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7735 1.1 mrg if (target)
7736 1.1 mrg return target;
7737 1.1 mrg break;
7738 1.1 mrg
7739 1.1 mrg case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7740 1.1 mrg case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7741 1.1 mrg case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7742 1.1 mrg case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7743 1.1 mrg case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7744 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7745 1.1 mrg target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7746 1.1 mrg if (target)
7747 1.1 mrg return target;
7748 1.1 mrg break;
7749 1.1 mrg
7750 1.1 mrg case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7751 1.1 mrg case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7752 1.1 mrg case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7753 1.1 mrg case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7754 1.1 mrg case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7755 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7756 1.1 mrg target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7757 1.1 mrg if (target)
7758 1.1 mrg return target;
7759 1.1 mrg break;
7760 1.1 mrg
7761 1.1 mrg case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7762 1.1 mrg case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7763 1.1 mrg case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7764 1.1 mrg case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7765 1.1 mrg case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7766 1.1 mrg if (mode == VOIDmode)
7767 1.1 mrg mode = TYPE_MODE (boolean_type_node);
7768 1.1 mrg if (!target || !register_operand (target, mode))
7769 1.1 mrg target = gen_reg_rtx (mode);
7770 1.1 mrg
7771 1.1 mrg mode = get_builtin_sync_mode
7772 1.1 mrg (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7773 1.1 mrg target = expand_builtin_compare_and_swap (mode, exp, true, target);
7774 1.1 mrg if (target)
7775 1.1 mrg return target;
7776 1.1 mrg break;
7777 1.1 mrg
7778 1.1 mrg case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7779 1.1 mrg case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7780 1.1 mrg case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7781 1.1 mrg case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7782 1.1 mrg case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7783 1.1 mrg mode = get_builtin_sync_mode
7784 1.1 mrg (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7785 1.1 mrg target = expand_builtin_compare_and_swap (mode, exp, false, target);
7786 1.1 mrg if (target)
7787 1.1 mrg return target;
7788 1.1 mrg break;
7789 1.1 mrg
7790 1.1 mrg case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7791 1.1 mrg case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7792 1.1 mrg case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7793 1.1 mrg case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7794 1.1 mrg case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7795 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7796 1.1 mrg target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7797 1.1 mrg if (target)
7798 1.1 mrg return target;
7799 1.1 mrg break;
7800 1.1 mrg
7801 1.1 mrg case BUILT_IN_SYNC_LOCK_RELEASE_1:
7802 1.1 mrg case BUILT_IN_SYNC_LOCK_RELEASE_2:
7803 1.1 mrg case BUILT_IN_SYNC_LOCK_RELEASE_4:
7804 1.1 mrg case BUILT_IN_SYNC_LOCK_RELEASE_8:
7805 1.1 mrg case BUILT_IN_SYNC_LOCK_RELEASE_16:
7806 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7807 1.1 mrg expand_builtin_sync_lock_release (mode, exp);
7808 1.1 mrg return const0_rtx;
7809 1.1 mrg
7810 1.1 mrg case BUILT_IN_SYNC_SYNCHRONIZE:
7811 1.1 mrg expand_builtin_sync_synchronize ();
7812 1.1 mrg return const0_rtx;
7813 1.1 mrg
7814 1.1 mrg case BUILT_IN_ATOMIC_EXCHANGE_1:
7815 1.1 mrg case BUILT_IN_ATOMIC_EXCHANGE_2:
7816 1.1 mrg case BUILT_IN_ATOMIC_EXCHANGE_4:
7817 1.1 mrg case BUILT_IN_ATOMIC_EXCHANGE_8:
7818 1.1 mrg case BUILT_IN_ATOMIC_EXCHANGE_16:
7819 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7820 1.1 mrg target = expand_builtin_atomic_exchange (mode, exp, target);
7821 1.1 mrg if (target)
7822 1.1 mrg return target;
7823 1.1 mrg break;
7824 1.1 mrg
7825 1.1 mrg case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7826 1.1 mrg case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7827 1.1 mrg case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7828 1.1 mrg case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7829 1.1 mrg case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7830 1.1 mrg {
7831 1.1 mrg unsigned int nargs, z;
7832 1.1 mrg vec<tree, va_gc> *vec;
7833 1.1 mrg
7834 1.1 mrg mode =
7835 1.1 mrg get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7836 1.1 mrg target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7837 1.1 mrg if (target)
7838 1.1 mrg return target;
7839 1.1 mrg
7840 1.1 mrg /* If this is turned into an external library call, the weak parameter
7841 1.1 mrg must be dropped to match the expected parameter list. */
7842 1.1 mrg nargs = call_expr_nargs (exp);
7843 1.1 mrg vec_alloc (vec, nargs - 1);
7844 1.1 mrg for (z = 0; z < 3; z++)
7845 1.1 mrg vec->quick_push (CALL_EXPR_ARG (exp, z));
7846 1.1 mrg /* Skip the boolean weak parameter. */
7847 1.1 mrg for (z = 4; z < 6; z++)
7848 1.1 mrg vec->quick_push (CALL_EXPR_ARG (exp, z));
7849 1.1 mrg exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7850 1.1 mrg break;
7851 1.1 mrg }
7852 1.1 mrg
7853 1.1 mrg case BUILT_IN_ATOMIC_LOAD_1:
7854 1.1 mrg case BUILT_IN_ATOMIC_LOAD_2:
7855 1.1 mrg case BUILT_IN_ATOMIC_LOAD_4:
7856 1.1 mrg case BUILT_IN_ATOMIC_LOAD_8:
7857 1.1 mrg case BUILT_IN_ATOMIC_LOAD_16:
7858 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7859 1.1 mrg target = expand_builtin_atomic_load (mode, exp, target);
7860 1.1 mrg if (target)
7861 1.1 mrg return target;
7862 1.1 mrg break;
7863 1.1 mrg
7864 1.1 mrg case BUILT_IN_ATOMIC_STORE_1:
7865 1.1 mrg case BUILT_IN_ATOMIC_STORE_2:
7866 1.1 mrg case BUILT_IN_ATOMIC_STORE_4:
7867 1.1 mrg case BUILT_IN_ATOMIC_STORE_8:
7868 1.1 mrg case BUILT_IN_ATOMIC_STORE_16:
7869 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7870 1.1 mrg target = expand_builtin_atomic_store (mode, exp);
7871 1.1 mrg if (target)
7872 1.1 mrg return const0_rtx;
7873 1.1 mrg break;
7874 1.1 mrg
7875 1.1 mrg case BUILT_IN_ATOMIC_ADD_FETCH_1:
7876 1.1 mrg case BUILT_IN_ATOMIC_ADD_FETCH_2:
7877 1.1 mrg case BUILT_IN_ATOMIC_ADD_FETCH_4:
7878 1.1 mrg case BUILT_IN_ATOMIC_ADD_FETCH_8:
7879 1.1 mrg case BUILT_IN_ATOMIC_ADD_FETCH_16:
7880 1.1 mrg {
7881 1.1 mrg enum built_in_function lib;
7882 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7883 1.1 mrg lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7884 1.1 mrg (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7885 1.1 mrg target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7886 1.1 mrg ignore, lib);
7887 1.1 mrg if (target)
7888 1.1 mrg return target;
7889 1.1 mrg break;
7890 1.1 mrg }
7891 1.1 mrg case BUILT_IN_ATOMIC_SUB_FETCH_1:
7892 1.1 mrg case BUILT_IN_ATOMIC_SUB_FETCH_2:
7893 1.1 mrg case BUILT_IN_ATOMIC_SUB_FETCH_4:
7894 1.1 mrg case BUILT_IN_ATOMIC_SUB_FETCH_8:
7895 1.1 mrg case BUILT_IN_ATOMIC_SUB_FETCH_16:
7896 1.1 mrg {
7897 1.1 mrg enum built_in_function lib;
7898 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7899 1.1 mrg lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7900 1.1 mrg (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7901 1.1 mrg target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7902 1.1 mrg ignore, lib);
7903 1.1 mrg if (target)
7904 1.1 mrg return target;
7905 1.1 mrg break;
7906 1.1 mrg }
7907 1.1 mrg case BUILT_IN_ATOMIC_AND_FETCH_1:
7908 1.1 mrg case BUILT_IN_ATOMIC_AND_FETCH_2:
7909 1.1 mrg case BUILT_IN_ATOMIC_AND_FETCH_4:
7910 1.1 mrg case BUILT_IN_ATOMIC_AND_FETCH_8:
7911 1.1 mrg case BUILT_IN_ATOMIC_AND_FETCH_16:
7912 1.1 mrg {
7913 1.1 mrg enum built_in_function lib;
7914 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7915 1.1 mrg lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7916 1.1 mrg (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7917 1.1 mrg target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7918 1.1 mrg ignore, lib);
7919 1.1 mrg if (target)
7920 1.1 mrg return target;
7921 1.1 mrg break;
7922 1.1 mrg }
7923 1.1 mrg case BUILT_IN_ATOMIC_NAND_FETCH_1:
7924 1.1 mrg case BUILT_IN_ATOMIC_NAND_FETCH_2:
7925 1.1 mrg case BUILT_IN_ATOMIC_NAND_FETCH_4:
7926 1.1 mrg case BUILT_IN_ATOMIC_NAND_FETCH_8:
7927 1.1 mrg case BUILT_IN_ATOMIC_NAND_FETCH_16:
7928 1.1 mrg {
7929 1.1 mrg enum built_in_function lib;
7930 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7931 1.1 mrg lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7932 1.1 mrg (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7933 1.1 mrg target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7934 1.1 mrg ignore, lib);
7935 1.1 mrg if (target)
7936 1.1 mrg return target;
7937 1.1 mrg break;
7938 1.1 mrg }
7939 1.1 mrg case BUILT_IN_ATOMIC_XOR_FETCH_1:
7940 1.1 mrg case BUILT_IN_ATOMIC_XOR_FETCH_2:
7941 1.1 mrg case BUILT_IN_ATOMIC_XOR_FETCH_4:
7942 1.1 mrg case BUILT_IN_ATOMIC_XOR_FETCH_8:
7943 1.1 mrg case BUILT_IN_ATOMIC_XOR_FETCH_16:
7944 1.1 mrg {
7945 1.1 mrg enum built_in_function lib;
7946 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7947 1.1 mrg lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7948 1.1 mrg (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7949 1.1 mrg target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7950 1.1 mrg ignore, lib);
7951 1.1 mrg if (target)
7952 1.1 mrg return target;
7953 1.1 mrg break;
7954 1.1 mrg }
7955 1.1 mrg case BUILT_IN_ATOMIC_OR_FETCH_1:
7956 1.1 mrg case BUILT_IN_ATOMIC_OR_FETCH_2:
7957 1.1 mrg case BUILT_IN_ATOMIC_OR_FETCH_4:
7958 1.1 mrg case BUILT_IN_ATOMIC_OR_FETCH_8:
7959 1.1 mrg case BUILT_IN_ATOMIC_OR_FETCH_16:
7960 1.1 mrg {
7961 1.1 mrg enum built_in_function lib;
7962 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7963 1.1 mrg lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7964 1.1 mrg (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7965 1.1 mrg target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7966 1.1 mrg ignore, lib);
7967 1.1 mrg if (target)
7968 1.1 mrg return target;
7969 1.1 mrg break;
7970 1.1 mrg }
7971 1.1 mrg case BUILT_IN_ATOMIC_FETCH_ADD_1:
7972 1.1 mrg case BUILT_IN_ATOMIC_FETCH_ADD_2:
7973 1.1 mrg case BUILT_IN_ATOMIC_FETCH_ADD_4:
7974 1.1 mrg case BUILT_IN_ATOMIC_FETCH_ADD_8:
7975 1.1 mrg case BUILT_IN_ATOMIC_FETCH_ADD_16:
7976 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7977 1.1 mrg target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7978 1.1 mrg ignore, BUILT_IN_NONE);
7979 1.1 mrg if (target)
7980 1.1 mrg return target;
7981 1.1 mrg break;
7982 1.1 mrg
7983 1.1 mrg case BUILT_IN_ATOMIC_FETCH_SUB_1:
7984 1.1 mrg case BUILT_IN_ATOMIC_FETCH_SUB_2:
7985 1.1 mrg case BUILT_IN_ATOMIC_FETCH_SUB_4:
7986 1.1 mrg case BUILT_IN_ATOMIC_FETCH_SUB_8:
7987 1.1 mrg case BUILT_IN_ATOMIC_FETCH_SUB_16:
7988 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7989 1.1 mrg target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7990 1.1 mrg ignore, BUILT_IN_NONE);
7991 1.1 mrg if (target)
7992 1.1 mrg return target;
7993 1.1 mrg break;
7994 1.1 mrg
7995 1.1 mrg case BUILT_IN_ATOMIC_FETCH_AND_1:
7996 1.1 mrg case BUILT_IN_ATOMIC_FETCH_AND_2:
7997 1.1 mrg case BUILT_IN_ATOMIC_FETCH_AND_4:
7998 1.1 mrg case BUILT_IN_ATOMIC_FETCH_AND_8:
7999 1.1 mrg case BUILT_IN_ATOMIC_FETCH_AND_16:
8000 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8001 1.1 mrg target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8002 1.1 mrg ignore, BUILT_IN_NONE);
8003 1.1 mrg if (target)
8004 1.1 mrg return target;
8005 1.1 mrg break;
8006 1.1 mrg
8007 1.1 mrg case BUILT_IN_ATOMIC_FETCH_NAND_1:
8008 1.1 mrg case BUILT_IN_ATOMIC_FETCH_NAND_2:
8009 1.1 mrg case BUILT_IN_ATOMIC_FETCH_NAND_4:
8010 1.1 mrg case BUILT_IN_ATOMIC_FETCH_NAND_8:
8011 1.1 mrg case BUILT_IN_ATOMIC_FETCH_NAND_16:
8012 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8013 1.1 mrg target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8014 1.1 mrg ignore, BUILT_IN_NONE);
8015 1.1 mrg if (target)
8016 1.1 mrg return target;
8017 1.1 mrg break;
8018 1.1 mrg
8019 1.1 mrg case BUILT_IN_ATOMIC_FETCH_XOR_1:
8020 1.1 mrg case BUILT_IN_ATOMIC_FETCH_XOR_2:
8021 1.1 mrg case BUILT_IN_ATOMIC_FETCH_XOR_4:
8022 1.1 mrg case BUILT_IN_ATOMIC_FETCH_XOR_8:
8023 1.1 mrg case BUILT_IN_ATOMIC_FETCH_XOR_16:
8024 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8025 1.1 mrg target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8026 1.1 mrg ignore, BUILT_IN_NONE);
8027 1.1 mrg if (target)
8028 1.1 mrg return target;
8029 1.1 mrg break;
8030 1.1 mrg
8031 1.1 mrg case BUILT_IN_ATOMIC_FETCH_OR_1:
8032 1.1 mrg case BUILT_IN_ATOMIC_FETCH_OR_2:
8033 1.1 mrg case BUILT_IN_ATOMIC_FETCH_OR_4:
8034 1.1 mrg case BUILT_IN_ATOMIC_FETCH_OR_8:
8035 1.1 mrg case BUILT_IN_ATOMIC_FETCH_OR_16:
8036 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8037 1.1 mrg target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8038 1.1 mrg ignore, BUILT_IN_NONE);
8039 1.1 mrg if (target)
8040 1.1 mrg return target;
8041 1.1 mrg break;
8042 1.1 mrg
8043 1.1 mrg case BUILT_IN_ATOMIC_TEST_AND_SET:
8044 1.1 mrg return expand_builtin_atomic_test_and_set (exp, target);
8045 1.1 mrg
8046 1.1 mrg case BUILT_IN_ATOMIC_CLEAR:
8047 1.1 mrg return expand_builtin_atomic_clear (exp);
8048 1.1 mrg
8049 1.1 mrg case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8050 1.1 mrg return expand_builtin_atomic_always_lock_free (exp);
8051 1.1 mrg
8052 1.1 mrg case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8053 1.1 mrg target = expand_builtin_atomic_is_lock_free (exp);
8054 1.1 mrg if (target)
8055 1.1 mrg return target;
8056 1.1 mrg break;
8057 1.1 mrg
8058 1.1 mrg case BUILT_IN_ATOMIC_THREAD_FENCE:
8059 1.1 mrg expand_builtin_atomic_thread_fence (exp);
8060 1.1 mrg return const0_rtx;
8061 1.1 mrg
8062 1.1 mrg case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8063 1.1 mrg expand_builtin_atomic_signal_fence (exp);
8064 1.1 mrg return const0_rtx;
8065 1.1 mrg
8066 1.1 mrg case BUILT_IN_OBJECT_SIZE:
8067 1.1 mrg case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8068 1.1 mrg return expand_builtin_object_size (exp);
8069 1.1 mrg
8070 1.1 mrg case BUILT_IN_MEMCPY_CHK:
8071 1.1 mrg case BUILT_IN_MEMPCPY_CHK:
8072 1.1 mrg case BUILT_IN_MEMMOVE_CHK:
8073 1.1 mrg case BUILT_IN_MEMSET_CHK:
8074 1.1 mrg target = expand_builtin_memory_chk (exp, target, mode, fcode);
8075 1.1 mrg if (target)
8076 1.1 mrg return target;
8077 1.1 mrg break;
8078 1.1 mrg
8079 1.1 mrg case BUILT_IN_STRCPY_CHK:
8080 1.1 mrg case BUILT_IN_STPCPY_CHK:
8081 1.1 mrg case BUILT_IN_STRNCPY_CHK:
8082 1.1 mrg case BUILT_IN_STPNCPY_CHK:
8083 1.1 mrg case BUILT_IN_STRCAT_CHK:
8084 1.1 mrg case BUILT_IN_STRNCAT_CHK:
8085 1.1 mrg case BUILT_IN_SNPRINTF_CHK:
8086 1.1 mrg case BUILT_IN_VSNPRINTF_CHK:
8087 1.1 mrg maybe_emit_chk_warning (exp, fcode);
8088 1.1 mrg break;
8089 1.1 mrg
8090 1.1 mrg case BUILT_IN_SPRINTF_CHK:
8091 1.1 mrg case BUILT_IN_VSPRINTF_CHK:
8092 1.1 mrg maybe_emit_sprintf_chk_warning (exp, fcode);
8093 1.1 mrg break;
8094 1.1 mrg
8095 1.1 mrg case BUILT_IN_THREAD_POINTER:
8096 1.1 mrg return expand_builtin_thread_pointer (exp, target);
8097 1.1 mrg
8098 1.1 mrg case BUILT_IN_SET_THREAD_POINTER:
8099 1.1 mrg expand_builtin_set_thread_pointer (exp);
8100 1.1 mrg return const0_rtx;
8101 1.1 mrg
8102 1.1 mrg case BUILT_IN_ACC_ON_DEVICE:
8103 1.1 mrg /* Do library call, if we failed to expand the builtin when
8104 1.1 mrg folding. */
8105 1.1 mrg break;
8106 1.1 mrg
8107 1.1 mrg case BUILT_IN_GOACC_PARLEVEL_ID:
8108 1.1 mrg case BUILT_IN_GOACC_PARLEVEL_SIZE:
8109 1.1 mrg return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8110 1.1 mrg
8111 1.1 mrg case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8112 1.1 mrg return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8113 1.1 mrg
8114 1.1 mrg case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8115 1.1 mrg case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8116 1.1 mrg case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8117 1.1 mrg case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8118 1.1 mrg case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8119 1.1 mrg mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8120 1.1 mrg return expand_speculation_safe_value (mode, exp, target, ignore);
8121 1.1 mrg
8122 1.1 mrg default: /* just do library call, if unknown builtin */
8123 1.1 mrg break;
8124 1.1 mrg }
8125 1.1 mrg
8126 1.1 mrg /* The switch statement above can drop through to cause the function
8127 1.1 mrg to be called normally. */
8128 1.1 mrg return expand_call (exp, target, ignore);
8129 1.1 mrg }
8130 1.1 mrg
8131 1.1 mrg /* Determine whether a tree node represents a call to a built-in
8132 1.1 mrg function. If the tree T is a call to a built-in function with
8133 1.1 mrg the right number of arguments of the appropriate types, return
8134 1.1 mrg the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8135 1.1 mrg Otherwise the return value is END_BUILTINS. */
8136 1.1 mrg
8137 1.1 mrg enum built_in_function
8138 1.1 mrg builtin_mathfn_code (const_tree t)
8139 1.1 mrg {
8140 1.1 mrg const_tree fndecl, arg, parmlist;
8141 1.1 mrg const_tree argtype, parmtype;
8142 1.1 mrg const_call_expr_arg_iterator iter;
8143 1.1 mrg
8144 1.1 mrg if (TREE_CODE (t) != CALL_EXPR)
8145 1.1 mrg return END_BUILTINS;
8146 1.1 mrg
8147 1.1 mrg fndecl = get_callee_fndecl (t);
8148 1.1 mrg if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8149 1.1 mrg return END_BUILTINS;
8150 1.1 mrg
8151 1.1 mrg parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8152 1.1 mrg init_const_call_expr_arg_iterator (t, &iter);
8153 1.1 mrg for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8154 1.1 mrg {
8155 1.1 mrg /* If a function doesn't take a variable number of arguments,
8156 1.1 mrg the last element in the list will have type `void'. */
8157 1.1 mrg parmtype = TREE_VALUE (parmlist);
8158 1.1 mrg if (VOID_TYPE_P (parmtype))
8159 1.1 mrg {
8160 1.1 mrg if (more_const_call_expr_args_p (&iter))
8161 1.1 mrg return END_BUILTINS;
8162 1.1 mrg return DECL_FUNCTION_CODE (fndecl);
8163 1.1 mrg }
8164 1.1 mrg
8165 1.1 mrg if (! more_const_call_expr_args_p (&iter))
8166 1.1 mrg return END_BUILTINS;
8167 1.1 mrg
8168 1.1 mrg arg = next_const_call_expr_arg (&iter);
8169 1.1 mrg argtype = TREE_TYPE (arg);
8170 1.1 mrg
8171 1.1 mrg if (SCALAR_FLOAT_TYPE_P (parmtype))
8172 1.1 mrg {
8173 1.1 mrg if (! SCALAR_FLOAT_TYPE_P (argtype))
8174 1.1 mrg return END_BUILTINS;
8175 1.1 mrg }
8176 1.1 mrg else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8177 1.1 mrg {
8178 1.1 mrg if (! COMPLEX_FLOAT_TYPE_P (argtype))
8179 1.1 mrg return END_BUILTINS;
8180 1.1 mrg }
8181 1.1 mrg else if (POINTER_TYPE_P (parmtype))
8182 1.1 mrg {
8183 1.1 mrg if (! POINTER_TYPE_P (argtype))
8184 1.1 mrg return END_BUILTINS;
8185 1.1 mrg }
8186 1.1 mrg else if (INTEGRAL_TYPE_P (parmtype))
8187 1.1 mrg {
8188 1.1 mrg if (! INTEGRAL_TYPE_P (argtype))
8189 1.1 mrg return END_BUILTINS;
8190 1.1 mrg }
8191 1.1 mrg else
8192 1.1 mrg return END_BUILTINS;
8193 1.1 mrg }
8194 1.1 mrg
8195 1.1 mrg /* Variable-length argument list. */
8196 1.1 mrg return DECL_FUNCTION_CODE (fndecl);
8197 1.1 mrg }
8198 1.1 mrg
8199 1.1 mrg /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8200 1.1 mrg evaluate to a constant. */
8201 1.1 mrg
8202 1.1 mrg static tree
8203 1.1 mrg fold_builtin_constant_p (tree arg)
8204 1.1 mrg {
8205 1.1 mrg /* We return 1 for a numeric type that's known to be a constant
8206 1.1 mrg value at compile-time or for an aggregate type that's a
8207 1.1 mrg literal constant. */
8208 1.1 mrg STRIP_NOPS (arg);
8209 1.1 mrg
8210 1.1 mrg /* If we know this is a constant, emit the constant of one. */
8211 1.1 mrg if (CONSTANT_CLASS_P (arg)
8212 1.1 mrg || (TREE_CODE (arg) == CONSTRUCTOR
8213 1.1 mrg && TREE_CONSTANT (arg)))
8214 1.1 mrg return integer_one_node;
8215 1.1 mrg if (TREE_CODE (arg) == ADDR_EXPR)
8216 1.1 mrg {
8217 1.1 mrg tree op = TREE_OPERAND (arg, 0);
8218 1.1 mrg if (TREE_CODE (op) == STRING_CST
8219 1.1 mrg || (TREE_CODE (op) == ARRAY_REF
8220 1.1 mrg && integer_zerop (TREE_OPERAND (op, 1))
8221 1.1 mrg && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8222 1.1 mrg return integer_one_node;
8223 1.1 mrg }
8224 1.1 mrg
8225 1.1 mrg /* If this expression has side effects, show we don't know it to be a
8226 1.1 mrg constant. Likewise if it's a pointer or aggregate type since in
8227 1.1 mrg those case we only want literals, since those are only optimized
8228 1.1 mrg when generating RTL, not later.
8229 1.1 mrg And finally, if we are compiling an initializer, not code, we
8230 1.1 mrg need to return a definite result now; there's not going to be any
8231 1.1 mrg more optimization done. */
8232 1.1 mrg if (TREE_SIDE_EFFECTS (arg)
8233 1.1 mrg || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8234 1.1 mrg || POINTER_TYPE_P (TREE_TYPE (arg))
8235 1.1 mrg || cfun == 0
8236 1.1 mrg || folding_initializer
8237 1.1 mrg || force_folding_builtin_constant_p)
8238 1.1 mrg return integer_zero_node;
8239 1.1 mrg
8240 1.1 mrg return NULL_TREE;
8241 1.1 mrg }
8242 1.1 mrg
8243 1.1 mrg /* Create builtin_expect or builtin_expect_with_probability
8244 1.1 mrg with PRED and EXPECTED as its arguments and return it as a truthvalue.
8245 1.1 mrg Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
8246 1.1 mrg builtin_expect_with_probability instead uses third argument as PROBABILITY
8247 1.1 mrg value. */
8248 1.1 mrg
8249 1.1 mrg static tree
8250 1.1 mrg build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8251 1.1 mrg tree predictor, tree probability)
8252 1.1 mrg {
8253 1.1 mrg tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8254 1.1 mrg
8255 1.1 mrg fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8256 1.1 mrg : BUILT_IN_EXPECT_WITH_PROBABILITY);
8257 1.1 mrg arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8258 1.1 mrg ret_type = TREE_TYPE (TREE_TYPE (fn));
8259 1.1 mrg pred_type = TREE_VALUE (arg_types);
8260 1.1 mrg expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8261 1.1 mrg
8262 1.1 mrg pred = fold_convert_loc (loc, pred_type, pred);
8263 1.1 mrg expected = fold_convert_loc (loc, expected_type, expected);
8264 1.1 mrg
8265 1.1 mrg if (probability)
8266 1.1 mrg call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8267 1.1 mrg else
8268 1.1 mrg call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8269 1.1 mrg predictor);
8270 1.1 mrg
8271 1.1 mrg return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8272 1.1 mrg build_int_cst (ret_type, 0));
8273 1.1 mrg }
8274 1.1 mrg
8275 1.1 mrg /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8276 1.1 mrg NULL_TREE if no simplification is possible. */
8277 1.1 mrg
8278 1.1 mrg tree
8279 1.1 mrg fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8280 1.1 mrg tree arg3)
8281 1.1 mrg {
8282 1.1 mrg tree inner, fndecl, inner_arg0;
8283 1.1 mrg enum tree_code code;
8284 1.1 mrg
8285 1.1 mrg /* Distribute the expected value over short-circuiting operators.
8286 1.1 mrg See through the cast from truthvalue_type_node to long. */
8287 1.1 mrg inner_arg0 = arg0;
8288 1.1 mrg while (CONVERT_EXPR_P (inner_arg0)
8289 1.1 mrg && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8290 1.1 mrg && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8291 1.1 mrg inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8292 1.1 mrg
8293 1.1 mrg /* If this is a builtin_expect within a builtin_expect keep the
8294 1.1 mrg inner one. See through a comparison against a constant. It
8295 1.1 mrg might have been added to create a thruthvalue. */
8296 1.1 mrg inner = inner_arg0;
8297 1.1 mrg
8298 1.1 mrg if (COMPARISON_CLASS_P (inner)
8299 1.1 mrg && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8300 1.1 mrg inner = TREE_OPERAND (inner, 0);
8301 1.1 mrg
8302 1.1 mrg if (TREE_CODE (inner) == CALL_EXPR
8303 1.1 mrg && (fndecl = get_callee_fndecl (inner))
8304 1.1 mrg && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8305 1.1 mrg || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8306 1.1 mrg return arg0;
8307 1.1 mrg
8308 1.1 mrg inner = inner_arg0;
8309 1.1 mrg code = TREE_CODE (inner);
8310 1.1 mrg if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8311 1.1 mrg {
8312 1.1 mrg tree op0 = TREE_OPERAND (inner, 0);
8313 1.1 mrg tree op1 = TREE_OPERAND (inner, 1);
8314 1.1 mrg arg1 = save_expr (arg1);
8315 1.1 mrg
8316 1.1 mrg op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8317 1.1 mrg op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8318 1.1 mrg inner = build2 (code, TREE_TYPE (inner), op0, op1);
8319 1.1 mrg
8320 1.1 mrg return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8321 1.1 mrg }
8322 1.1 mrg
8323 1.1 mrg /* If the argument isn't invariant then there's nothing else we can do. */
8324 1.1 mrg if (!TREE_CONSTANT (inner_arg0))
8325 1.1 mrg return NULL_TREE;
8326 1.1 mrg
8327 1.1 mrg /* If we expect that a comparison against the argument will fold to
8328 1.1 mrg a constant return the constant. In practice, this means a true
8329 1.1 mrg constant or the address of a non-weak symbol. */
8330 1.1 mrg inner = inner_arg0;
8331 1.1 mrg STRIP_NOPS (inner);
8332 1.1 mrg if (TREE_CODE (inner) == ADDR_EXPR)
8333 1.1 mrg {
8334 1.1 mrg do
8335 1.1 mrg {
8336 1.1 mrg inner = TREE_OPERAND (inner, 0);
8337 1.1 mrg }
8338 1.1 mrg while (TREE_CODE (inner) == COMPONENT_REF
8339 1.1 mrg || TREE_CODE (inner) == ARRAY_REF);
8340 1.1 mrg if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8341 1.1 mrg return NULL_TREE;
8342 1.1 mrg }
8343 1.1 mrg
8344 1.1 mrg /* Otherwise, ARG0 already has the proper type for the return value. */
8345 1.1 mrg return arg0;
8346 1.1 mrg }
8347 1.1 mrg
8348 1.1 mrg /* Fold a call to __builtin_classify_type with argument ARG. */
8349 1.1 mrg
8350 1.1 mrg static tree
8351 1.1 mrg fold_builtin_classify_type (tree arg)
8352 1.1 mrg {
8353 1.1 mrg if (arg == 0)
8354 1.1 mrg return build_int_cst (integer_type_node, no_type_class);
8355 1.1 mrg
8356 1.1 mrg return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8357 1.1 mrg }
8358 1.1 mrg
8359 1.1 mrg /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8360 1.1 mrg ARG. */
8361 1.1 mrg
8362 1.1 mrg static tree
8363 1.1 mrg fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8364 1.1 mrg {
8365 1.1 mrg if (!validate_arg (arg, POINTER_TYPE))
8366 1.1 mrg return NULL_TREE;
8367 1.1 mrg else
8368 1.1 mrg {
8369 1.1 mrg c_strlen_data lendata = { };
8370 1.1 mrg tree len = c_strlen (arg, 0, &lendata);
8371 1.1 mrg
8372 1.1 mrg if (len)
8373 1.1 mrg return fold_convert_loc (loc, type, len);
8374 1.1 mrg
8375 1.1 mrg /* TODO: Move this to gimple-ssa-warn-access once the pass runs
8376 1.1 mrg also early enough to detect invalid reads in multimensional
8377 1.1 mrg arrays and struct members. */
8378 1.1 mrg if (!lendata.decl)
8379 1.1 mrg c_strlen (arg, 1, &lendata);
8380 1.1 mrg
8381 1.1 mrg if (lendata.decl)
8382 1.1 mrg {
8383 1.1 mrg if (EXPR_HAS_LOCATION (arg))
8384 1.1 mrg loc = EXPR_LOCATION (arg);
8385 1.1 mrg else if (loc == UNKNOWN_LOCATION)
8386 1.1 mrg loc = input_location;
8387 1.1 mrg warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8388 1.1 mrg }
8389 1.1 mrg
8390 1.1 mrg return NULL_TREE;
8391 1.1 mrg }
8392 1.1 mrg }
8393 1.1 mrg
8394 1.1 mrg /* Fold a call to __builtin_inf or __builtin_huge_val. */
8395 1.1 mrg
8396 1.1 mrg static tree
8397 1.1 mrg fold_builtin_inf (location_t loc, tree type, int warn)
8398 1.1 mrg {
8399 1.1 mrg REAL_VALUE_TYPE real;
8400 1.1 mrg
8401 1.1 mrg /* __builtin_inff is intended to be usable to define INFINITY on all
8402 1.1 mrg targets. If an infinity is not available, INFINITY expands "to a
8403 1.1 mrg positive constant of type float that overflows at translation
8404 1.1 mrg time", footnote "In this case, using INFINITY will violate the
8405 1.1 mrg constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8406 1.1 mrg Thus we pedwarn to ensure this constraint violation is
8407 1.1 mrg diagnosed. */
8408 1.1 mrg if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8409 1.1 mrg pedwarn (loc, 0, "target format does not support infinity");
8410 1.1 mrg
8411 1.1 mrg real_inf (&real);
8412 1.1 mrg return build_real (type, real);
8413 1.1 mrg }
8414 1.1 mrg
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle, ARG1 and ARG2 are pointers through which the sine and
   cosine are stored.  The call is canonicalized to cexpi where
   possible.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      /* Constant angle: try to fold cexpi (arg0) at compile time.  */
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      /* Emit a runtime cexpi call only when the C99 complex math
	 runtime is available and the builtin is implicitly usable.  */
      if (!targetm.libc_has_function (function_c99_math_complex, type)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      /* The call is referenced twice below; make sure it is only
	 evaluated once.  */
      call = builtin_save_expr (call);
    }

  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  /* Build (*arg1 = IMAGPART (call), *arg2 = REALPART (call)):
     cexpi returns cos + i*sin, so the sine is the imaginary part.  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
8464 1.1 mrg
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2
   comparing LEN bytes.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  ARG1 and ARG2 are
     still kept for any side effects they may have.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      /* A const-qualified unsigned char and a const pointer to it in
	 ptr_mode, matching memcmp's byte-wise semantics.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8510 1.1 mrg
8511 1.1 mrg /* Fold a call to builtin isascii with argument ARG. */
8512 1.1 mrg
8513 1.1 mrg static tree
8514 1.1 mrg fold_builtin_isascii (location_t loc, tree arg)
8515 1.1 mrg {
8516 1.1 mrg if (!validate_arg (arg, INTEGER_TYPE))
8517 1.1 mrg return NULL_TREE;
8518 1.1 mrg else
8519 1.1 mrg {
8520 1.1 mrg /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8521 1.1 mrg arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8522 1.1 mrg build_int_cst (integer_type_node,
8523 1.1 mrg ~ (unsigned HOST_WIDE_INT) 0x7f));
8524 1.1 mrg return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8525 1.1 mrg arg, integer_zero_node);
8526 1.1 mrg }
8527 1.1 mrg }
8528 1.1 mrg
8529 1.1 mrg /* Fold a call to builtin toascii with argument ARG. */
8530 1.1 mrg
8531 1.1 mrg static tree
8532 1.1 mrg fold_builtin_toascii (location_t loc, tree arg)
8533 1.1 mrg {
8534 1.1 mrg if (!validate_arg (arg, INTEGER_TYPE))
8535 1.1 mrg return NULL_TREE;
8536 1.1 mrg
8537 1.1 mrg /* Transform toascii(c) -> (c & 0x7f). */
8538 1.1 mrg return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8539 1.1 mrg build_int_cst (integer_type_node, 0x7f));
8540 1.1 mrg }
8541 1.1 mrg
8542 1.1 mrg /* Fold a call to builtin isdigit with argument ARG. */
8543 1.1 mrg
8544 1.1 mrg static tree
8545 1.1 mrg fold_builtin_isdigit (location_t loc, tree arg)
8546 1.1 mrg {
8547 1.1 mrg if (!validate_arg (arg, INTEGER_TYPE))
8548 1.1 mrg return NULL_TREE;
8549 1.1 mrg else
8550 1.1 mrg {
8551 1.1 mrg /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8552 1.1 mrg /* According to the C standard, isdigit is unaffected by locale.
8553 1.1 mrg However, it definitely is affected by the target character set. */
8554 1.1 mrg unsigned HOST_WIDE_INT target_digit0
8555 1.1 mrg = lang_hooks.to_target_charset ('0');
8556 1.1 mrg
8557 1.1 mrg if (target_digit0 == 0)
8558 1.1 mrg return NULL_TREE;
8559 1.1 mrg
8560 1.1 mrg arg = fold_convert_loc (loc, unsigned_type_node, arg);
8561 1.1 mrg arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8562 1.1 mrg build_int_cst (unsigned_type_node, target_digit0));
8563 1.1 mrg return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8564 1.1 mrg build_int_cst (unsigned_type_node, 9));
8565 1.1 mrg }
8566 1.1 mrg }
8567 1.1 mrg
8568 1.1 mrg /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8569 1.1 mrg
8570 1.1 mrg static tree
8571 1.1 mrg fold_builtin_fabs (location_t loc, tree arg, tree type)
8572 1.1 mrg {
8573 1.1 mrg if (!validate_arg (arg, REAL_TYPE))
8574 1.1 mrg return NULL_TREE;
8575 1.1 mrg
8576 1.1 mrg arg = fold_convert_loc (loc, type, arg);
8577 1.1 mrg return fold_build1_loc (loc, ABS_EXPR, type, arg);
8578 1.1 mrg }
8579 1.1 mrg
8580 1.1 mrg /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8581 1.1 mrg
8582 1.1 mrg static tree
8583 1.1 mrg fold_builtin_abs (location_t loc, tree arg, tree type)
8584 1.1 mrg {
8585 1.1 mrg if (!validate_arg (arg, INTEGER_TYPE))
8586 1.1 mrg return NULL_TREE;
8587 1.1 mrg
8588 1.1 mrg arg = fold_convert_loc (loc, type, arg);
8589 1.1 mrg return fold_build1_loc (loc, ABS_EXPR, type, arg);
8590 1.1 mrg }
8591 1.1 mrg
8592 1.1 mrg /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8593 1.1 mrg
8594 1.1 mrg static tree
8595 1.1 mrg fold_builtin_carg (location_t loc, tree arg, tree type)
8596 1.1 mrg {
8597 1.1 mrg if (validate_arg (arg, COMPLEX_TYPE)
8598 1.1 mrg && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8599 1.1 mrg {
8600 1.1 mrg tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8601 1.1 mrg
8602 1.1 mrg if (atan2_fn)
8603 1.1 mrg {
8604 1.1 mrg tree new_arg = builtin_save_expr (arg);
8605 1.1 mrg tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8606 1.1 mrg tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8607 1.1 mrg return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8608 1.1 mrg }
8609 1.1 mrg }
8610 1.1 mrg
8611 1.1 mrg return NULL_TREE;
8612 1.1 mrg }
8613 1.1 mrg
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value to decompose, ARG1 the int* receiving the exponent, and
   RETTYPE the real type of the result.  Only constant ARG0 is folded.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a constant argument that did not overflow.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* For +-0, return (*exp = 0, +-0).  */
	  /* For +-NaN or +-Inf, *exp is unspecified, but something should
	     be stored there so that it isn't read from uninitialized object.
	     As glibc and newlib store *exp = 0 for +-Inf/NaN, storing
	     0 here as well is easiest.  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
8671 1.1 mrg
8672 1.1 mrg /* Fold a call to builtin modf. */
8673 1.1 mrg
8674 1.1 mrg static tree
8675 1.1 mrg fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8676 1.1 mrg {
8677 1.1 mrg if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8678 1.1 mrg return NULL_TREE;
8679 1.1 mrg
8680 1.1 mrg STRIP_NOPS (arg0);
8681 1.1 mrg
8682 1.1 mrg if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8683 1.1 mrg return NULL_TREE;
8684 1.1 mrg
8685 1.1 mrg arg1 = build_fold_indirect_ref_loc (loc, arg1);
8686 1.1 mrg
8687 1.1 mrg /* Proceed if a valid pointer type was passed in. */
8688 1.1 mrg if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8689 1.1 mrg {
8690 1.1 mrg const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8691 1.1 mrg REAL_VALUE_TYPE trunc, frac;
8692 1.1 mrg
8693 1.1 mrg switch (value->cl)
8694 1.1 mrg {
8695 1.1 mrg case rvc_nan:
8696 1.1 mrg case rvc_zero:
8697 1.1 mrg /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8698 1.1 mrg trunc = frac = *value;
8699 1.1 mrg break;
8700 1.1 mrg case rvc_inf:
8701 1.1 mrg /* For +-Inf, return (*arg1 = arg0, +-0). */
8702 1.1 mrg frac = dconst0;
8703 1.1 mrg frac.sign = value->sign;
8704 1.1 mrg trunc = *value;
8705 1.1 mrg break;
8706 1.1 mrg case rvc_normal:
8707 1.1 mrg /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8708 1.1 mrg real_trunc (&trunc, VOIDmode, value);
8709 1.1 mrg real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8710 1.1 mrg /* If the original number was negative and already
8711 1.1 mrg integral, then the fractional part is -0.0. */
8712 1.1 mrg if (value->sign && frac.cl == rvc_zero)
8713 1.1 mrg frac.sign = value->sign;
8714 1.1 mrg break;
8715 1.1 mrg }
8716 1.1 mrg
8717 1.1 mrg /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8718 1.1 mrg arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8719 1.1 mrg build_real (rettype, trunc));
8720 1.1 mrg TREE_SIDE_EFFECTS (arg1) = 1;
8721 1.1 mrg return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8722 1.1 mrg build_real (rettype, frac));
8723 1.1 mrg }
8724 1.1 mrg
8725 1.1 mrg return NULL_TREE;
8726 1.1 mrg }
8727 1.1 mrg
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If an optab/insn exists for this classification, prefer the RTL
     expansion and do not fold here.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* IBM extended (double-double) format: a composite of two doubles.  */
  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	/* R becomes the largest finite value of MODE.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string3 (&r, buf, mode);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	/* R becomes the largest finite value of MODE; islessequal is
	   false for NaN and for |x| beyond the finite range.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string3 (&r, buf, mode);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* ARG appears several times below; evaluate it only once.  */
	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	/* RMAX is the largest finite value; RMIN the smallest normal
	   (1 * 2^(emin-1), spelled per decimal/binary format).  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string3 (&rmax, buf, mode);
	if (DECIMAL_FLOAT_MODE_P (mode))
	  sprintf (buf, "1E%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	else
	  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string3 (&rmin, buf, orig_mode);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
8889 1.1 mrg
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   FNDECL is the called function's decl, ARG is the argument for the
   call, and BUILTIN_INDEX selects which classification to fold.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Fold to 1/0 when the argument is provably (not) infinite;
	 keep ARG for its side effects.  */
      if (tree_expr_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (!tree_expr_maybe_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 booleans.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* Fold to 1/0 when finiteness is provable either way.  */
      if (tree_expr_finite_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Fold to 1/0 when NaN-ness is provable either way.  */
      if (tree_expr_nan_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (!tree_expr_maybe_nan_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      /* isnan(x) -> x unordered with itself; ARG is used twice, so
	 evaluate it only once.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
8971 1.1 mrg
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Classify |arg|; the ABS_EXPR is shared by all tests below.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* Innermost test: zero vs subnormal.  The conditional chain is
     built inside-out.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* R becomes the smallest normal value, 1 * 2^(emin-1).  */
  if (DECIMAL_FLOAT_MODE_P (mode))
    sprintf (buf, "1E%d", REAL_MODE_FORMAT (mode)->emin - 1);
  else
    sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string3 (&r, buf, mode);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			 fp_normal, res);

  /* The infinity test is skipped entirely when ARG provably cannot
     be infinite.  */
  if (tree_expr_maybe_infinite_p (arg))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  /* Likewise the NaN test; ORDERED_EXPR is false only for NaN.  */
  if (tree_expr_maybe_nan_p (arg))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     res, fp_nan);
    }

  return res;
}
9048 1.1 mrg
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  /* Perform the comparison in the common type.  */
  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* __builtin_isunordered itself: fold to a constant if NaN-ness
	 of either operand is provable, keeping both for side effects.  */
      if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
	return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
      if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* Use the quiet (unordered) comparison only when a NaN operand is
     possible; the codes give the opposite of the desired result, so
     negate.  */
  code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
	 ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
		      fold_build2_loc (loc, code, type, arg0, arg1));
}
9100 1.1 mrg
9101 1.1 mrg /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9102 1.1 mrg arithmetics if it can never overflow, or into internal functions that
9103 1.1 mrg return both result of arithmetics and overflowed boolean flag in
9104 1.1 mrg a complex integer result, or some other check for overflow.
9105 1.1 mrg Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9106 1.1 mrg checking part of that. */
9107 1.1 mrg
9108 1.1 mrg static tree
9109 1.1 mrg fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9110 1.1 mrg tree arg0, tree arg1, tree arg2)
9111 1.1 mrg {
9112 1.1 mrg enum internal_fn ifn = IFN_LAST;
9113 1.1 mrg /* The code of the expression corresponding to the built-in. */
9114 1.1 mrg enum tree_code opcode = ERROR_MARK;
9115 1.1 mrg bool ovf_only = false;
9116 1.1 mrg
9117 1.1 mrg switch (fcode)
9118 1.1 mrg {
9119 1.1 mrg case BUILT_IN_ADD_OVERFLOW_P:
9120 1.1 mrg ovf_only = true;
9121 1.1 mrg /* FALLTHRU */
9122 1.1 mrg case BUILT_IN_ADD_OVERFLOW:
9123 1.1 mrg case BUILT_IN_SADD_OVERFLOW:
9124 1.1 mrg case BUILT_IN_SADDL_OVERFLOW:
9125 1.1 mrg case BUILT_IN_SADDLL_OVERFLOW:
9126 1.1 mrg case BUILT_IN_UADD_OVERFLOW:
9127 1.1 mrg case BUILT_IN_UADDL_OVERFLOW:
9128 1.1 mrg case BUILT_IN_UADDLL_OVERFLOW:
9129 1.1 mrg opcode = PLUS_EXPR;
9130 1.1 mrg ifn = IFN_ADD_OVERFLOW;
9131 1.1 mrg break;
9132 1.1 mrg case BUILT_IN_SUB_OVERFLOW_P:
9133 1.1 mrg ovf_only = true;
9134 1.1 mrg /* FALLTHRU */
9135 1.1 mrg case BUILT_IN_SUB_OVERFLOW:
9136 1.1 mrg case BUILT_IN_SSUB_OVERFLOW:
9137 1.1 mrg case BUILT_IN_SSUBL_OVERFLOW:
9138 1.1 mrg case BUILT_IN_SSUBLL_OVERFLOW:
9139 1.1 mrg case BUILT_IN_USUB_OVERFLOW:
9140 1.1 mrg case BUILT_IN_USUBL_OVERFLOW:
9141 1.1 mrg case BUILT_IN_USUBLL_OVERFLOW:
9142 1.1 mrg opcode = MINUS_EXPR;
9143 1.1 mrg ifn = IFN_SUB_OVERFLOW;
9144 1.1 mrg break;
9145 1.1 mrg case BUILT_IN_MUL_OVERFLOW_P:
9146 1.1 mrg ovf_only = true;
9147 1.1 mrg /* FALLTHRU */
9148 1.1 mrg case BUILT_IN_MUL_OVERFLOW:
9149 1.1 mrg case BUILT_IN_SMUL_OVERFLOW:
9150 1.1 mrg case BUILT_IN_SMULL_OVERFLOW:
9151 1.1 mrg case BUILT_IN_SMULLL_OVERFLOW:
9152 1.1 mrg case BUILT_IN_UMUL_OVERFLOW:
9153 1.1 mrg case BUILT_IN_UMULL_OVERFLOW:
9154 1.1 mrg case BUILT_IN_UMULLL_OVERFLOW:
9155 1.1 mrg opcode = MULT_EXPR;
9156 1.1 mrg ifn = IFN_MUL_OVERFLOW;
9157 1.1 mrg break;
9158 1.1 mrg default:
9159 1.1 mrg gcc_unreachable ();
9160 1.1 mrg }
9161 1.1 mrg
9162 1.1 mrg /* For the "generic" overloads, the first two arguments can have different
9163 1.1 mrg types and the last argument determines the target type to use to check
9164 1.1 mrg for overflow. The arguments of the other overloads all have the same
9165 1.1 mrg type. */
9166 1.1 mrg tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9167 1.1 mrg
9168 1.1 mrg /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9169 1.1 mrg arguments are constant, attempt to fold the built-in call into a constant
9170 1.1 mrg expression indicating whether or not it detected an overflow. */
9171 1.1 mrg if (ovf_only
9172 1.1 mrg && TREE_CODE (arg0) == INTEGER_CST
9173 1.1 mrg && TREE_CODE (arg1) == INTEGER_CST)
9174 1.1 mrg /* Perform the computation in the target type and check for overflow. */
9175 1.1 mrg return omit_one_operand_loc (loc, boolean_type_node,
9176 1.1 mrg arith_overflowed_p (opcode, type, arg0, arg1)
9177 1.1 mrg ? boolean_true_node : boolean_false_node,
9178 1.1 mrg arg2);
9179 1.1 mrg
9180 1.1 mrg tree intres, ovfres;
9181 1.1 mrg if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9182 1.1 mrg {
9183 1.1 mrg intres = fold_binary_loc (loc, opcode, type,
9184 1.1 mrg fold_convert_loc (loc, type, arg0),
9185 1.1 mrg fold_convert_loc (loc, type, arg1));
9186 1.1 mrg if (TREE_OVERFLOW (intres))
9187 1.1 mrg intres = drop_tree_overflow (intres);
9188 1.1 mrg ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9189 1.1 mrg ? boolean_true_node : boolean_false_node);
9190 1.1 mrg }
9191 1.1 mrg else
9192 1.1 mrg {
9193 1.1 mrg tree ctype = build_complex_type (type);
9194 1.1 mrg tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9195 1.1 mrg arg0, arg1);
9196 1.1 mrg tree tgt;
9197 1.1 mrg if (ovf_only)
9198 1.1 mrg {
9199 1.1 mrg tgt = call;
9200 1.1 mrg intres = NULL_TREE;
9201 1.1 mrg }
9202 1.1 mrg else
9203 1.1 mrg {
9204 1.1 mrg /* Force SAVE_EXPR even for calls which satisfy tree_invariant_p_1,
9205 1.1 mrg as while the call itself is const, the REALPART_EXPR store is
9206 1.1 mrg certainly not. And in any case, we want just one call,
9207 1.1 mrg not multiple and trying to CSE them later. */
9208 1.1 mrg TREE_SIDE_EFFECTS (call) = 1;
9209 1.1 mrg tgt = save_expr (call);
9210 1.1 mrg }
9211 1.1 mrg intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9212 1.1 mrg ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9213 1.1 mrg ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9214 1.1 mrg }
9215 1.1 mrg
9216 1.1 mrg if (ovf_only)
9217 1.1 mrg return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9218 1.1 mrg
9219 1.1 mrg tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9220 1.1 mrg tree store
9221 1.1 mrg = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9222 1.1 mrg return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9223 1.1 mrg }
9224 1.1 mrg
9225 1.1 mrg /* Fold a call to __builtin_FILE to a constant string. */
9226 1.1 mrg
9227 1.1 mrg static inline tree
9228 1.1 mrg fold_builtin_FILE (location_t loc)
9229 1.1 mrg {
9230 1.1 mrg if (const char *fname = LOCATION_FILE (loc))
9231 1.1 mrg {
9232 1.1 mrg /* The documentation says this builtin is equivalent to the preprocessor
9233 1.1 mrg __FILE__ macro so it appears appropriate to use the same file prefix
9234 1.1 mrg mappings. */
9235 1.1 mrg fname = remap_macro_filename (fname);
9236 1.1 mrg return build_string_literal (strlen (fname) + 1, fname);
9237 1.1 mrg }
9238 1.1 mrg
9239 1.1 mrg return build_string_literal (1, "");
9240 1.1 mrg }
9241 1.1 mrg
9242 1.1 mrg /* Fold a call to __builtin_FUNCTION to a constant string. */
9243 1.1 mrg
9244 1.1 mrg static inline tree
9245 1.1 mrg fold_builtin_FUNCTION ()
9246 1.1 mrg {
9247 1.1 mrg const char *name = "";
9248 1.1 mrg
9249 1.1 mrg if (current_function_decl)
9250 1.1 mrg name = lang_hooks.decl_printable_name (current_function_decl, 0);
9251 1.1 mrg
9252 1.1 mrg return build_string_literal (strlen (name) + 1, name);
9253 1.1 mrg }
9254 1.1 mrg
9255 1.1 mrg /* Fold a call to __builtin_LINE to an integer constant. */
9256 1.1 mrg
9257 1.1 mrg static inline tree
9258 1.1 mrg fold_builtin_LINE (location_t loc, tree type)
9259 1.1 mrg {
9260 1.1 mrg return build_int_cst (type, LOCATION_LINE (loc));
9261 1.1 mrg }
9262 1.1 mrg
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  /* The builtin's return type selects e.g. which float kind the folded
     constant gets.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    /* CASE_FLT_FN* are macros expanding to multiple case labels, one per
       floating-point variant of the builtin.  */
    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      /* The third argument distinguishes the inf family (true) from the
         HUGE_VAL family (false) below.  */
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* With no argument, classify the null tree.  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
9301 1.1 mrg
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Don't try to fold a call whose argument is already erroneous.  */
  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  /* First attempt pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, expr, type, arg0);

    /* CASE_FLT_FN* macros expand to case labels for every floating-point
       variant of the builtin.  */
    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      /* Only fold when the argument is complex with a real component
	 type.  */
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	/* Try the classification fold first; fall back to the interclass
	   math function fold if it produced nothing.  */
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      /* free (NULL) is a no-op, so drop the whole call.  */
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;

}
9428 1.1 mrg
/* Folds a call EXPR (which may be null) to built-in function FNDECL
   with 2 arguments, ARG0 and ARG1.  This function returns NULL_TREE
   if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Don't try to fold when either argument is already erroneous.  */
  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  /* First attempt pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      /* The reentrant variants take a real value plus a pointer for the
	 sign result.  */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      /* Two-argument form: no explicit expected value or probability.  */
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    /* Each unordered-comparison builtin passes to the common helper its
       unordered tree code together with the corresponding ordered one.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
    case BUILT_IN_DYNAMIC_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1, fcode);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
9512 1.1 mrg
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Don't try to fold when any argument is already erroneous.  */
  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  /* First attempt pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      /* remquo takes two real values plus a pointer for the quotient.  */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      /* Three-argument __builtin_expect: ARG2 fills the helper's third
	 slot; __builtin_expect_with_probability below uses the fourth.  */
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    /* All add/sub/mul overflow builtins, generic and type-specific,
       funnel into one folder keyed by FCODE.  */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
9586 1.1 mrg
9587 1.1 mrg /* Folds a call EXPR (which may be null) to built-in function FNDECL.
9588 1.1 mrg ARGS is an array of NARGS arguments. IGNORE is true if the result
9589 1.1 mrg of the function call is ignored. This function returns NULL_TREE
9590 1.1 mrg if no simplification was possible. */
9591 1.1 mrg
9592 1.1 mrg static tree
9593 1.1 mrg fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
9594 1.1 mrg int nargs, bool)
9595 1.1 mrg {
9596 1.1 mrg tree ret = NULL_TREE;
9597 1.1 mrg
9598 1.1 mrg switch (nargs)
9599 1.1 mrg {
9600 1.1 mrg case 0:
9601 1.1 mrg ret = fold_builtin_0 (loc, fndecl);
9602 1.1 mrg break;
9603 1.1 mrg case 1:
9604 1.1 mrg ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
9605 1.1 mrg break;
9606 1.1 mrg case 2:
9607 1.1 mrg ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
9608 1.1 mrg break;
9609 1.1 mrg case 3:
9610 1.1 mrg ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9611 1.1 mrg break;
9612 1.1 mrg default:
9613 1.1 mrg ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9614 1.1 mrg break;
9615 1.1 mrg }
9616 1.1 mrg if (ret)
9617 1.1 mrg {
9618 1.1 mrg ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9619 1.1 mrg SET_EXPR_LOCATION (ret, loc);
9620 1.1 mrg return ret;
9621 1.1 mrg }
9622 1.1 mrg return NULL_TREE;
9623 1.1 mrg }
9624 1.1 mrg
9625 1.1 mrg /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9626 1.1 mrg list ARGS along with N new arguments in NEWARGS. SKIP is the number
9627 1.1 mrg of arguments in ARGS to be omitted. OLDNARGS is the number of
9628 1.1 mrg elements in ARGS. */
9629 1.1 mrg
9630 1.1 mrg static tree
9631 1.1 mrg rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9632 1.1 mrg int skip, tree fndecl, int n, va_list newargs)
9633 1.1 mrg {
9634 1.1 mrg int nargs = oldnargs - skip + n;
9635 1.1 mrg tree *buffer;
9636 1.1 mrg
9637 1.1 mrg if (n > 0)
9638 1.1 mrg {
9639 1.1 mrg int i, j;
9640 1.1 mrg
9641 1.1 mrg buffer = XALLOCAVEC (tree, nargs);
9642 1.1 mrg for (i = 0; i < n; i++)
9643 1.1 mrg buffer[i] = va_arg (newargs, tree);
9644 1.1 mrg for (j = skip; j < oldnargs; j++, i++)
9645 1.1 mrg buffer[i] = args[j];
9646 1.1 mrg }
9647 1.1 mrg else
9648 1.1 mrg buffer = args + skip;
9649 1.1 mrg
9650 1.1 mrg return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9651 1.1 mrg }
9652 1.1 mrg
9653 1.1 mrg /* Return true if FNDECL shouldn't be folded right now.
9654 1.1 mrg If a built-in function has an inline attribute always_inline
9655 1.1 mrg wrapper, defer folding it after always_inline functions have
9656 1.1 mrg been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9657 1.1 mrg might not be performed. */
9658 1.1 mrg
9659 1.1 mrg bool
9660 1.1 mrg avoid_folding_inline_builtin (tree fndecl)
9661 1.1 mrg {
9662 1.1 mrg return (DECL_DECLARED_INLINE_P (fndecl)
9663 1.1 mrg && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9664 1.1 mrg && cfun
9665 1.1 mrg && !cfun->always_inline_functions_inlined
9666 1.1 mrg && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9667 1.1 mrg }
9668 1.1 mrg
9669 1.1 mrg /* A wrapper function for builtin folding that prevents warnings for
9670 1.1 mrg "statement without effect" and the like, caused by removing the
9671 1.1 mrg call node earlier than the warning is generated. */
9672 1.1 mrg
9673 1.1 mrg tree
9674 1.1 mrg fold_call_expr (location_t loc, tree exp, bool ignore)
9675 1.1 mrg {
9676 1.1 mrg tree ret = NULL_TREE;
9677 1.1 mrg tree fndecl = get_callee_fndecl (exp);
9678 1.1 mrg if (fndecl && fndecl_built_in_p (fndecl)
9679 1.1 mrg /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9680 1.1 mrg yet. Defer folding until we see all the arguments
9681 1.1 mrg (after inlining). */
9682 1.1 mrg && !CALL_EXPR_VA_ARG_PACK (exp))
9683 1.1 mrg {
9684 1.1 mrg int nargs = call_expr_nargs (exp);
9685 1.1 mrg
9686 1.1 mrg /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9687 1.1 mrg instead last argument is __builtin_va_arg_pack (). Defer folding
9688 1.1 mrg even in that case, until arguments are finalized. */
9689 1.1 mrg if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9690 1.1 mrg {
9691 1.1 mrg tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9692 1.1 mrg if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9693 1.1 mrg return NULL_TREE;
9694 1.1 mrg }
9695 1.1 mrg
9696 1.1 mrg if (avoid_folding_inline_builtin (fndecl))
9697 1.1 mrg return NULL_TREE;
9698 1.1 mrg
9699 1.1 mrg if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9700 1.1 mrg return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9701 1.1 mrg CALL_EXPR_ARGP (exp), ignore);
9702 1.1 mrg else
9703 1.1 mrg {
9704 1.1 mrg tree *args = CALL_EXPR_ARGP (exp);
9705 1.1 mrg ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
9706 1.1 mrg if (ret)
9707 1.1 mrg return ret;
9708 1.1 mrg }
9709 1.1 mrg }
9710 1.1 mrg return NULL_TREE;
9711 1.1 mrg }
9712 1.1 mrg
9713 1.1 mrg /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9714 1.1 mrg N arguments are passed in the array ARGARRAY. Return a folded
9715 1.1 mrg expression or NULL_TREE if no simplification was possible. */
9716 1.1 mrg
9717 1.1 mrg tree
9718 1.1 mrg fold_builtin_call_array (location_t loc, tree,
9719 1.1 mrg tree fn,
9720 1.1 mrg int n,
9721 1.1 mrg tree *argarray)
9722 1.1 mrg {
9723 1.1 mrg if (TREE_CODE (fn) != ADDR_EXPR)
9724 1.1 mrg return NULL_TREE;
9725 1.1 mrg
9726 1.1 mrg tree fndecl = TREE_OPERAND (fn, 0);
9727 1.1 mrg if (TREE_CODE (fndecl) == FUNCTION_DECL
9728 1.1 mrg && fndecl_built_in_p (fndecl))
9729 1.1 mrg {
9730 1.1 mrg /* If last argument is __builtin_va_arg_pack (), arguments to this
9731 1.1 mrg function are not finalized yet. Defer folding until they are. */
9732 1.1 mrg if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9733 1.1 mrg {
9734 1.1 mrg tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9735 1.1 mrg if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9736 1.1 mrg return NULL_TREE;
9737 1.1 mrg }
9738 1.1 mrg if (avoid_folding_inline_builtin (fndecl))
9739 1.1 mrg return NULL_TREE;
9740 1.1 mrg if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9741 1.1 mrg return targetm.fold_builtin (fndecl, n, argarray, false);
9742 1.1 mrg else
9743 1.1 mrg return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
9744 1.1 mrg }
9745 1.1 mrg
9746 1.1 mrg return NULL_TREE;
9747 1.1 mrg }
9748 1.1 mrg
9749 1.1 mrg /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9750 1.1 mrg along with N new arguments specified as the "..." parameters. SKIP
9751 1.1 mrg is the number of arguments in EXP to be omitted. This function is used
9752 1.1 mrg to do varargs-to-varargs transformations. */
9753 1.1 mrg
9754 1.1 mrg static tree
9755 1.1 mrg rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9756 1.1 mrg {
9757 1.1 mrg va_list ap;
9758 1.1 mrg tree t;
9759 1.1 mrg
9760 1.1 mrg va_start (ap, n);
9761 1.1 mrg t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9762 1.1 mrg CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9763 1.1 mrg va_end (ap);
9764 1.1 mrg
9765 1.1 mrg return t;
9766 1.1 mrg }
9767 1.1 mrg
9768 1.1 mrg /* Validate a single argument ARG against a tree code CODE representing
9769 1.1 mrg a type. Return true when argument is valid. */
9770 1.1 mrg
9771 1.1 mrg static bool
9772 1.1 mrg validate_arg (const_tree arg, enum tree_code code)
9773 1.1 mrg {
9774 1.1 mrg if (!arg)
9775 1.1 mrg return false;
9776 1.1 mrg else if (code == POINTER_TYPE)
9777 1.1 mrg return POINTER_TYPE_P (TREE_TYPE (arg));
9778 1.1 mrg else if (code == INTEGER_TYPE)
9779 1.1 mrg return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9780 1.1 mrg return code == TREE_CODE (TREE_TYPE (arg));
9781 1.1 mrg }
9782 1.1 mrg
9783 1.1 mrg /* This function validates the types of a function call argument list
9784 1.1 mrg against a specified list of tree_codes. If the last specifier is a 0,
9785 1.1 mrg that represents an ellipses, otherwise the last specifier must be a
9786 1.1 mrg VOID_TYPE.
9787 1.1 mrg
9788 1.1 mrg This is the GIMPLE version of validate_arglist. Eventually we want to
9789 1.1 mrg completely convert builtins.cc to work from GIMPLEs and the tree based
9790 1.1 mrg validate_arglist will then be removed. */
9791 1.1 mrg
9792 1.1 mrg bool
9793 1.1 mrg validate_gimple_arglist (const gcall *call, ...)
9794 1.1 mrg {
9795 1.1 mrg enum tree_code code;
9796 1.1 mrg bool res = 0;
9797 1.1 mrg va_list ap;
9798 1.1 mrg const_tree arg;
9799 1.1 mrg size_t i;
9800 1.1 mrg
9801 1.1 mrg va_start (ap, call);
9802 1.1 mrg i = 0;
9803 1.1 mrg
9804 1.1 mrg do
9805 1.1 mrg {
9806 1.1 mrg code = (enum tree_code) va_arg (ap, int);
9807 1.1 mrg switch (code)
9808 1.1 mrg {
9809 1.1 mrg case 0:
9810 1.1 mrg /* This signifies an ellipses, any further arguments are all ok. */
9811 1.1 mrg res = true;
9812 1.1 mrg goto end;
9813 1.1 mrg case VOID_TYPE:
9814 1.1 mrg /* This signifies an endlink, if no arguments remain, return
9815 1.1 mrg true, otherwise return false. */
9816 1.1 mrg res = (i == gimple_call_num_args (call));
9817 1.1 mrg goto end;
9818 1.1 mrg default:
9819 1.1 mrg /* If no parameters remain or the parameter's code does not
9820 1.1 mrg match the specified code, return false. Otherwise continue
9821 1.1 mrg checking any remaining arguments. */
9822 1.1 mrg arg = gimple_call_arg (call, i++);
9823 1.1 mrg if (!validate_arg (arg, code))
9824 1.1 mrg goto end;
9825 1.1 mrg break;
9826 1.1 mrg }
9827 1.1 mrg }
9828 1.1 mrg while (1);
9829 1.1 mrg
9830 1.1 mrg /* We need gotos here since we can only have one VA_CLOSE in a
9831 1.1 mrg function. */
9832 1.1 mrg end: ;
9833 1.1 mrg va_end (ap);
9834 1.1 mrg
9835 1.1 mrg return res;
9836 1.1 mrg }
9837 1.1 mrg
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* Always returns NULL_RTX, i.e. produces no expansion.  */
  return NULL_RTX;
}
9849 1.1 mrg
9850 1.1 mrg /* Returns true is EXP represents data that would potentially reside
9851 1.1 mrg in a readonly section. */
9852 1.1 mrg
9853 1.1 mrg bool
9854 1.1 mrg readonly_data_expr (tree exp)
9855 1.1 mrg {
9856 1.1 mrg STRIP_NOPS (exp);
9857 1.1 mrg
9858 1.1 mrg if (TREE_CODE (exp) != ADDR_EXPR)
9859 1.1 mrg return false;
9860 1.1 mrg
9861 1.1 mrg exp = get_base_address (TREE_OPERAND (exp, 0));
9862 1.1 mrg if (!exp)
9863 1.1 mrg return false;
9864 1.1 mrg
9865 1.1 mrg /* Make sure we call decl_readonly_section only for trees it
9866 1.1 mrg can handle (since it returns true for everything it doesn't
9867 1.1 mrg understand). */
9868 1.1 mrg if (TREE_CODE (exp) == STRING_CST
9869 1.1 mrg || TREE_CODE (exp) == CONSTRUCTOR
9870 1.1 mrg || (VAR_P (exp) && TREE_STATIC (exp)))
9871 1.1 mrg return decl_readonly_section (exp, 0);
9872 1.1 mrg else
9873 1.1 mrg return false;
9874 1.1 mrg }
9875 1.1 mrg
9876 1.1 mrg /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9877 1.1 mrg to the call, and TYPE is its return type.
9878 1.1 mrg
9879 1.1 mrg Return NULL_TREE if no simplification was possible, otherwise return the
9880 1.1 mrg simplified form of the call as a tree.
9881 1.1 mrg
9882 1.1 mrg The simplified form may be a constant or other expression which
9883 1.1 mrg computes the same value, but in a more efficient manner (including
9884 1.1 mrg calls to other builtin functions).
9885 1.1 mrg
9886 1.1 mrg The call may contain arguments which need to be evaluated, but
9887 1.1 mrg which are not useful to determine the result of the call. In
9888 1.1 mrg this case we return a chain of COMPOUND_EXPRs. The LHS of each
9889 1.1 mrg COMPOUND_EXPR will be an argument which must be evaluated.
9890 1.1 mrg COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9891 1.1 mrg COMPOUND_EXPR in the chain will contain the tree for the simplified
9892 1.1 mrg form of the builtin function call. */
9893 1.1 mrg
9894 1.1 mrg static tree
9895 1.1 mrg fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
9896 1.1 mrg {
9897 1.1 mrg if (!validate_arg (s1, POINTER_TYPE)
9898 1.1 mrg || !validate_arg (s2, POINTER_TYPE))
9899 1.1 mrg return NULL_TREE;
9900 1.1 mrg
9901 1.1 mrg tree fn;
9902 1.1 mrg const char *p1, *p2;
9903 1.1 mrg
9904 1.1 mrg p2 = c_getstr (s2);
9905 1.1 mrg if (p2 == NULL)
9906 1.1 mrg return NULL_TREE;
9907 1.1 mrg
9908 1.1 mrg p1 = c_getstr (s1);
9909 1.1 mrg if (p1 != NULL)
9910 1.1 mrg {
9911 1.1 mrg const char *r = strpbrk (p1, p2);
9912 1.1 mrg tree tem;
9913 1.1 mrg
9914 1.1 mrg if (r == NULL)
9915 1.1 mrg return build_int_cst (TREE_TYPE (s1), 0);
9916 1.1 mrg
9917 1.1 mrg /* Return an offset into the constant string argument. */
9918 1.1 mrg tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9919 1.1 mrg return fold_convert_loc (loc, type, tem);
9920 1.1 mrg }
9921 1.1 mrg
9922 1.1 mrg if (p2[0] == '\0')
9923 1.1 mrg /* strpbrk(x, "") == NULL.
9924 1.1 mrg Evaluate and ignore s1 in case it had side-effects. */
9925 1.1 mrg return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9926 1.1 mrg
9927 1.1 mrg if (p2[1] != '\0')
9928 1.1 mrg return NULL_TREE; /* Really call strpbrk. */
9929 1.1 mrg
9930 1.1 mrg fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9931 1.1 mrg if (!fn)
9932 1.1 mrg return NULL_TREE;
9933 1.1 mrg
9934 1.1 mrg /* New argument list transforming strpbrk(s1, s2) to
9935 1.1 mrg strchr(s1, s2[0]). */
9936 1.1 mrg return build_call_expr_loc (loc, fn, 2, s1,
9937 1.1 mrg build_int_cst (integer_type_node, p2[0]));
9938 1.1 mrg }
9939 1.1 mrg
9940 1.1 mrg /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9941 1.1 mrg to the call.
9942 1.1 mrg
9943 1.1 mrg Return NULL_TREE if no simplification was possible, otherwise return the
9944 1.1 mrg simplified form of the call as a tree.
9945 1.1 mrg
9946 1.1 mrg The simplified form may be a constant or other expression which
9947 1.1 mrg computes the same value, but in a more efficient manner (including
9948 1.1 mrg calls to other builtin functions).
9949 1.1 mrg
9950 1.1 mrg The call may contain arguments which need to be evaluated, but
9951 1.1 mrg which are not useful to determine the result of the call. In
9952 1.1 mrg this case we return a chain of COMPOUND_EXPRs. The LHS of each
9953 1.1 mrg COMPOUND_EXPR will be an argument which must be evaluated.
9954 1.1 mrg COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9955 1.1 mrg COMPOUND_EXPR in the chain will contain the tree for the simplified
9956 1.1 mrg form of the builtin function call. */
9957 1.1 mrg
9958 1.1 mrg static tree
9959 1.1 mrg fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
9960 1.1 mrg {
9961 1.1 mrg if (!validate_arg (s1, POINTER_TYPE)
9962 1.1 mrg || !validate_arg (s2, POINTER_TYPE))
9963 1.1 mrg return NULL_TREE;
9964 1.1 mrg
9965 1.1 mrg if (!check_nul_terminated_array (expr, s1)
9966 1.1 mrg || !check_nul_terminated_array (expr, s2))
9967 1.1 mrg return NULL_TREE;
9968 1.1 mrg
9969 1.1 mrg const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9970 1.1 mrg
9971 1.1 mrg /* If either argument is "", return NULL_TREE. */
9972 1.1 mrg if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9973 1.1 mrg /* Evaluate and ignore both arguments in case either one has
9974 1.1 mrg side-effects. */
9975 1.1 mrg return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9976 1.1 mrg s1, s2);
9977 1.1 mrg return NULL_TREE;
9978 1.1 mrg }
9979 1.1 mrg
9980 1.1 mrg /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9981 1.1 mrg to the call.
9982 1.1 mrg
9983 1.1 mrg Return NULL_TREE if no simplification was possible, otherwise return the
9984 1.1 mrg simplified form of the call as a tree.
9985 1.1 mrg
9986 1.1 mrg The simplified form may be a constant or other expression which
9987 1.1 mrg computes the same value, but in a more efficient manner (including
9988 1.1 mrg calls to other builtin functions).
9989 1.1 mrg
9990 1.1 mrg The call may contain arguments which need to be evaluated, but
9991 1.1 mrg which are not useful to determine the result of the call. In
9992 1.1 mrg this case we return a chain of COMPOUND_EXPRs. The LHS of each
9993 1.1 mrg COMPOUND_EXPR will be an argument which must be evaluated.
9994 1.1 mrg COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9995 1.1 mrg COMPOUND_EXPR in the chain will contain the tree for the simplified
9996 1.1 mrg form of the builtin function call. */
9997 1.1 mrg
9998 1.1 mrg static tree
9999 1.1 mrg fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
10000 1.1 mrg {
10001 1.1 mrg if (!validate_arg (s1, POINTER_TYPE)
10002 1.1 mrg || !validate_arg (s2, POINTER_TYPE))
10003 1.1 mrg return NULL_TREE;
10004 1.1 mrg
10005 1.1 mrg if (!check_nul_terminated_array (expr, s1)
10006 1.1 mrg || !check_nul_terminated_array (expr, s2))
10007 1.1 mrg return NULL_TREE;
10008 1.1 mrg
10009 1.1 mrg /* If the first argument is "", return NULL_TREE. */
10010 1.1 mrg const char *p1 = c_getstr (s1);
10011 1.1 mrg if (p1 && *p1 == '\0')
10012 1.1 mrg {
10013 1.1 mrg /* Evaluate and ignore argument s2 in case it has
10014 1.1 mrg side-effects. */
10015 1.1 mrg return omit_one_operand_loc (loc, size_type_node,
10016 1.1 mrg size_zero_node, s2);
10017 1.1 mrg }
10018 1.1 mrg
10019 1.1 mrg /* If the second argument is "", return __builtin_strlen(s1). */
10020 1.1 mrg const char *p2 = c_getstr (s2);
10021 1.1 mrg if (p2 && *p2 == '\0')
10022 1.1 mrg {
10023 1.1 mrg tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10024 1.1 mrg
10025 1.1 mrg /* If the replacement _DECL isn't initialized, don't do the
10026 1.1 mrg transformation. */
10027 1.1 mrg if (!fn)
10028 1.1 mrg return NULL_TREE;
10029 1.1 mrg
10030 1.1 mrg return build_call_expr_loc (loc, fn, 1, s1);
10031 1.1 mrg }
10032 1.1 mrg return NULL_TREE;
10033 1.1 mrg }
10034 1.1 mrg
10035 1.1 mrg /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10036 1.1 mrg produced. False otherwise. This is done so that we don't output the error
10037 1.1 mrg or warning twice or three times. */
10038 1.1 mrg
10039 1.1 mrg bool
10040 1.1 mrg fold_builtin_next_arg (tree exp, bool va_start_p)
10041 1.1 mrg {
10042 1.1 mrg tree fntype = TREE_TYPE (current_function_decl);
10043 1.1 mrg int nargs = call_expr_nargs (exp);
10044 1.1 mrg tree arg;
10045 1.1 mrg /* There is good chance the current input_location points inside the
10046 1.1 mrg definition of the va_start macro (perhaps on the token for
10047 1.1 mrg builtin) in a system header, so warnings will not be emitted.
10048 1.1 mrg Use the location in real source code. */
10049 1.1 mrg location_t current_location =
10050 1.1 mrg linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10051 1.1 mrg NULL);
10052 1.1 mrg
10053 1.1 mrg if (!stdarg_p (fntype))
10054 1.1 mrg {
10055 1.1 mrg error ("%<va_start%> used in function with fixed arguments");
10056 1.1 mrg return true;
10057 1.1 mrg }
10058 1.1 mrg
10059 1.1 mrg if (va_start_p)
10060 1.1 mrg {
10061 1.1 mrg if (va_start_p && (nargs != 2))
10062 1.1 mrg {
10063 1.1 mrg error ("wrong number of arguments to function %<va_start%>");
10064 1.1 mrg return true;
10065 1.1 mrg }
10066 1.1 mrg arg = CALL_EXPR_ARG (exp, 1);
10067 1.1 mrg }
10068 1.1 mrg /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10069 1.1 mrg when we checked the arguments and if needed issued a warning. */
10070 1.1 mrg else
10071 1.1 mrg {
10072 1.1 mrg if (nargs == 0)
10073 1.1 mrg {
10074 1.1 mrg /* Evidently an out of date version of <stdarg.h>; can't validate
10075 1.1 mrg va_start's second argument, but can still work as intended. */
10076 1.1 mrg warning_at (current_location,
10077 1.1 mrg OPT_Wvarargs,
10078 1.1 mrg "%<__builtin_next_arg%> called without an argument");
10079 1.1 mrg return true;
10080 1.1 mrg }
10081 1.1 mrg else if (nargs > 1)
10082 1.1 mrg {
10083 1.1 mrg error ("wrong number of arguments to function %<__builtin_next_arg%>");
10084 1.1 mrg return true;
10085 1.1 mrg }
10086 1.1 mrg arg = CALL_EXPR_ARG (exp, 0);
10087 1.1 mrg }
10088 1.1 mrg
10089 1.1 mrg if (TREE_CODE (arg) == SSA_NAME
10090 1.1 mrg && SSA_NAME_VAR (arg))
10091 1.1 mrg arg = SSA_NAME_VAR (arg);
10092 1.1 mrg
10093 1.1 mrg /* We destructively modify the call to be __builtin_va_start (ap, 0)
10094 1.1 mrg or __builtin_next_arg (0) the first time we see it, after checking
10095 1.1 mrg the arguments and if needed issuing a warning. */
10096 1.1 mrg if (!integer_zerop (arg))
10097 1.1 mrg {
10098 1.1 mrg tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10099 1.1 mrg
10100 1.1 mrg /* Strip off all nops for the sake of the comparison. This
10101 1.1 mrg is not quite the same as STRIP_NOPS. It does more.
10102 1.1 mrg We must also strip off INDIRECT_EXPR for C++ reference
10103 1.1 mrg parameters. */
10104 1.1 mrg while (CONVERT_EXPR_P (arg)
10105 1.1 mrg || TREE_CODE (arg) == INDIRECT_REF)
10106 1.1 mrg arg = TREE_OPERAND (arg, 0);
10107 1.1 mrg if (arg != last_parm)
10108 1.1 mrg {
10109 1.1 mrg /* FIXME: Sometimes with the tree optimizers we can get the
10110 1.1 mrg not the last argument even though the user used the last
10111 1.1 mrg argument. We just warn and set the arg to be the last
10112 1.1 mrg argument so that we will get wrong-code because of
10113 1.1 mrg it. */
10114 1.1 mrg warning_at (current_location,
10115 1.1 mrg OPT_Wvarargs,
10116 1.1 mrg "second parameter of %<va_start%> not last named argument");
10117 1.1 mrg }
10118 1.1 mrg
10119 1.1 mrg /* Undefined by C99 7.15.1.4p4 (va_start):
10120 1.1 mrg "If the parameter parmN is declared with the register storage
10121 1.1 mrg class, with a function or array type, or with a type that is
10122 1.1 mrg not compatible with the type that results after application of
10123 1.1 mrg the default argument promotions, the behavior is undefined."
10124 1.1 mrg */
10125 1.1 mrg else if (DECL_REGISTER (arg))
10126 1.1 mrg {
10127 1.1 mrg warning_at (current_location,
10128 1.1 mrg OPT_Wvarargs,
10129 1.1 mrg "undefined behavior when second parameter of "
10130 1.1 mrg "%<va_start%> is declared with %<register%> storage");
10131 1.1 mrg }
10132 1.1 mrg
10133 1.1 mrg /* We want to verify the second parameter just once before the tree
10134 1.1 mrg optimizers are run and then avoid keeping it in the tree,
10135 1.1 mrg as otherwise we could warn even for correct code like:
10136 1.1 mrg void foo (int i, ...)
10137 1.1 mrg { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10138 1.1 mrg if (va_start_p)
10139 1.1 mrg CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10140 1.1 mrg else
10141 1.1 mrg CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10142 1.1 mrg }
10143 1.1 mrg return false;
10144 1.1 mrg }
10145 1.1 mrg
10146 1.1 mrg
10147 1.1 mrg /* Expand a call EXP to __builtin_object_size. */
10148 1.1 mrg
10149 1.1 mrg static rtx
10150 1.1 mrg expand_builtin_object_size (tree exp)
10151 1.1 mrg {
10152 1.1 mrg tree ost;
10153 1.1 mrg int object_size_type;
10154 1.1 mrg tree fndecl = get_callee_fndecl (exp);
10155 1.1 mrg
10156 1.1 mrg if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10157 1.1 mrg {
10158 1.1 mrg error ("first argument of %qD must be a pointer, second integer constant",
10159 1.1 mrg fndecl);
10160 1.1 mrg expand_builtin_trap ();
10161 1.1 mrg return const0_rtx;
10162 1.1 mrg }
10163 1.1 mrg
10164 1.1 mrg ost = CALL_EXPR_ARG (exp, 1);
10165 1.1 mrg STRIP_NOPS (ost);
10166 1.1 mrg
10167 1.1 mrg if (TREE_CODE (ost) != INTEGER_CST
10168 1.1 mrg || tree_int_cst_sgn (ost) < 0
10169 1.1 mrg || compare_tree_int (ost, 3) > 0)
10170 1.1 mrg {
10171 1.1 mrg error ("last argument of %qD is not integer constant between 0 and 3",
10172 1.1 mrg fndecl);
10173 1.1 mrg expand_builtin_trap ();
10174 1.1 mrg return const0_rtx;
10175 1.1 mrg }
10176 1.1 mrg
10177 1.1 mrg object_size_type = tree_to_shwi (ost);
10178 1.1 mrg
10179 1.1 mrg return object_size_type < 2 ? constm1_rtx : const0_rtx;
10180 1.1 mrg }
10181 1.1 mrg
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  /* The second argument is an integer for memset, a pointer otherwise.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  /* SIZE is the object size computed by __builtin_object_size.  */
  tree size = CALL_EXPR_ARG (exp, 3);

  /* FIXME: Set access mode to write only for memset et al.  */
  bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
				/*srcstr=*/NULL_TREE, size, access_read_write);

  /* A non-constant object size can't be checked here.  */
  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Build the non-checking call and expand it, preserving the
	 tail-call flag from the original call.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
10299 1.1 mrg
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the call expression for one of the string/snprintf _chk
   builtins and FCODE identifies which one; the switch below extracts
   the relevant arguments for that builtin and hands them to
   check_access.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object returned by __builtin_object_size.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;
  /* The access by the function that's checked.  Except for snprintf
     both writing and reading is checked.  */
  access_mode mode = access_read_write;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      /* The only checked access is the write to the destination.  */
      mode = access_write_only;
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  check_access (exp, size, maxread, srcstr, objsize, mode);
}
10374 1.1 mrg
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  EXP is the call and FCODE
   its BUILT_IN_* code.  The warning is only possible when the length
   of the formatted output can be computed: either the format contains
   no '%' at all, or (for sprintf) it is exactly "%s" with a string
   argument of known length.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* An unknown or "unlimited" (all-ones) object size can't overflow.  */
  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      /* Require a constant, representable string length.  */
      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
		access_write_only);
}
10432 1.1 mrg
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  FCODE distinguishes __builtin_object_size from
   __builtin_dynamic_object_size (the latter sets OST_DYNAMIC below).
   Returns the folded size as a size_type_node constant/expression, or
   NULL_TREE when the size can't be determined yet.  */

static tree
fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
{
  tree bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be an integer constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
    object_size_type |= OST_DYNAMIC;

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      /* Only fold when the result fits size_t (dynamic sizes are
	 accepted as-is).  */
      if ((object_size_type & OST_DYNAMIC)
	  || int_fits_type_p (bytes, size_type_node))
	return fold_convert (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && ((object_size_type & OST_DYNAMIC)
	      || int_fits_type_p (bytes, size_type_node)))
	return fold_convert (size_type_node, bytes);
    }

  return NULL_TREE;
}
10484 1.1 mrg
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, ARGS is the array of NARGS call arguments, and LOC is the
   location for the folded expression.  Returns the folded tree or
   NULL_TREE.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      /* Wrap the result so it carries the call's location and has
	 follow-on warnings suppressed.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      suppress_warning (ret);
      return ret;
    }
  return NULL_TREE;
}
10515 1.1 mrg
10516 1.1 mrg /* Initialize format string characters in the target charset. */
10517 1.1 mrg
10518 1.1 mrg bool
10519 1.1 mrg init_target_chars (void)
10520 1.1 mrg {
10521 1.1 mrg static bool init;
10522 1.1 mrg if (!init)
10523 1.1 mrg {
10524 1.1 mrg target_newline = lang_hooks.to_target_charset ('\n');
10525 1.1 mrg target_percent = lang_hooks.to_target_charset ('%');
10526 1.1 mrg target_c = lang_hooks.to_target_charset ('c');
10527 1.1 mrg target_s = lang_hooks.to_target_charset ('s');
10528 1.1 mrg if (target_newline == 0 || target_percent == 0 || target_c == 0
10529 1.1 mrg || target_s == 0)
10530 1.1 mrg return false;
10531 1.1 mrg
10532 1.1 mrg target_percent_c[0] = target_percent;
10533 1.1 mrg target_percent_c[1] = target_c;
10534 1.1 mrg target_percent_c[2] = '\0';
10535 1.1 mrg
10536 1.1 mrg target_percent_s[0] = target_percent;
10537 1.1 mrg target_percent_s[1] = target_s;
10538 1.1 mrg target_percent_s[2] = '\0';
10539 1.1 mrg
10540 1.1 mrg target_percent_s_newline[0] = target_percent;
10541 1.1 mrg target_percent_s_newline[1] = target_s;
10542 1.1 mrg target_percent_s_newline[2] = target_newline;
10543 1.1 mrg target_percent_s_newline[3] = '\0';
10544 1.1 mrg
10545 1.1 mrg init = true;
10546 1.1 mrg }
10547 1.1 mrg return true;
10548 1.1 mrg }
10549 1.1 mrg
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail,
   otherwise return M converted to a REAL_CST of TYPE.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  /* Round to the precision of TYPE's machine mode.  */
	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
10586 1.1 mrg
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  On success the
   result is a COMPLEX_CST of TYPE built from M's parts.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* Convert the real and imaginary parts separately; TYPE is the
	 complex type so its element type is TREE_TYPE (type).  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
10633 1.1 mrg
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  Returns a COMPOUND_EXPR combining the
   *ARG_QUO store with the remainder value, or NULL_TREE if folding
   wasn't possible.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      /* remquo of NaN/Inf is not folded here.  */
      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Match the target's rounding behavior.  */
	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, MPFR_RNDN);
	  mpfr_from_real (m1, ra1, MPFR_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
10707 1.1 mrg
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  The returned tree is a COMPOUND_EXPR combining the
   *ARG_SG store with the lgamma value, or NULL_TREE on failure.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Match the target's rounding behavior.  */
	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, MPFR_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
10772 1.1 mrg
10773 1.1 mrg /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10774 1.1 mrg mpc function FUNC on it and return the resulting value as a tree
10775 1.1 mrg with type TYPE. The mpfr precision is set to the precision of
10776 1.1 mrg TYPE. We assume that function FUNC returns zero if the result
10777 1.1 mrg could be calculated exactly within the requested precision. If
10778 1.1 mrg DO_NONFINITE is true, then fold expressions containing Inf or NaN
10779 1.1 mrg in the arguments and/or results. */
10780 1.1 mrg
tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.
     Both arguments must be complex constants with REAL_TYPE parts
     and no overflow.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      /* Real and imaginary parts of the two complex operands.  */
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* Unless DO_NONFINITE, fold only when all four components are
	 finite (no NaN or Inf in either operand).  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
        {
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  /* Match MPFR/MPC rounding to the target format's rounding;
	     MPC takes a combined mode for the real and imaginary parts.  */
	  const mpfr_rnd_t rnd = fmt->round_towards_zero
				 ? MPFR_RNDZ : MPFR_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Clear MPFR's global exception flags before the computation;
	     do_mpc_ckconv presumably consults them — verify against its
	     definition.  */
	  mpfr_clear_flags ();
	  /* Compute FUNC (m0, m1) in place into m0.  */
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
10832 1.1 mrg
10833 1.1 mrg /* A wrapper function for builtin folding that prevents warnings for
10834 1.1 mrg "statement without effect" and the like, caused by removing the
10835 1.1 mrg call node earlier than the warning is generated. */
10836 1.1 mrg
10837 1.1 mrg tree
10838 1.1 mrg fold_call_stmt (gcall *stmt, bool ignore)
10839 1.1 mrg {
10840 1.1 mrg tree ret = NULL_TREE;
10841 1.1 mrg tree fndecl = gimple_call_fndecl (stmt);
10842 1.1 mrg location_t loc = gimple_location (stmt);
10843 1.1 mrg if (fndecl && fndecl_built_in_p (fndecl)
10844 1.1 mrg && !gimple_call_va_arg_pack_p (stmt))
10845 1.1 mrg {
10846 1.1 mrg int nargs = gimple_call_num_args (stmt);
10847 1.1 mrg tree *args = (nargs > 0
10848 1.1 mrg ? gimple_call_arg_ptr (stmt, 0)
10849 1.1 mrg : &error_mark_node);
10850 1.1 mrg
10851 1.1 mrg if (avoid_folding_inline_builtin (fndecl))
10852 1.1 mrg return NULL_TREE;
10853 1.1 mrg if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10854 1.1 mrg {
10855 1.1 mrg return targetm.fold_builtin (fndecl, nargs, args, ignore);
10856 1.1 mrg }
10857 1.1 mrg else
10858 1.1 mrg {
10859 1.1 mrg ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
10860 1.1 mrg if (ret)
10861 1.1 mrg {
10862 1.1 mrg /* Propagate location information from original call to
10863 1.1 mrg expansion of builtin. Otherwise things like
10864 1.1 mrg maybe_emit_chk_warning, that operate on the expansion
10865 1.1 mrg of a builtin, will use the wrong location information. */
10866 1.1 mrg if (gimple_has_location (stmt))
10867 1.1 mrg {
10868 1.1 mrg tree realret = ret;
10869 1.1 mrg if (TREE_CODE (ret) == NOP_EXPR)
10870 1.1 mrg realret = TREE_OPERAND (ret, 0);
10871 1.1 mrg if (CAN_HAVE_LOCATION_P (realret)
10872 1.1 mrg && !EXPR_HAS_LOCATION (realret))
10873 1.1 mrg SET_EXPR_LOCATION (realret, loc);
10874 1.1 mrg return realret;
10875 1.1 mrg }
10876 1.1 mrg return ret;
10877 1.1 mrg }
10878 1.1 mrg }
10879 1.1 mrg }
10880 1.1 mrg return NULL_TREE;
10881 1.1 mrg }
10882 1.1 mrg
10883 1.1 mrg /* Look up the function in builtin_decl that corresponds to DECL
10884 1.1 mrg and set ASMSPEC as its user assembler name. DECL must be a
10885 1.1 mrg function decl that declares a builtin. */
10886 1.1 mrg
10887 1.1 mrg void
10888 1.1 mrg set_builtin_user_assembler_name (tree decl, const char *asmspec)
10889 1.1 mrg {
10890 1.1 mrg gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
10891 1.1 mrg && asmspec != 0);
10892 1.1 mrg
10893 1.1 mrg tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10894 1.1 mrg set_user_assembler_name (builtin, asmspec);
10895 1.1 mrg
10896 1.1 mrg if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10897 1.1 mrg && INT_TYPE_SIZE < BITS_PER_WORD)
10898 1.1 mrg {
10899 1.1 mrg scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10900 1.1 mrg set_user_assembler_libfunc ("ffs", asmspec);
10901 1.1 mrg set_optab_libfunc (ffs_optab, mode, "ffs");
10902 1.1 mrg }
10903 1.1 mrg }
10904 1.1 mrg
10905 1.1 mrg /* Return true if DECL is a builtin that expands to a constant or similarly
10906 1.1 mrg simple code. */
10907 1.1 mrg bool
10908 1.1 mrg is_simple_builtin (tree decl)
10909 1.1 mrg {
10910 1.1 mrg if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
10911 1.1 mrg switch (DECL_FUNCTION_CODE (decl))
10912 1.1 mrg {
10913 1.1 mrg /* Builtins that expand to constants. */
10914 1.1 mrg case BUILT_IN_CONSTANT_P:
10915 1.1 mrg case BUILT_IN_EXPECT:
10916 1.1 mrg case BUILT_IN_OBJECT_SIZE:
10917 1.1 mrg case BUILT_IN_UNREACHABLE:
10918 1.1 mrg /* Simple register moves or loads from stack. */
10919 1.1 mrg case BUILT_IN_ASSUME_ALIGNED:
10920 1.1 mrg case BUILT_IN_RETURN_ADDRESS:
10921 1.1 mrg case BUILT_IN_EXTRACT_RETURN_ADDR:
10922 1.1 mrg case BUILT_IN_FROB_RETURN_ADDR:
10923 1.1 mrg case BUILT_IN_RETURN:
10924 1.1 mrg case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10925 1.1 mrg case BUILT_IN_FRAME_ADDRESS:
10926 1.1 mrg case BUILT_IN_VA_END:
10927 1.1 mrg case BUILT_IN_STACK_SAVE:
10928 1.1 mrg case BUILT_IN_STACK_RESTORE:
10929 1.1 mrg case BUILT_IN_DWARF_CFA:
10930 1.1 mrg /* Exception state returns or moves registers around. */
10931 1.1 mrg case BUILT_IN_EH_FILTER:
10932 1.1 mrg case BUILT_IN_EH_POINTER:
10933 1.1 mrg case BUILT_IN_EH_COPY_VALUES:
10934 1.1 mrg return true;
10935 1.1 mrg
10936 1.1 mrg default:
10937 1.1 mrg return false;
10938 1.1 mrg }
10939 1.1 mrg
10940 1.1 mrg return false;
10941 1.1 mrg }
10942 1.1 mrg
10943 1.1 mrg /* Return true if DECL is a builtin that is not expensive, i.e., they are
10944 1.1 mrg most probably expanded inline into reasonably simple code. This is a
10945 1.1 mrg superset of is_simple_builtin. */
10946 1.1 mrg bool
10947 1.1 mrg is_inexpensive_builtin (tree decl)
10948 1.1 mrg {
10949 1.1 mrg if (!decl)
10950 1.1 mrg return false;
10951 1.1 mrg else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10952 1.1 mrg return true;
10953 1.1 mrg else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10954 1.1 mrg switch (DECL_FUNCTION_CODE (decl))
10955 1.1 mrg {
10956 1.1 mrg case BUILT_IN_ABS:
10957 1.1 mrg CASE_BUILT_IN_ALLOCA:
10958 1.1 mrg case BUILT_IN_BSWAP16:
10959 1.1 mrg case BUILT_IN_BSWAP32:
10960 1.1 mrg case BUILT_IN_BSWAP64:
10961 1.1 mrg case BUILT_IN_BSWAP128:
10962 1.1 mrg case BUILT_IN_CLZ:
10963 1.1 mrg case BUILT_IN_CLZIMAX:
10964 1.1 mrg case BUILT_IN_CLZL:
10965 1.1 mrg case BUILT_IN_CLZLL:
10966 1.1 mrg case BUILT_IN_CTZ:
10967 1.1 mrg case BUILT_IN_CTZIMAX:
10968 1.1 mrg case BUILT_IN_CTZL:
10969 1.1 mrg case BUILT_IN_CTZLL:
10970 1.1 mrg case BUILT_IN_FFS:
10971 1.1 mrg case BUILT_IN_FFSIMAX:
10972 1.1 mrg case BUILT_IN_FFSL:
10973 1.1 mrg case BUILT_IN_FFSLL:
10974 1.1 mrg case BUILT_IN_IMAXABS:
10975 1.1 mrg case BUILT_IN_FINITE:
10976 1.1 mrg case BUILT_IN_FINITEF:
10977 1.1 mrg case BUILT_IN_FINITEL:
10978 1.1 mrg case BUILT_IN_FINITED32:
10979 1.1 mrg case BUILT_IN_FINITED64:
10980 1.1 mrg case BUILT_IN_FINITED128:
10981 1.1 mrg case BUILT_IN_FPCLASSIFY:
10982 1.1 mrg case BUILT_IN_ISFINITE:
10983 1.1 mrg case BUILT_IN_ISINF_SIGN:
10984 1.1 mrg case BUILT_IN_ISINF:
10985 1.1 mrg case BUILT_IN_ISINFF:
10986 1.1 mrg case BUILT_IN_ISINFL:
10987 1.1 mrg case BUILT_IN_ISINFD32:
10988 1.1 mrg case BUILT_IN_ISINFD64:
10989 1.1 mrg case BUILT_IN_ISINFD128:
10990 1.1 mrg case BUILT_IN_ISNAN:
10991 1.1 mrg case BUILT_IN_ISNANF:
10992 1.1 mrg case BUILT_IN_ISNANL:
10993 1.1 mrg case BUILT_IN_ISNAND32:
10994 1.1 mrg case BUILT_IN_ISNAND64:
10995 1.1 mrg case BUILT_IN_ISNAND128:
10996 1.1 mrg case BUILT_IN_ISNORMAL:
10997 1.1 mrg case BUILT_IN_ISGREATER:
10998 1.1 mrg case BUILT_IN_ISGREATEREQUAL:
10999 1.1 mrg case BUILT_IN_ISLESS:
11000 1.1 mrg case BUILT_IN_ISLESSEQUAL:
11001 1.1 mrg case BUILT_IN_ISLESSGREATER:
11002 1.1 mrg case BUILT_IN_ISUNORDERED:
11003 1.1 mrg case BUILT_IN_VA_ARG_PACK:
11004 1.1 mrg case BUILT_IN_VA_ARG_PACK_LEN:
11005 1.1 mrg case BUILT_IN_VA_COPY:
11006 1.1 mrg case BUILT_IN_TRAP:
11007 1.1 mrg case BUILT_IN_SAVEREGS:
11008 1.1 mrg case BUILT_IN_POPCOUNTL:
11009 1.1 mrg case BUILT_IN_POPCOUNTLL:
11010 1.1 mrg case BUILT_IN_POPCOUNTIMAX:
11011 1.1 mrg case BUILT_IN_POPCOUNT:
11012 1.1 mrg case BUILT_IN_PARITYL:
11013 1.1 mrg case BUILT_IN_PARITYLL:
11014 1.1 mrg case BUILT_IN_PARITYIMAX:
11015 1.1 mrg case BUILT_IN_PARITY:
11016 1.1 mrg case BUILT_IN_LABS:
11017 1.1 mrg case BUILT_IN_LLABS:
11018 1.1 mrg case BUILT_IN_PREFETCH:
11019 1.1 mrg case BUILT_IN_ACC_ON_DEVICE:
11020 1.1 mrg return true;
11021 1.1 mrg
11022 1.1 mrg default:
11023 1.1 mrg return is_simple_builtin (decl);
11024 1.1 mrg }
11025 1.1 mrg
11026 1.1 mrg return false;
11027 1.1 mrg }
11028 1.1 mrg
11029 1.1 mrg /* Return true if T is a constant and the value cast to a target char
11030 1.1 mrg can be represented by a host char.
11031 1.1 mrg Store the casted char constant in *P if so. */
11032 1.1 mrg
11033 1.1 mrg bool
11034 1.1 mrg target_char_cst_p (tree t, char *p)
11035 1.1 mrg {
11036 1.1 mrg if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11037 1.1 mrg return false;
11038 1.1 mrg
11039 1.1 mrg *p = (char)tree_to_uhwi (t);
11040 1.1 mrg return true;
11041 1.1 mrg }
11042 1.1 mrg
11043 1.1 mrg /* Return true if the builtin DECL is implemented in a standard library.
11044 1.1 mrg Otherwise return false which doesn't guarantee it is not (thus the list
11045 1.1 mrg of handled builtins below may be incomplete). */
11046 1.1 mrg
bool
builtin_with_linkage_p (tree decl)
{
  /* Only normal builtins are considered; BUILT_IN_MD and frontend
     builtins fall through to the conservative "false".  */
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
    {
      /* C99 <math.h> functions: each CASE_FLT_FN covers the float,
	 double and long double variants; CASE_FLT_FN_FLOATN_NX adds
	 the _FloatN/_FloatNx variants where they exist.  */
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;

      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
	/* stpcpy is both referenced in libiberty's pex-win32.c and provided
	   by libiberty's stpcpy.c for MinGW targets so we need to return true
	   in order to be able to build libiberty in LTO mode for them.  */
	return true;

      default:
	break;
    }
  /* A "false" here does not guarantee the builtin lacks linkage; the
     list above is known to be incomplete (see the function comment).  */
  return false;
}
11137 1.1 mrg
11138 1.1 mrg /* Return true if OFFRNG is bounded to a subrange of offset values
11139 1.1 mrg valid for the largest possible object. */
11140 1.1 mrg
11141 1.1 mrg bool
11142 1.1 mrg access_ref::offset_bounded () const
11143 1.1 mrg {
11144 1.1 mrg tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
11145 1.1 mrg tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
11146 1.1 mrg return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
11147 1.1 mrg }
11148 1.1 mrg
11149 1.1 mrg /* If CALLEE has known side effects, fill in INFO and return true.
11150 1.1 mrg See tree-ssa-structalias.cc:find_func_aliases
11151 1.1 mrg for the list of builtins we might need to handle here. */
11152 1.1 mrg
attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);

  /* The returned strings use the fnspec encoding; see attr-fnspec.h
     for the exact format of the per-return and per-argument letters.  */
  switch (code)
    {
      /* All the following functions read memory pointed to by
	 their second argument and write memory pointed to by first
	 argument.
	 strcat/strncat additionally reads memory pointed to by the first
	 argument.  */
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRCAT_CHK:
	return "1cW 1 ";
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRNCAT_CHK:
	return "1cW 13";
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRCPY_CHK:
	return "1cO 1 ";
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPCPY_CHK:
	return ".cO 1 ";
      case BUILT_IN_STRNCPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
	return "1cO313";
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_MEMPCPY_CHK:
	return ".cO313";
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STPNCPY_CHK:
	return ".cO313";
      case BUILT_IN_BCOPY:
	return ".c23O3";
      case BUILT_IN_BZERO:
	return ".cO2";
      case BUILT_IN_MEMCMP:
      case BUILT_IN_MEMCMP_EQ:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRNCASECMP:
	return ".cR3R3";

      /* The following functions read memory pointed to by their
	 first argument.  */
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
      case BUILT_IN_TM_LOG:
      case BUILT_IN_TM_LOG_1:
      case BUILT_IN_TM_LOG_2:
      case BUILT_IN_TM_LOG_4:
      case BUILT_IN_TM_LOG_8:
      case BUILT_IN_TM_LOG_FLOAT:
      case BUILT_IN_TM_LOG_DOUBLE:
      case BUILT_IN_TM_LOG_LDOUBLE:
      case BUILT_IN_TM_LOG_M64:
      case BUILT_IN_TM_LOG_M128:
      case BUILT_IN_TM_LOG_M256:
	return ".cR ";

      case BUILT_IN_INDEX:
      case BUILT_IN_RINDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRLEN:
      case BUILT_IN_STRRCHR:
	return ".cR ";
      case BUILT_IN_STRNLEN:
	return ".cR2";

      /* These read memory pointed to by the first argument.
	 Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.
	 Unix98 specifies that errno is set on allocation failure.  */
      case BUILT_IN_STRDUP:
	return "mCR ";
      case BUILT_IN_STRNDUP:
	return "mCR2";
      /* Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.  */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALIGNED_ALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_GOMP_ALLOC:
	return "mC";
      CASE_BUILT_IN_ALLOCA:
	return "mc";
      /* These read memory pointed to by the first argument with size
	 in the third argument.  */
      case BUILT_IN_MEMCHR:
	return ".cR3";
      /* These read memory pointed to by the first and second arguments.  */
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRCSPN:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
	return ".cR R ";
      /* Freeing memory kills the pointed-to memory.  More importantly
	 the call has to serve as a barrier for moving loads and stores
	 across it.  */
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_FREE:
      case BUILT_IN_GOMP_FREE:
	return ".co ";
      case BUILT_IN_VA_END:
	return ".cO ";
      /* Realloc serves both as allocation point and deallocation point.  */
      case BUILT_IN_REALLOC:
	return ".Cw ";
      /* These write the sign/exponent through their trailing int-pointer
	 argument in addition to returning a value.  */
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
	return ".C. Ot";
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
	return ".c. Ot";
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
	return ".c. . Ot";
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
	return ".c. OtOt";
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	return "1cO3";
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	return ".cO ";
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_RETURN:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_DWARF_CFA:
      case BUILT_IN_RETURN_ADDRESS:
	return ".c";
      case BUILT_IN_ASSUME_ALIGNED:
	return "1cX ";
      /* But posix_memalign stores a pointer into the memory pointed to
	 by its first argument.  */
      case BUILT_IN_POSIX_MEMALIGN:
	return ".cOt";

      default:
	/* An empty fnspec makes no claims about the call's effects.  */
	return "";
    }
}
11343