/* Subroutines used for code generation on the Tilera TILEPro.
   Copyright (C) 2011-2022 Free Software Foundation, Inc.
   Contributed by Walter Lee (walt (at) tilera.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 3, or (at your
option) any later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 1.1 mrg
21 1.1 mrg #define IN_TARGET_CODE 1
22 1.1 mrg
23 1.1 mrg #include "config.h"
24 1.1 mrg #include "system.h"
25 1.1 mrg #include "coretypes.h"
26 1.1 mrg #include "backend.h"
27 1.1 mrg #include "target.h"
28 1.1 mrg #include "rtl.h"
29 1.1 mrg #include "tree.h"
30 1.1 mrg #include "gimple.h"
31 1.1 mrg #include "df.h"
32 1.1 mrg #include "memmodel.h"
33 1.1 mrg #include "tm_p.h"
34 1.1 mrg #include "stringpool.h"
35 1.1 mrg #include "attribs.h"
36 1.1 mrg #include "expmed.h"
37 1.1 mrg #include "optabs.h"
38 1.1 mrg #include "regs.h"
39 1.1 mrg #include "emit-rtl.h"
40 1.1 mrg #include "recog.h"
41 1.1 mrg #include "diagnostic.h"
42 1.1 mrg #include "output.h"
43 1.1 mrg #include "insn-attr.h"
44 1.1 mrg #include "alias.h"
45 1.1 mrg #include "explow.h"
46 1.1 mrg #include "calls.h"
47 1.1 mrg #include "varasm.h"
48 1.1 mrg #include "expr.h"
49 1.1 mrg #include "langhooks.h"
50 1.1 mrg #include "cfgrtl.h"
51 1.1 mrg #include "tm-constrs.h"
52 1.1 mrg #include "dwarf2.h"
53 1.1 mrg #include "fold-const.h"
54 1.1 mrg #include "stor-layout.h"
55 1.1 mrg #include "gimplify.h"
56 1.1 mrg #include "tilepro-builtins.h"
57 1.1 mrg #include "tilepro-multiply.h"
58 1.1 mrg #include "builtins.h"
59 1.1 mrg
60 1.1 mrg /* This file should be included last. */
61 1.1 mrg #include "target-def.h"
62 1.1 mrg
/* Cached SYMBOL_REF for _GLOBAL_OFFSET_TABLE_, created lazily by
   tilepro_got_symbol.  The GTY(()) marker keeps it alive across
   garbage collection.  */
static GTY(()) rtx g_got_symbol = NULL;

/* Report whether we're printing out the first address fragment of a
   POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static bool output_memory_autoinc_first;
70 1.1 mrg
71 1.1 mrg
72 1.1 mrg
74 1.1 mrg /* Option handling */
75 1.1 mrg
76 1.1 mrg /* Implement TARGET_OPTION_OVERRIDE. */
77 1.1 mrg static void
78 1.1 mrg tilepro_option_override (void)
79 1.1 mrg {
80 1.1 mrg /* When modulo scheduling is enabled, we still rely on regular
81 1.1 mrg scheduler for bundling. */
82 1.1 mrg if (flag_modulo_sched)
83 1.1 mrg flag_resched_modulo_sched = 1;
84 1.1 mrg }
85 1.1 mrg
86 1.1 mrg
88 1.1 mrg
89 1.1 mrg /* Implement TARGET_SCALAR_MODE_SUPPORTED_P. */
90 1.1 mrg static bool
91 1.1 mrg tilepro_scalar_mode_supported_p (scalar_mode mode)
92 1.1 mrg {
93 1.1 mrg switch (mode)
94 1.1 mrg {
95 1.1 mrg case E_QImode:
96 1.1 mrg case E_HImode:
97 1.1 mrg case E_SImode:
98 1.1 mrg case E_DImode:
99 1.1 mrg return true;
100 1.1 mrg
101 1.1 mrg case E_SFmode:
102 1.1 mrg case E_DFmode:
103 1.1 mrg return true;
104 1.1 mrg
105 1.1 mrg default:
106 1.1 mrg return false;
107 1.1 mrg }
108 1.1 mrg }
109 1.1 mrg
110 1.1 mrg
111 1.1 mrg /* Implement TARGET_VECTOR_MODE_SUPPORTED_P. */
112 1.1 mrg static bool
113 1.1 mrg tile_vector_mode_supported_p (machine_mode mode)
114 1.1 mrg {
115 1.1 mrg return mode == V4QImode || mode == V2HImode;
116 1.1 mrg }
117 1.1 mrg
118 1.1 mrg
119 1.1 mrg /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
120 1.1 mrg static bool
121 1.1 mrg tilepro_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
122 1.1 mrg rtx x ATTRIBUTE_UNUSED)
123 1.1 mrg {
124 1.1 mrg return true;
125 1.1 mrg }
126 1.1 mrg
127 1.1 mrg
128 1.1 mrg /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
129 1.1 mrg static bool
130 1.1 mrg tilepro_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
131 1.1 mrg {
132 1.1 mrg return decl != NULL;
133 1.1 mrg }
134 1.1 mrg
135 1.1 mrg
136 1.1 mrg /* Implement TARGET_PASS_BY_REFERENCE. Variable sized types are
137 1.1 mrg passed by reference. */
138 1.1 mrg static bool
139 1.1 mrg tilepro_pass_by_reference (cumulative_args_t, const function_arg_info &arg)
140 1.1 mrg {
141 1.1 mrg return (arg.type
142 1.1 mrg && TYPE_SIZE (arg.type)
143 1.1 mrg && TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST);
144 1.1 mrg }
145 1.1 mrg
146 1.1 mrg
147 1.1 mrg /* Implement TARGET_RETURN_IN_MEMORY. */
148 1.1 mrg static bool
149 1.1 mrg tilepro_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
150 1.1 mrg {
151 1.1 mrg return !IN_RANGE (int_size_in_bytes (type),
152 1.1 mrg 0, TILEPRO_NUM_RETURN_REGS * UNITS_PER_WORD);
153 1.1 mrg }
154 1.1 mrg
155 1.1 mrg
156 1.1 mrg /* Implement TARGET_FUNCTION_ARG_BOUNDARY. */
157 1.1 mrg static unsigned int
158 1.1 mrg tilepro_function_arg_boundary (machine_mode mode, const_tree type)
159 1.1 mrg {
160 1.1 mrg unsigned int alignment;
161 1.1 mrg
162 1.1 mrg alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
163 1.1 mrg if (alignment < PARM_BOUNDARY)
164 1.1 mrg alignment = PARM_BOUNDARY;
165 1.1 mrg if (alignment > STACK_BOUNDARY)
166 1.1 mrg alignment = STACK_BOUNDARY;
167 1.1 mrg return alignment;
168 1.1 mrg }
169 1.1 mrg
170 1.1 mrg
171 1.1 mrg /* Implement TARGET_FUNCTION_ARG. */
172 1.1 mrg static rtx
173 1.1 mrg tilepro_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
174 1.1 mrg {
175 1.1 mrg CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
176 1.1 mrg int byte_size = arg.promoted_size_in_bytes ();
177 1.1 mrg bool doubleword_aligned_p;
178 1.1 mrg
179 1.1 mrg if (cum >= TILEPRO_NUM_ARG_REGS)
180 1.1 mrg return NULL_RTX;
181 1.1 mrg
182 1.1 mrg /* See whether the argument has doubleword alignment. */
183 1.1 mrg doubleword_aligned_p =
184 1.1 mrg tilepro_function_arg_boundary (arg.mode, arg.type) > BITS_PER_WORD;
185 1.1 mrg
186 1.1 mrg if (doubleword_aligned_p)
187 1.1 mrg cum += cum & 1;
188 1.1 mrg
189 1.1 mrg /* The ABI does not allow parameters to be passed partially in reg
190 1.1 mrg and partially in stack. */
191 1.1 mrg if ((cum + (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
192 1.1 mrg > TILEPRO_NUM_ARG_REGS)
193 1.1 mrg return NULL_RTX;
194 1.1 mrg
195 1.1 mrg return gen_rtx_REG (arg.mode, cum);
196 1.1 mrg }
197 1.1 mrg
198 1.1 mrg
199 1.1 mrg /* Implement TARGET_FUNCTION_ARG_ADVANCE. */
200 1.1 mrg static void
201 1.1 mrg tilepro_function_arg_advance (cumulative_args_t cum_v,
202 1.1 mrg const function_arg_info &arg)
203 1.1 mrg {
204 1.1 mrg CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
205 1.1 mrg
206 1.1 mrg int byte_size = arg.promoted_size_in_bytes ();
207 1.1 mrg int word_size = (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
208 1.1 mrg bool doubleword_aligned_p;
209 1.1 mrg
210 1.1 mrg /* See whether the argument has doubleword alignment. */
211 1.1 mrg doubleword_aligned_p =
212 1.1 mrg tilepro_function_arg_boundary (arg.mode, arg.type) > BITS_PER_WORD;
213 1.1 mrg
214 1.1 mrg if (doubleword_aligned_p)
215 1.1 mrg *cum += *cum & 1;
216 1.1 mrg
217 1.1 mrg /* If the current argument does not fit in the pretend_args space,
218 1.1 mrg skip over it. */
219 1.1 mrg if (*cum < TILEPRO_NUM_ARG_REGS
220 1.1 mrg && *cum + word_size > TILEPRO_NUM_ARG_REGS)
221 1.1 mrg *cum = TILEPRO_NUM_ARG_REGS;
222 1.1 mrg
223 1.1 mrg *cum += word_size;
224 1.1 mrg }
225 1.1 mrg
226 1.1 mrg
227 1.1 mrg /* Implement TARGET_FUNCTION_VALUE. */
228 1.1 mrg static rtx
229 1.1 mrg tilepro_function_value (const_tree valtype, const_tree fn_decl_or_type,
230 1.1 mrg bool outgoing ATTRIBUTE_UNUSED)
231 1.1 mrg {
232 1.1 mrg machine_mode mode;
233 1.1 mrg int unsigned_p;
234 1.1 mrg
235 1.1 mrg mode = TYPE_MODE (valtype);
236 1.1 mrg unsigned_p = TYPE_UNSIGNED (valtype);
237 1.1 mrg
238 1.1 mrg mode = promote_function_mode (valtype, mode, &unsigned_p,
239 1.1 mrg fn_decl_or_type, 1);
240 1.1 mrg
241 1.1 mrg return gen_rtx_REG (mode, 0);
242 1.1 mrg }
243 1.1 mrg
244 1.1 mrg
245 1.1 mrg /* Implement TARGET_LIBCALL_VALUE. */
246 1.1 mrg static rtx
247 1.1 mrg tilepro_libcall_value (machine_mode mode,
248 1.1 mrg const_rtx fun ATTRIBUTE_UNUSED)
249 1.1 mrg {
250 1.1 mrg return gen_rtx_REG (mode, 0);
251 1.1 mrg }
252 1.1 mrg
253 1.1 mrg
254 1.1 mrg /* Implement FUNCTION_VALUE_REGNO_P. */
255 1.1 mrg static bool
256 1.1 mrg tilepro_function_value_regno_p (const unsigned int regno)
257 1.1 mrg {
258 1.1 mrg return regno < TILEPRO_NUM_RETURN_REGS;
259 1.1 mrg }
260 1.1 mrg
261 1.1 mrg
262 1.1 mrg /* Implement TARGET_BUILD_BUILTIN_VA_LIST. */
263 1.1 mrg static tree
264 1.1 mrg tilepro_build_builtin_va_list (void)
265 1.1 mrg {
266 1.1 mrg tree f_args, f_skip, record, type_decl;
267 1.1 mrg bool owp;
268 1.1 mrg
269 1.1 mrg record = lang_hooks.types.make_type (RECORD_TYPE);
270 1.1 mrg
271 1.1 mrg type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
272 1.1 mrg get_identifier ("__va_list_tag"), record);
273 1.1 mrg
274 1.1 mrg f_args = build_decl (BUILTINS_LOCATION, FIELD_DECL,
275 1.1 mrg get_identifier ("__args"), ptr_type_node);
276 1.1 mrg f_skip = build_decl (BUILTINS_LOCATION, FIELD_DECL,
277 1.1 mrg get_identifier ("__skip"), ptr_type_node);
278 1.1 mrg
279 1.1 mrg DECL_FIELD_CONTEXT (f_args) = record;
280 1.1 mrg
281 1.1 mrg DECL_FIELD_CONTEXT (f_skip) = record;
282 1.1 mrg
283 1.1 mrg TREE_CHAIN (record) = type_decl;
284 1.1 mrg TYPE_NAME (record) = type_decl;
285 1.1 mrg TYPE_FIELDS (record) = f_args;
286 1.1 mrg TREE_CHAIN (f_args) = f_skip;
287 1.1 mrg
288 1.1 mrg /* We know this is being padded and we want it too. It is an
289 1.1 mrg internal type so hide the warnings from the user. */
290 1.1 mrg owp = warn_padded;
291 1.1 mrg warn_padded = false;
292 1.1 mrg
293 1.1 mrg layout_type (record);
294 1.1 mrg
295 1.1 mrg warn_padded = owp;
296 1.1 mrg
297 1.1 mrg /* The correct type is an array type of one element. */
298 1.1 mrg return record;
299 1.1 mrg }
300 1.1 mrg
301 1.1 mrg
302 1.1 mrg /* Implement TARGET_EXPAND_BUILTIN_VA_START. */
303 1.1 mrg static void
304 1.1 mrg tilepro_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
305 1.1 mrg {
306 1.1 mrg tree f_args, f_skip;
307 1.1 mrg tree args, skip, t;
308 1.1 mrg
309 1.1 mrg f_args = TYPE_FIELDS (TREE_TYPE (valist));
310 1.1 mrg f_skip = TREE_CHAIN (f_args);
311 1.1 mrg
312 1.1 mrg args =
313 1.1 mrg build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
314 1.1 mrg skip =
315 1.1 mrg build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);
316 1.1 mrg
317 1.1 mrg /* Find the __args area. */
318 1.1 mrg t = make_tree (TREE_TYPE (args), virtual_incoming_args_rtx);
319 1.1 mrg t = fold_build_pointer_plus_hwi (t,
320 1.1 mrg UNITS_PER_WORD *
321 1.1 mrg (crtl->args.info - TILEPRO_NUM_ARG_REGS));
322 1.1 mrg
323 1.1 mrg if (crtl->args.pretend_args_size > 0)
324 1.1 mrg t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
325 1.1 mrg
326 1.1 mrg t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
327 1.1 mrg TREE_SIDE_EFFECTS (t) = 1;
328 1.1 mrg expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
329 1.1 mrg
330 1.1 mrg /* Find the __skip area. */
331 1.1 mrg t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
332 1.1 mrg t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
333 1.1 mrg t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
334 1.1 mrg TREE_SIDE_EFFECTS (t) = 1;
335 1.1 mrg expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
336 1.1 mrg }
337 1.1 mrg
338 1.1 mrg
339 1.1 mrg /* Implement TARGET_SETUP_INCOMING_VARARGS. */
340 1.1 mrg static void
341 1.1 mrg tilepro_setup_incoming_varargs (cumulative_args_t cum,
342 1.1 mrg const function_arg_info &arg,
343 1.1 mrg int *pretend_args, int no_rtl)
344 1.1 mrg {
345 1.1 mrg CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);
346 1.1 mrg int first_reg;
347 1.1 mrg
348 1.1 mrg /* The caller has advanced CUM up to, but not beyond, the last named
349 1.1 mrg argument. Advance a local copy of CUM past the last "real" named
350 1.1 mrg argument, to find out how many registers are left over. */
351 1.1 mrg targetm.calls.function_arg_advance (pack_cumulative_args (&local_cum), arg);
352 1.1 mrg first_reg = local_cum;
353 1.1 mrg
354 1.1 mrg if (local_cum < TILEPRO_NUM_ARG_REGS)
355 1.1 mrg {
356 1.1 mrg *pretend_args = UNITS_PER_WORD * (TILEPRO_NUM_ARG_REGS - first_reg);
357 1.1 mrg
358 1.1 mrg if (!no_rtl)
359 1.1 mrg {
360 1.1 mrg alias_set_type set = get_varargs_alias_set ();
361 1.1 mrg rtx tmp =
362 1.1 mrg gen_rtx_MEM (BLKmode, plus_constant (Pmode, \
363 1.1 mrg virtual_incoming_args_rtx,
364 1.1 mrg -STACK_POINTER_OFFSET -
365 1.1 mrg UNITS_PER_WORD *
366 1.1 mrg (TILEPRO_NUM_ARG_REGS -
367 1.1 mrg first_reg)));
368 1.1 mrg MEM_NOTRAP_P (tmp) = 1;
369 1.1 mrg set_mem_alias_set (tmp, set);
370 1.1 mrg move_block_from_reg (first_reg, tmp,
371 1.1 mrg TILEPRO_NUM_ARG_REGS - first_reg);
372 1.1 mrg }
373 1.1 mrg }
374 1.1 mrg else
375 1.1 mrg *pretend_args = 0;
376 1.1 mrg }
377 1.1 mrg
378 1.1 mrg
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Gimplify va_arg by updating
   the va_list structure VALIST as required to retrieve an argument of
   type TYPE, and returning that argument.

   ret = va_arg(VALIST, TYPE);

   generates code equivalent to:

     paddedsize = (sizeof(TYPE) + 3) & -4;
     if ((VALIST.__args + paddedsize > VALIST.__skip)
	 & (VALIST.__args <= VALIST.__skip))
       addr = VALIST.__skip + STACK_POINTER_OFFSET;
     else
       addr = VALIST.__args;
     VALIST.__args = addr + paddedsize;
     ret = *(TYPE *)addr;  */
static tree
tilepro_gimplify_va_arg_expr (tree valist, tree type, gimple_seq * pre_p,
			      gimple_seq * post_p ATTRIBUTE_UNUSED)
{
  tree f_args, f_skip;
  tree args, skip;
  HOST_WIDE_INT size, rsize;
  tree addr, tmp;
  bool pass_by_reference_p;

  /* COMPONENT_REFs for the __args and __skip fields of VALIST.  */
  f_args = TYPE_FIELDS (va_list_type_node);
  f_skip = TREE_CHAIN (f_args);

  args =
    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
  skip =
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  addr = create_tmp_var (ptr_type_node, "va_arg");

  /* if an object is dynamically sized, a pointer to it is passed
     instead of the object itself.  */
  pass_by_reference_p = pass_va_arg_by_reference (type);

  if (pass_by_reference_p)
    type = build_pointer_type (type);

  /* rsize is the object size rounded up to a whole number of words.  */
  size = int_size_in_bytes (type);
  rsize = ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD) * UNITS_PER_WORD;

  /* If the alignment of the type is greater than the default for a
     parameter, align to STACK_BOUNDARY.  */
  if (TYPE_ALIGN (type) > PARM_BOUNDARY)
    {
      /* Assert the only case we generate code for: when
	 stack boundary = 2 * parm boundary.  */
      gcc_assert (STACK_BOUNDARY == PARM_BOUNDARY * 2);

      /* args += args & (PARM_BOUNDARY / 8): since __args is always at
	 least PARM_BOUNDARY aligned, adding its misalignment relative
	 to STACK_BOUNDARY rounds it up to STACK_BOUNDARY.  */
      tmp = build2 (BIT_AND_EXPR, sizetype,
		    fold_convert (sizetype, unshare_expr (args)),
		    size_int (PARM_BOUNDARY / 8));
      tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		    unshare_expr (args), tmp);

      gimplify_assign (unshare_expr (args), tmp, pre_p);
    }

  /* Build conditional expression to calculate addr.  The expression
     will be gimplified later.  If the argument would cross __skip
     (the end of the register-save area), fetch it from the stack
     arguments instead.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (args), rsize);
  tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
		build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
		build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
			unshare_expr (skip)));

  tmp = build3 (COND_EXPR, ptr_type_node, tmp,
		build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
			size_int (STACK_POINTER_OFFSET)),
		unshare_expr (args));

  gimplify_assign (addr, tmp, pre_p);

  /* Update VALIST.__args past the fetched argument.  */
  tmp = fold_build_pointer_plus_hwi (addr, rsize);
  gimplify_assign (unshare_expr (args), tmp, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For a by-reference argument, ADDR holds a pointer to the pointer;
     dereference once more to reach the object.  */
  if (pass_by_reference_p)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
468 1.1 mrg
469 1.1 mrg
471 1.1 mrg
472 1.1 mrg /* Implement TARGET_RTX_COSTS. */
473 1.1 mrg static bool
474 1.1 mrg tilepro_rtx_costs (rtx x, machine_mode mode, int outer_code, int opno,
475 1.1 mrg int *total, bool speed)
476 1.1 mrg {
477 1.1 mrg int code = GET_CODE (x);
478 1.1 mrg
479 1.1 mrg switch (code)
480 1.1 mrg {
481 1.1 mrg case CONST_INT:
482 1.1 mrg /* If this is an 8-bit constant, return zero since it can be
483 1.1 mrg used nearly anywhere with no cost. If it is a valid operand
484 1.1 mrg for an ADD or AND, likewise return 0 if we know it will be
485 1.1 mrg used in that context. Otherwise, return 2 since it might be
486 1.1 mrg used there later. All other constants take at least two
487 1.1 mrg insns. */
488 1.1 mrg if (satisfies_constraint_I (x))
489 1.1 mrg {
490 1.1 mrg *total = 0;
491 1.1 mrg return true;
492 1.1 mrg }
493 1.1 mrg else if (outer_code == PLUS && add_operand (x, VOIDmode))
494 1.1 mrg {
495 1.1 mrg /* Slightly penalize large constants even though we can add
496 1.1 mrg them in one instruction, because it forces the use of
497 1.1 mrg 2-wide bundling mode. */
498 1.1 mrg *total = 1;
499 1.1 mrg return true;
500 1.1 mrg }
501 1.1 mrg else if (move_operand (x, SImode))
502 1.1 mrg {
503 1.1 mrg /* We can materialize in one move. */
504 1.1 mrg *total = COSTS_N_INSNS (1);
505 1.1 mrg return true;
506 1.1 mrg }
507 1.1 mrg else
508 1.1 mrg {
509 1.1 mrg /* We can materialize in two moves. */
510 1.1 mrg *total = COSTS_N_INSNS (2);
511 1.1 mrg return true;
512 1.1 mrg }
513 1.1 mrg
514 1.1 mrg return false;
515 1.1 mrg
516 1.1 mrg case CONST:
517 1.1 mrg case LABEL_REF:
518 1.1 mrg case SYMBOL_REF:
519 1.1 mrg *total = COSTS_N_INSNS (2);
520 1.1 mrg return true;
521 1.1 mrg
522 1.1 mrg case CONST_DOUBLE:
523 1.1 mrg *total = COSTS_N_INSNS (4);
524 1.1 mrg return true;
525 1.1 mrg
526 1.1 mrg case HIGH:
527 1.1 mrg *total = 0;
528 1.1 mrg return true;
529 1.1 mrg
530 1.1 mrg case MEM:
531 1.1 mrg /* If outer-code was a sign or zero extension, a cost of
532 1.1 mrg COSTS_N_INSNS (1) was already added in, so account for
533 1.1 mrg that. */
534 1.1 mrg if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
535 1.1 mrg *total = COSTS_N_INSNS (1);
536 1.1 mrg else
537 1.1 mrg *total = COSTS_N_INSNS (2);
538 1.1 mrg return true;
539 1.1 mrg
540 1.1 mrg case PLUS:
541 1.1 mrg /* Convey that s[123]a are efficient. */
542 1.1 mrg if (GET_CODE (XEXP (x, 0)) == MULT
543 1.1 mrg && cint_248_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
544 1.1 mrg {
545 1.1 mrg *total = (rtx_cost (XEXP (XEXP (x, 0), 0), mode,
546 1.1 mrg (enum rtx_code) outer_code, opno, speed)
547 1.1 mrg + rtx_cost (XEXP (x, 1), mode,
548 1.1 mrg (enum rtx_code) outer_code, opno, speed)
549 1.1 mrg + COSTS_N_INSNS (1));
550 1.1 mrg return true;
551 1.1 mrg }
552 1.1 mrg return false;
553 1.1 mrg
554 1.1 mrg case MULT:
555 1.1 mrg *total = COSTS_N_INSNS (2);
556 1.1 mrg return false;
557 1.1 mrg
558 1.1 mrg case SIGN_EXTEND:
559 1.1 mrg case ZERO_EXTEND:
560 1.1 mrg if (outer_code == MULT)
561 1.1 mrg *total = 0;
562 1.1 mrg else
563 1.1 mrg *total = COSTS_N_INSNS (1);
564 1.1 mrg return false;
565 1.1 mrg
566 1.1 mrg case DIV:
567 1.1 mrg case UDIV:
568 1.1 mrg case MOD:
569 1.1 mrg case UMOD:
570 1.1 mrg /* These are handled by software and are very expensive. */
571 1.1 mrg *total = COSTS_N_INSNS (100);
572 1.1 mrg return false;
573 1.1 mrg
574 1.1 mrg case UNSPEC:
575 1.1 mrg case UNSPEC_VOLATILE:
576 1.1 mrg {
577 1.1 mrg int num = XINT (x, 1);
578 1.1 mrg
579 1.1 mrg if (num <= TILEPRO_LAST_LATENCY_1_INSN)
580 1.1 mrg *total = COSTS_N_INSNS (1);
581 1.1 mrg else if (num <= TILEPRO_LAST_LATENCY_2_INSN)
582 1.1 mrg *total = COSTS_N_INSNS (2);
583 1.1 mrg else if (num > TILEPRO_LAST_LATENCY_INSN)
584 1.1 mrg {
585 1.1 mrg if (outer_code == PLUS)
586 1.1 mrg *total = 0;
587 1.1 mrg else
588 1.1 mrg *total = COSTS_N_INSNS (1);
589 1.1 mrg }
590 1.1 mrg else
591 1.1 mrg {
592 1.1 mrg switch (num)
593 1.1 mrg {
594 1.1 mrg case UNSPEC_BLOCKAGE:
595 1.1 mrg case UNSPEC_NETWORK_BARRIER:
596 1.1 mrg *total = 0;
597 1.1 mrg break;
598 1.1 mrg
599 1.1 mrg case UNSPEC_LNK_AND_LABEL:
600 1.1 mrg case UNSPEC_MF:
601 1.1 mrg case UNSPEC_NETWORK_RECEIVE:
602 1.1 mrg case UNSPEC_NETWORK_SEND:
603 1.1 mrg case UNSPEC_TLS_GD_ADD:
604 1.1 mrg *total = COSTS_N_INSNS (1);
605 1.1 mrg break;
606 1.1 mrg
607 1.1 mrg case UNSPEC_TLS_IE_LOAD:
608 1.1 mrg *total = COSTS_N_INSNS (2);
609 1.1 mrg break;
610 1.1 mrg
611 1.1 mrg case UNSPEC_SP_SET:
612 1.1 mrg *total = COSTS_N_INSNS (3);
613 1.1 mrg break;
614 1.1 mrg
615 1.1 mrg case UNSPEC_SP_TEST:
616 1.1 mrg *total = COSTS_N_INSNS (4);
617 1.1 mrg break;
618 1.1 mrg
619 1.1 mrg case UNSPEC_LATENCY_L2:
620 1.1 mrg *total = COSTS_N_INSNS (8);
621 1.1 mrg break;
622 1.1 mrg
623 1.1 mrg case UNSPEC_TLS_GD_CALL:
624 1.1 mrg *total = COSTS_N_INSNS (30);
625 1.1 mrg break;
626 1.1 mrg
627 1.1 mrg case UNSPEC_LATENCY_MISS:
628 1.1 mrg *total = COSTS_N_INSNS (80);
629 1.1 mrg break;
630 1.1 mrg
631 1.1 mrg default:
632 1.1 mrg *total = COSTS_N_INSNS (1);
633 1.1 mrg }
634 1.1 mrg }
635 1.1 mrg return true;
636 1.1 mrg }
637 1.1 mrg
638 1.1 mrg default:
639 1.1 mrg return false;
640 1.1 mrg }
641 1.1 mrg }
642 1.1 mrg
643 1.1 mrg
645 1.1 mrg
646 1.1 mrg /* Returns an SImode integer rtx with value VAL. */
647 1.1 mrg static rtx
648 1.1 mrg gen_int_si (HOST_WIDE_INT val)
649 1.1 mrg {
650 1.1 mrg return gen_int_mode (val, SImode);
651 1.1 mrg }
652 1.1 mrg
653 1.1 mrg
654 1.1 mrg /* Create a temporary variable to hold a partial result, to enable
655 1.1 mrg CSE. */
656 1.1 mrg static rtx
657 1.1 mrg create_temp_reg_if_possible (machine_mode mode, rtx default_reg)
658 1.1 mrg {
659 1.1 mrg return can_create_pseudo_p ()? gen_reg_rtx (mode) : default_reg;
660 1.1 mrg }
661 1.1 mrg
662 1.1 mrg
663 1.1 mrg /* Functions to save and restore machine-specific function data. */
664 1.1 mrg static struct machine_function *
665 1.1 mrg tilepro_init_machine_status (void)
666 1.1 mrg {
667 1.1 mrg return ggc_cleared_alloc<machine_function> ();
668 1.1 mrg }
669 1.1 mrg
670 1.1 mrg
671 1.1 mrg /* Do anything needed before RTL is emitted for each function. */
672 1.1 mrg void
673 1.1 mrg tilepro_init_expanders (void)
674 1.1 mrg {
675 1.1 mrg /* Arrange to initialize and mark the machine per-function
676 1.1 mrg status. */
677 1.1 mrg init_machine_status = tilepro_init_machine_status;
678 1.1 mrg
679 1.1 mrg if (cfun && cfun->machine && flag_pic)
680 1.1 mrg {
681 1.1 mrg static int label_num = 0;
682 1.1 mrg
683 1.1 mrg char text_label_name[32];
684 1.1 mrg
685 1.1 mrg struct machine_function *machine = cfun->machine;
686 1.1 mrg
687 1.1 mrg ASM_GENERATE_INTERNAL_LABEL (text_label_name, "L_PICLNK", label_num++);
688 1.1 mrg
689 1.1 mrg machine->text_label_symbol =
690 1.1 mrg gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (text_label_name));
691 1.1 mrg
692 1.1 mrg machine->text_label_rtx =
693 1.1 mrg gen_rtx_REG (Pmode, TILEPRO_PIC_TEXT_LABEL_REGNUM);
694 1.1 mrg
695 1.1 mrg machine->got_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
696 1.1 mrg
697 1.1 mrg machine->calls_tls_get_addr = false;
698 1.1 mrg }
699 1.1 mrg }
700 1.1 mrg
701 1.1 mrg
702 1.1 mrg /* Return true if X contains a thread-local symbol. */
703 1.1 mrg static bool
704 1.1 mrg tilepro_tls_referenced_p (rtx x)
705 1.1 mrg {
706 1.1 mrg if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
707 1.1 mrg x = XEXP (XEXP (x, 0), 0);
708 1.1 mrg
709 1.1 mrg if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))
710 1.1 mrg return true;
711 1.1 mrg
712 1.1 mrg /* That's all we handle in tilepro_legitimize_tls_address for
713 1.1 mrg now. */
714 1.1 mrg return false;
715 1.1 mrg }
716 1.1 mrg
717 1.1 mrg
718 1.1 mrg /* Return true if X requires a scratch register. It is given that
719 1.1 mrg flag_pic is on and that X satisfies CONSTANT_P. */
720 1.1 mrg static int
721 1.1 mrg tilepro_pic_address_needs_scratch (rtx x)
722 1.1 mrg {
723 1.1 mrg if (GET_CODE (x) == CONST
724 1.1 mrg && GET_CODE (XEXP (x, 0)) == PLUS
725 1.1 mrg && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
726 1.1 mrg || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
727 1.1 mrg && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
728 1.1 mrg return true;
729 1.1 mrg
730 1.1 mrg return false;
731 1.1 mrg }
732 1.1 mrg
733 1.1 mrg
734 1.1 mrg /* Implement TARGET_LEGITIMATE_CONSTANT_P. This is all constants for
735 1.1 mrg which we are willing to load the value into a register via a move
736 1.1 mrg pattern. TLS cannot be treated as a constant because it can
737 1.1 mrg include a function call. */
738 1.1 mrg static bool
739 1.1 mrg tilepro_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
740 1.1 mrg {
741 1.1 mrg switch (GET_CODE (x))
742 1.1 mrg {
743 1.1 mrg case CONST:
744 1.1 mrg case SYMBOL_REF:
745 1.1 mrg return !tilepro_tls_referenced_p (x);
746 1.1 mrg
747 1.1 mrg default:
748 1.1 mrg return true;
749 1.1 mrg }
750 1.1 mrg }
751 1.1 mrg
752 1.1 mrg
753 1.1 mrg /* Return true if the constant value X is a legitimate general operand
754 1.1 mrg when generating PIC code. It is given that flag_pic is on and that
755 1.1 mrg X satisfies CONSTANT_P. */
756 1.1 mrg bool
757 1.1 mrg tilepro_legitimate_pic_operand_p (rtx x)
758 1.1 mrg {
759 1.1 mrg if (tilepro_pic_address_needs_scratch (x))
760 1.1 mrg return false;
761 1.1 mrg
762 1.1 mrg if (tilepro_tls_referenced_p (x))
763 1.1 mrg return false;
764 1.1 mrg
765 1.1 mrg return true;
766 1.1 mrg }
767 1.1 mrg
768 1.1 mrg
769 1.1 mrg /* Return true if the rtx X can be used as an address operand. */
770 1.1 mrg static bool
771 1.1 mrg tilepro_legitimate_address_p (machine_mode ARG_UNUSED (mode), rtx x,
772 1.1 mrg bool strict)
773 1.1 mrg {
774 1.1 mrg if (GET_CODE (x) == SUBREG)
775 1.1 mrg x = SUBREG_REG (x);
776 1.1 mrg
777 1.1 mrg switch (GET_CODE (x))
778 1.1 mrg {
779 1.1 mrg case POST_INC:
780 1.1 mrg case POST_DEC:
781 1.1 mrg if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
782 1.1 mrg return false;
783 1.1 mrg
784 1.1 mrg x = XEXP (x, 0);
785 1.1 mrg break;
786 1.1 mrg
787 1.1 mrg case POST_MODIFY:
788 1.1 mrg if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
789 1.1 mrg return false;
790 1.1 mrg
791 1.1 mrg if (GET_CODE (XEXP (x, 1)) != PLUS)
792 1.1 mrg return false;
793 1.1 mrg
794 1.1 mrg if (!rtx_equal_p (XEXP (x, 0), XEXP (XEXP (x, 1), 0)))
795 1.1 mrg return false;
796 1.1 mrg
797 1.1 mrg if (!satisfies_constraint_I (XEXP (XEXP (x, 1), 1)))
798 1.1 mrg return false;
799 1.1 mrg
800 1.1 mrg x = XEXP (x, 0);
801 1.1 mrg break;
802 1.1 mrg
803 1.1 mrg case REG:
804 1.1 mrg break;
805 1.1 mrg
806 1.1 mrg default:
807 1.1 mrg return false;
808 1.1 mrg }
809 1.1 mrg
810 1.1 mrg /* Check if x is a valid reg. */
811 1.1 mrg if (!REG_P (x))
812 1.1 mrg return false;
813 1.1 mrg
814 1.1 mrg if (strict)
815 1.1 mrg return REGNO_OK_FOR_BASE_P (REGNO (x));
816 1.1 mrg else
817 1.1 mrg return true;
818 1.1 mrg }
819 1.1 mrg
820 1.1 mrg
821 1.1 mrg /* Return the rtx containing SYMBOL_REF to the text label. */
822 1.1 mrg static rtx
823 1.1 mrg tilepro_text_label_symbol (void)
824 1.1 mrg {
825 1.1 mrg return cfun->machine->text_label_symbol;
826 1.1 mrg }
827 1.1 mrg
828 1.1 mrg
829 1.1 mrg /* Return the register storing the value of the text label. */
830 1.1 mrg static rtx
831 1.1 mrg tilepro_text_label_rtx (void)
832 1.1 mrg {
833 1.1 mrg return cfun->machine->text_label_rtx;
834 1.1 mrg }
835 1.1 mrg
836 1.1 mrg
837 1.1 mrg /* Return the register storing the value of the global offset
838 1.1 mrg table. */
839 1.1 mrg static rtx
840 1.1 mrg tilepro_got_rtx (void)
841 1.1 mrg {
842 1.1 mrg return cfun->machine->got_rtx;
843 1.1 mrg }
844 1.1 mrg
845 1.1 mrg
846 1.1 mrg /* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_. */
847 1.1 mrg static rtx
848 1.1 mrg tilepro_got_symbol (void)
849 1.1 mrg {
850 1.1 mrg if (g_got_symbol == NULL)
851 1.1 mrg g_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
852 1.1 mrg
853 1.1 mrg return g_got_symbol;
854 1.1 mrg }
855 1.1 mrg
856 1.1 mrg
857 1.1 mrg /* Return a reference to the got to be used by tls references. */
858 1.1 mrg static rtx
859 1.1 mrg tilepro_tls_got (void)
860 1.1 mrg {
861 1.1 mrg rtx temp;
862 1.1 mrg if (flag_pic)
863 1.1 mrg {
864 1.1 mrg crtl->uses_pic_offset_table = 1;
865 1.1 mrg return tilepro_got_rtx ();
866 1.1 mrg }
867 1.1 mrg
868 1.1 mrg temp = gen_reg_rtx (Pmode);
869 1.1 mrg emit_move_insn (temp, tilepro_got_symbol ());
870 1.1 mrg
871 1.1 mrg return temp;
872 1.1 mrg }
873 1.1 mrg
874 1.1 mrg
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  Returns a pseudo holding the final
   address.  */
static rtx
tilepro_legitimize_tls_address (rtx addr)
{
  rtx ret;

  gcc_assert (can_create_pseudo_p ());

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (SYMBOL_REF_TLS_MODEL (addr))
      {
      case TLS_MODEL_GLOBAL_DYNAMIC:
      case TLS_MODEL_LOCAL_DYNAMIC:
	{
	  rtx r0, temp1, temp2, temp3, got;
	  rtx_insn *last;

	  ret = gen_reg_rtx (Pmode);
	  r0 = gen_rtx_REG (Pmode, 0);
	  temp1 = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);
	  temp3 = gen_reg_rtx (Pmode);

	  /* GD/LD model: build the GOT-relative argument with an
	     addhi/addlo pair, call __tls_get_addr (argument and result
	     in r0), then add the symbol's offset to the result.  */
	  got = tilepro_tls_got ();
	  emit_insn (gen_tls_gd_addhi (temp1, got, addr));
	  emit_insn (gen_tls_gd_addlo (temp2, temp1, addr));
	  emit_move_insn (r0, temp2);
	  emit_insn (gen_tls_gd_call (addr));
	  emit_move_insn (temp3, r0);
	  last = emit_insn (gen_tls_gd_add (ret, temp3, addr));
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
	  break;
	}
      case TLS_MODEL_INITIAL_EXEC:
	{
	  rtx temp1, temp2, temp3, got;
	  rtx_insn *last;

	  ret = gen_reg_rtx (Pmode);
	  temp1 = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);
	  temp3 = gen_reg_rtx (Pmode);

	  /* IE model: load the symbol's TP-relative offset from the
	     GOT, then add it to the thread pointer.  */
	  got = tilepro_tls_got ();
	  emit_insn (gen_tls_ie_addhi (temp1, got, addr));
	  emit_insn (gen_tls_ie_addlo (temp2, temp1, addr));
	  emit_insn (gen_tls_ie_load (temp3, temp2, addr));
	  last =
	    emit_move_insn(ret,
			   gen_rtx_PLUS (Pmode,
					 gen_rtx_REG (Pmode,
						      THREAD_POINTER_REGNUM),
					 temp3));
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
	  break;
	}
      case TLS_MODEL_LOCAL_EXEC:
	{
	  rtx temp1;
	  rtx_insn *last;

	  ret = gen_reg_rtx (Pmode);
	  temp1 = gen_reg_rtx (Pmode);

	  /* LE model: the offset is known at link time; add it
	     directly to the thread pointer with an addhi/addlo
	     pair.  */
	  emit_insn (gen_tls_le_addhi (temp1,
				       gen_rtx_REG (Pmode,
						    THREAD_POINTER_REGNUM),
				       addr));
	  last = emit_insn (gen_tls_le_addlo (ret, temp1, addr));
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
	  break;
	}
      default:
	gcc_unreachable ();
      }
  else if (GET_CODE (addr) == CONST)
    {
      rtx base, offset;

      gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);

      /* CONST (PLUS (tls_sym, offset)): legitimize the TLS symbol
	 recursively, then re-add the constant offset.  */
      base = tilepro_legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
      offset = XEXP (XEXP (addr, 0), 1);

      base = force_operand (base, NULL_RTX);
      ret = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
    }
  else
    gcc_unreachable ();

  return ret;
}
968 1.1 mrg
969 1.1 mrg
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if
   nonzero, otherwise we allocate register(s) as necessary.

   Handles three shapes of ORIG: SYMBOL_REF (pc-relative for local
   symbols, GOT load for global ones), CONST (PLUS of a symbolic base
   and an offset, legitimized piecewise), and LABEL_REF (always
   pc-relative).  Anything else is returned unchanged.  */
static rtx
tilepro_legitimize_pic_address (rtx orig,
				machine_mode mode ATTRIBUTE_UNUSED,
				rtx reg)
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx address, pic_ref;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      if (SYMBOL_REF_LOCAL_P (orig))
	{
	  /* If not during reload, allocate another temp reg here for
	     loading in the address, so that these instructions can be
	     optimized properly.  */
	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
	  rtx text_label_symbol = tilepro_text_label_symbol ();
	  rtx text_label_rtx = tilepro_text_label_rtx ();

	  /* Local symbol: build its address pc-relative to the
	     per-function text label with an addli/auli pair.  */
	  emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
				      text_label_symbol));
	  emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
				     text_label_symbol));

	  /* Note: this is conservative.  We use the text_label but we
	     don't use the pic_offset_table.  However, in some cases
	     we may need the pic_offset_table (see
	     tilepro_fixup_pcrel_references).  */
	  crtl->uses_pic_offset_table = 1;

	  address = temp_reg;

	  emit_move_insn (reg, address);
	  return reg;
	}
      else
	{
	  /* If not during reload, allocate another temp reg here for
	     loading in the address, so that these instructions can be
	     optimized properly.  */
	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);

	  gcc_assert (flag_pic);
	  if (flag_pic == 1)
	    {
	      /* flag_pic == 1 (-fpic): a single 16-bit GOT offset.  */
	      emit_insn (gen_add_got16 (temp_reg,
					tilepro_got_rtx (), orig));
	    }
	  else
	    {
	      /* flag_pic == 2 (-fPIC): a full 32-bit GOT offset built
		 in two halves.  */
	      rtx temp_reg2 = create_temp_reg_if_possible (Pmode, reg);
	      emit_insn (gen_addhi_got32 (temp_reg2,
					  tilepro_got_rtx (), orig));
	      emit_insn (gen_addlo_got32 (temp_reg, temp_reg2, orig));
	    }

	  address = temp_reg;

	  /* Global symbol: load its address from the GOT slot.  */
	  pic_ref = gen_const_mem (Pmode, address);
	  crtl->uses_pic_offset_table = 1;
	  emit_move_insn (reg, pic_ref);
	  /* The following put a REG_EQUAL note on this insn, so that
	     it can be optimized by loop.  But it causes the label to
	     be optimized away.  */
	  /* set_unique_reg_note (insn, REG_EQUAL, orig); */
	  return reg;
	}
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      /* Already a legitimized GOT-relative expression: leave it
	 alone.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == tilepro_got_rtx ())
	return orig;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      /* Legitimize the two halves of (const (plus BASE OFFSET))
	 separately, then recombine.  */
      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
      base = tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode,
					     reg);
      offset =
	tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					base == reg ? 0 : reg);

      if (CONST_INT_P (offset))
	{
	  if (can_create_pseudo_p ())
	    offset = force_reg (Pmode, offset);
	  else
	    /* If we reach here, then something is seriously
	       wrong.  */
	    gcc_unreachable ();
	}

      if (can_create_pseudo_p ())
	return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
      else
	gcc_unreachable ();
    }
  else if (GET_CODE (orig) == LABEL_REF)
    {
      rtx address, temp_reg;
      rtx text_label_symbol;
      rtx text_label_rtx;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      /* If not during reload, allocate another temp reg here for
	 loading in the address, so that these instructions can be
	 optimized properly.  */
      temp_reg = create_temp_reg_if_possible (Pmode, reg);
      text_label_symbol = tilepro_text_label_symbol ();
      text_label_rtx = tilepro_text_label_rtx ();

      /* Labels are necessarily local, so compute them pc-relative
	 just like a local SYMBOL_REF.  */
      emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
				  text_label_symbol));
      emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
				 text_label_symbol));

      /* Note: this is conservative.  We use the text_label but we
	 don't use the pic_offset_table.  */
      crtl->uses_pic_offset_table = 1;

      address = temp_reg;

      emit_move_insn (reg, address);

      return reg;
    }

  return orig;
}
1120 1.1 mrg
1121 1.1 mrg
1122 1.1 mrg /* Implement TARGET_LEGITIMIZE_ADDRESS. */
1123 1.1 mrg static rtx
1124 1.1 mrg tilepro_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
1125 1.1 mrg machine_mode mode)
1126 1.1 mrg {
1127 1.1 mrg if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
1128 1.1 mrg && symbolic_operand (x, Pmode) && tilepro_tls_referenced_p (x))
1129 1.1 mrg {
1130 1.1 mrg return tilepro_legitimize_tls_address (x);
1131 1.1 mrg }
1132 1.1 mrg else if (flag_pic)
1133 1.1 mrg {
1134 1.1 mrg return tilepro_legitimize_pic_address (x, mode, 0);
1135 1.1 mrg }
1136 1.1 mrg else
1137 1.1 mrg return x;
1138 1.1 mrg }
1139 1.1 mrg
1140 1.1 mrg
1141 1.1 mrg /* Implement TARGET_DELEGITIMIZE_ADDRESS. */
1142 1.1 mrg static rtx
1143 1.1 mrg tilepro_delegitimize_address (rtx x)
1144 1.1 mrg {
1145 1.1 mrg x = delegitimize_mem_from_attrs (x);
1146 1.1 mrg
1147 1.1 mrg if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
1148 1.1 mrg {
1149 1.1 mrg switch (XINT (XEXP (x, 0), 1))
1150 1.1 mrg {
1151 1.1 mrg case UNSPEC_PCREL_SYM:
1152 1.1 mrg case UNSPEC_GOT16_SYM:
1153 1.1 mrg case UNSPEC_GOT32_SYM:
1154 1.1 mrg case UNSPEC_TLS_GD:
1155 1.1 mrg case UNSPEC_TLS_IE:
1156 1.1 mrg x = XVECEXP (XEXP (x, 0), 0, 0);
1157 1.1 mrg break;
1158 1.1 mrg }
1159 1.1 mrg }
1160 1.1 mrg
1161 1.1 mrg return x;
1162 1.1 mrg }
1163 1.1 mrg
1164 1.1 mrg
1165 1.1 mrg /* Emit code to load the PIC register. */
1166 1.1 mrg static void
1167 1.1 mrg load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED)
1168 1.1 mrg {
1169 1.1 mrg int orig_flag_pic = flag_pic;
1170 1.1 mrg
1171 1.1 mrg rtx got_symbol = tilepro_got_symbol ();
1172 1.1 mrg rtx text_label_symbol = tilepro_text_label_symbol ();
1173 1.1 mrg rtx text_label_rtx = tilepro_text_label_rtx ();
1174 1.1 mrg flag_pic = 0;
1175 1.1 mrg
1176 1.1 mrg emit_insn (gen_insn_lnk_and_label (text_label_rtx, text_label_symbol));
1177 1.1 mrg
1178 1.1 mrg emit_insn (gen_addli_pcrel (tilepro_got_rtx (),
1179 1.1 mrg text_label_rtx, got_symbol, text_label_symbol));
1180 1.1 mrg
1181 1.1 mrg emit_insn (gen_auli_pcrel (tilepro_got_rtx (),
1182 1.1 mrg tilepro_got_rtx (),
1183 1.1 mrg got_symbol, text_label_symbol));
1184 1.1 mrg
1185 1.1 mrg flag_pic = orig_flag_pic;
1186 1.1 mrg
1187 1.1 mrg /* Need to emit this whether or not we obey regdecls, since
1188 1.1 mrg setjmp/longjmp can cause life info to screw up. ??? In the case
1189 1.1 mrg where we don't obey regdecls, this is not sufficient since we may
1190 1.1 mrg not fall out the bottom. */
1191 1.1 mrg emit_use (tilepro_got_rtx ());
1192 1.1 mrg }
1193 1.1 mrg
1194 1.1 mrg
1195 1.1 mrg /* Return the simd variant of the constant NUM of mode MODE, by
1196 1.1 mrg replicating it to fill an interger of mode SImode. NUM is first
1197 1.1 mrg truncated to fit in MODE. */
1198 1.1 mrg rtx
1199 1.1 mrg tilepro_simd_int (rtx num, machine_mode mode)
1200 1.1 mrg {
1201 1.1 mrg HOST_WIDE_INT n = 0;
1202 1.1 mrg
1203 1.1 mrg gcc_assert (CONST_INT_P (num));
1204 1.1 mrg
1205 1.1 mrg n = INTVAL (num);
1206 1.1 mrg
1207 1.1 mrg switch (mode)
1208 1.1 mrg {
1209 1.1 mrg case E_QImode:
1210 1.1 mrg n = 0x01010101 * (n & 0x000000FF);
1211 1.1 mrg break;
1212 1.1 mrg case E_HImode:
1213 1.1 mrg n = 0x00010001 * (n & 0x0000FFFF);
1214 1.1 mrg break;
1215 1.1 mrg case E_SImode:
1216 1.1 mrg break;
1217 1.1 mrg case E_DImode:
1218 1.1 mrg break;
1219 1.1 mrg default:
1220 1.1 mrg gcc_unreachable ();
1221 1.1 mrg }
1222 1.1 mrg
1223 1.1 mrg return gen_int_si (n);
1224 1.1 mrg }
1225 1.1 mrg
1226 1.1 mrg
1227 1.1 mrg /* Split one or more DImode RTL references into pairs of SImode
1228 1.1 mrg references. The RTL can be REG, offsettable MEM, integer constant,
1229 1.1 mrg or CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL
1230 1.1 mrg to split and "num" is its length. lo_half and hi_half are output
1231 1.1 mrg arrays that parallel "operands". */
1232 1.1 mrg void
1233 1.1 mrg split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1234 1.1 mrg {
1235 1.1 mrg while (num--)
1236 1.1 mrg {
1237 1.1 mrg rtx op = operands[num];
1238 1.1 mrg
1239 1.1 mrg /* simplify_subreg refuse to split volatile memory addresses,
1240 1.1 mrg but we still have to handle it. */
1241 1.1 mrg if (MEM_P (op))
1242 1.1 mrg {
1243 1.1 mrg lo_half[num] = adjust_address (op, SImode, 0);
1244 1.1 mrg hi_half[num] = adjust_address (op, SImode, 4);
1245 1.1 mrg }
1246 1.1 mrg else
1247 1.1 mrg {
1248 1.1 mrg lo_half[num] = simplify_gen_subreg (SImode, op,
1249 1.1 mrg GET_MODE (op) == VOIDmode
1250 1.1 mrg ? DImode : GET_MODE (op), 0);
1251 1.1 mrg hi_half[num] = simplify_gen_subreg (SImode, op,
1252 1.1 mrg GET_MODE (op) == VOIDmode
1253 1.1 mrg ? DImode : GET_MODE (op), 4);
1254 1.1 mrg }
1255 1.1 mrg }
1256 1.1 mrg }
1257 1.1 mrg
1258 1.1 mrg
1259 1.1 mrg /* Returns true iff val can be moved into a register in one
1260 1.1 mrg instruction. And if it can, it emits the code to move the
1261 1.1 mrg constant.
1262 1.1 mrg
1263 1.1 mrg If three_wide_only is true, this insists on an instruction that
1264 1.1 mrg works in a bundle containing three instructions. */
1265 1.1 mrg static bool
1266 1.1 mrg expand_set_cint32_one_inst (rtx dest_reg,
1267 1.1 mrg HOST_WIDE_INT val, bool three_wide_only)
1268 1.1 mrg {
1269 1.1 mrg val = trunc_int_for_mode (val, SImode);
1270 1.1 mrg
1271 1.1 mrg if (val == trunc_int_for_mode (val, QImode))
1272 1.1 mrg {
1273 1.1 mrg /* Success! */
1274 1.1 mrg emit_move_insn (dest_reg, GEN_INT (val));
1275 1.1 mrg return true;
1276 1.1 mrg }
1277 1.1 mrg else if (!three_wide_only)
1278 1.1 mrg {
1279 1.1 mrg rtx imm_op = GEN_INT (val);
1280 1.1 mrg
1281 1.1 mrg if (satisfies_constraint_J (imm_op)
1282 1.1 mrg || satisfies_constraint_K (imm_op)
1283 1.1 mrg || satisfies_constraint_N (imm_op)
1284 1.1 mrg || satisfies_constraint_P (imm_op))
1285 1.1 mrg {
1286 1.1 mrg emit_move_insn (dest_reg, imm_op);
1287 1.1 mrg return true;
1288 1.1 mrg }
1289 1.1 mrg }
1290 1.1 mrg
1291 1.1 mrg return false;
1292 1.1 mrg }
1293 1.1 mrg
1294 1.1 mrg
1295 1.1 mrg /* Implement SImode rotatert. */
1296 1.1 mrg static HOST_WIDE_INT
1297 1.1 mrg rotate_right (HOST_WIDE_INT n, int count)
1298 1.1 mrg {
1299 1.1 mrg unsigned HOST_WIDE_INT x = n & 0xFFFFFFFF;
1300 1.1 mrg if (count == 0)
1301 1.1 mrg return x;
1302 1.1 mrg return ((x >> count) | (x << (32 - count))) & 0xFFFFFFFF;
1303 1.1 mrg }
1304 1.1 mrg
1305 1.1 mrg
1306 1.1 mrg /* Return true iff n contains exactly one contiguous sequence of 1
1307 1.1 mrg bits, possibly wrapping around from high bits to low bits. */
1308 1.1 mrg bool
1309 1.1 mrg tilepro_bitfield_operand_p (HOST_WIDE_INT n, int *first_bit, int *last_bit)
1310 1.1 mrg {
1311 1.1 mrg int i;
1312 1.1 mrg
1313 1.1 mrg if (n == 0)
1314 1.1 mrg return false;
1315 1.1 mrg
1316 1.1 mrg for (i = 0; i < 32; i++)
1317 1.1 mrg {
1318 1.1 mrg unsigned HOST_WIDE_INT x = rotate_right (n, i);
1319 1.1 mrg if (!(x & 1))
1320 1.1 mrg continue;
1321 1.1 mrg
1322 1.1 mrg /* See if x is a power of two minus one, i.e. only consecutive 1
1323 1.1 mrg bits starting from bit 0. */
1324 1.1 mrg if ((x & (x + 1)) == 0)
1325 1.1 mrg {
1326 1.1 mrg if (first_bit != NULL)
1327 1.1 mrg *first_bit = i;
1328 1.1 mrg if (last_bit != NULL)
1329 1.1 mrg *last_bit = (i + exact_log2 (x ^ (x >> 1))) & 31;
1330 1.1 mrg
1331 1.1 mrg return true;
1332 1.1 mrg }
1333 1.1 mrg }
1334 1.1 mrg
1335 1.1 mrg return false;
1336 1.1 mrg }
1337 1.1 mrg
1338 1.1 mrg
/* Create code to move the CONST_INT value in src_val to dest_reg.
   Tries, in order: a single instruction; movei + shift/rotate
   (preferring forms that fit three-wide bundles); and finally an
   auli + addli/addi pair that builds the value from 16-bit halves.  */
static void
expand_set_cint32 (rtx dest_reg, rtx src_val)
{
  HOST_WIDE_INT val;
  int leading_zeroes, trailing_zeroes;
  int lower, upper;
  int three_wide_only;
  rtx temp;

  gcc_assert (CONST_INT_P (src_val));
  val = trunc_int_for_mode (INTVAL (src_val), SImode);

  /* See if we can generate the constant in one instruction.  */
  if (expand_set_cint32_one_inst (dest_reg, val, false))
    return;

  /* Create a temporary variable to hold a partial result, to enable
     CSE.  */
  temp = create_temp_reg_if_possible (SImode, dest_reg);

  leading_zeroes = 31 - floor_log2 (val & 0xFFFFFFFF);
  /* val & -val isolates the lowest set bit, so this is the count of
     trailing zero bits.  */
  trailing_zeroes = exact_log2 (val & -val);

  /* Split into a signed 16-bit low half and the matching high half,
     such that (upper << 16) + lower == val.  */
  lower = trunc_int_for_mode (val, HImode);
  upper = trunc_int_for_mode ((val - lower) >> 16, HImode);

  /* First try all three-wide instructions that generate a constant
     (i.e. movei) followed by various shifts and rotates. If none of
     those work, try various two-wide ways of generating a constant
     followed by various shifts and rotates.  */
  for (three_wide_only = 1; three_wide_only >= 0; three_wide_only--)
    {
      int count;

      if (expand_set_cint32_one_inst (temp, val >> trailing_zeroes,
				      three_wide_only))
	{
	  /* 0xFFFFA500 becomes:
	     movei temp, 0xFFFFFFA5
	     shli dest, temp, 8  */
	  emit_move_insn (dest_reg,
			  gen_rtx_ASHIFT (SImode, temp,
					  GEN_INT (trailing_zeroes)));
	  return;
	}

      if (expand_set_cint32_one_inst (temp, val << leading_zeroes,
				      three_wide_only))
	{
	  /* 0x7FFFFFFF becomes:
	     movei temp, -2
	     shri dest, temp, 1  */
	  emit_move_insn (dest_reg,
			  gen_rtx_LSHIFTRT (SImode, temp,
					    GEN_INT (leading_zeroes)));
	  return;
	}

      /* Try rotating a one-instruction immediate, since rotate is
	 3-wide.  */
      for (count = 1; count < 32; count++)
	{
	  HOST_WIDE_INT r = rotate_right (val, count);
	  if (expand_set_cint32_one_inst (temp, r, three_wide_only))
	    {
	      /* 0xFFA5FFFF becomes:
		 movei temp, 0xFFFFFFA5
		 rli dest, temp, 16  */
	      emit_move_insn (dest_reg,
			      gen_rtx_ROTATE (SImode, temp, GEN_INT (count)));
	      return;
	    }
	}

      if (lower == trunc_int_for_mode (lower, QImode))
	{
	  /* We failed to use two 3-wide instructions, but the low 16
	     bits are a small number so just use a 2-wide + 3-wide
	     auli + addi pair rather than anything more exotic.

	     0x12340056 becomes:
	     auli temp, zero, 0x1234
	     addi dest, temp, 0x56  */
	  break;
	}
    }

  /* Fallback case: use a auli + addli/addi pair.  */
  emit_move_insn (temp, GEN_INT (upper << 16));
  emit_move_insn (dest_reg, (gen_rtx_PLUS (SImode, temp, GEN_INT (lower))));
}
1431 1.1 mrg
1432 1.1 mrg
1433 1.1 mrg /* Load OP1, a 32-bit constant, into OP0, a register. We know it
1434 1.1 mrg can't be done in one insn when we get here, the move expander
1435 1.1 mrg guarantees this. */
1436 1.1 mrg void
1437 1.1 mrg tilepro_expand_set_const32 (rtx op0, rtx op1)
1438 1.1 mrg {
1439 1.1 mrg machine_mode mode = GET_MODE (op0);
1440 1.1 mrg rtx temp;
1441 1.1 mrg
1442 1.1 mrg if (CONST_INT_P (op1))
1443 1.1 mrg {
1444 1.1 mrg /* TODO: I don't know if we want to split large constants now,
1445 1.1 mrg or wait until later (with a define_split).
1446 1.1 mrg
1447 1.1 mrg Does splitting early help CSE? Does it harm other
1448 1.1 mrg optimizations that might fold loads? */
1449 1.1 mrg expand_set_cint32 (op0, op1);
1450 1.1 mrg }
1451 1.1 mrg else
1452 1.1 mrg {
1453 1.1 mrg temp = create_temp_reg_if_possible (mode, op0);
1454 1.1 mrg
1455 1.1 mrg /* A symbol, emit in the traditional way. */
1456 1.1 mrg emit_move_insn (temp, gen_rtx_HIGH (mode, op1));
1457 1.1 mrg emit_move_insn (op0, gen_rtx_LO_SUM (mode, temp, op1));
1458 1.1 mrg }
1459 1.1 mrg }
1460 1.1 mrg
1461 1.1 mrg
/* Expand a move instruction.  Return true if all work is done (the
   caller should emit nothing further); false if the operands have
   been fixed up and the normal move pattern should proceed.  */
bool
tilepro_expand_mov (machine_mode mode, rtx *operands)
{
  /* Handle sets of MEM first.  */
  if (MEM_P (operands[0]))
    {
      if (can_create_pseudo_p ())
	operands[0] = validize_mem (operands[0]);

      if (reg_or_0_operand (operands[1], mode))
	return false;

      /* Stores need a register (or zero) source.  */
      if (!reload_in_progress)
	operands[1] = force_reg (mode, operands[1]);
    }

  /* Fixup TLS cases.  */
  if (CONSTANT_P (operands[1]) && tilepro_tls_referenced_p (operands[1]))
    {
      operands[1] = tilepro_legitimize_tls_address (operands[1]);
      return false;
    }

  /* Fixup PIC cases.  */
  if (flag_pic && CONSTANT_P (operands[1]))
    {
      if (tilepro_pic_address_needs_scratch (operands[1]))
	operands[1] = tilepro_legitimize_pic_address (operands[1], mode, 0);

      if (symbolic_operand (operands[1], mode))
	{
	  /* During reload no new pseudos may be created, so reuse the
	     destination as the scratch register.  */
	  operands[1] = tilepro_legitimize_pic_address (operands[1],
							mode,
							(reload_in_progress ?
							 operands[0] :
							 NULL_RTX));
	  return false;
	}
    }

  /* Fixup for UNSPEC addresses: expand (high (const (unspec ...)))
     into the specific relocation-carrying insn for that unspec.  */
  if (flag_pic
      && GET_CODE (operands[1]) == HIGH
      && GET_CODE (XEXP (operands[1], 0)) == CONST
      && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == UNSPEC)
    {
      rtx unspec = XEXP (XEXP (operands[1], 0), 0);
      int unspec_num = XINT (unspec, 1);
      if (unspec_num == UNSPEC_PCREL_SYM)
	{
	  emit_insn (gen_auli_pcrel (operands[0], const0_rtx,
				     XVECEXP (unspec, 0, 0),
				     XVECEXP (unspec, 0, 1)));
	  return true;
	}
      else if (flag_pic == 2 && unspec_num == UNSPEC_GOT32_SYM)
	{
	  emit_insn (gen_addhi_got32 (operands[0], const0_rtx,
				      XVECEXP (unspec, 0, 0)));
	  return true;
	}
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_GD)
	{
	  emit_insn (gen_tls_gd_addhi (operands[0], const0_rtx,
				       XVECEXP (unspec, 0, 0)));
	  return true;
	}
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_IE)
	{
	  emit_insn (gen_tls_ie_addhi (operands[0], const0_rtx,
				       XVECEXP (unspec, 0, 0)));
	  return true;
	}
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_LE)
	{
	  emit_insn (gen_tls_le_addhi (operands[0], const0_rtx,
				       XVECEXP (unspec, 0, 0)));
	  return true;
	}
    }

  /* Accept non-constants and valid constants unmodified.  */
  if (!CONSTANT_P (operands[1])
      || GET_CODE (operands[1]) == HIGH || move_operand (operands[1], mode))
    return false;

  /* Split large integers.  */
  if (GET_MODE_SIZE (mode) <= 4)
    {
      tilepro_expand_set_const32 (operands[0], operands[1]);
      return true;
    }

  return false;
}
1558 1.1 mrg
1559 1.1 mrg
1560 1.1 mrg /* Expand the "insv" pattern. */
1561 1.1 mrg void
1562 1.1 mrg tilepro_expand_insv (rtx operands[4])
1563 1.1 mrg {
1564 1.1 mrg rtx first_rtx = operands[2];
1565 1.1 mrg HOST_WIDE_INT first = INTVAL (first_rtx);
1566 1.1 mrg HOST_WIDE_INT width = INTVAL (operands[1]);
1567 1.1 mrg rtx v = operands[3];
1568 1.1 mrg
1569 1.1 mrg /* Shift the inserted bits into position. */
1570 1.1 mrg if (first != 0)
1571 1.1 mrg {
1572 1.1 mrg if (CONST_INT_P (v))
1573 1.1 mrg {
1574 1.1 mrg /* Shift the constant into mm position. */
1575 1.1 mrg v = gen_int_si (INTVAL (v) << first);
1576 1.1 mrg }
1577 1.1 mrg else
1578 1.1 mrg {
1579 1.1 mrg /* Shift over the value to be inserted. */
1580 1.1 mrg rtx tmp = gen_reg_rtx (SImode);
1581 1.1 mrg emit_insn (gen_ashlsi3 (tmp, v, first_rtx));
1582 1.1 mrg v = tmp;
1583 1.1 mrg }
1584 1.1 mrg }
1585 1.1 mrg
1586 1.1 mrg /* Insert the shifted bits using an 'mm' insn. */
1587 1.1 mrg emit_insn (gen_insn_mm (operands[0], v, operands[0], first_rtx,
1588 1.1 mrg GEN_INT (first + width - 1)));
1589 1.1 mrg }
1590 1.1 mrg
1591 1.1 mrg
/* Expand unaligned loads: load BITSIZE bits starting BIT_OFFSET bits
   into MEM and place the result in DEST_REG, sign-extending when SIGN
   is true.  A 16-bit byte-aligned load is done as two byte loads; the
   general case loads two aligned words covering the field and
   combines them.  */
void
tilepro_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
			       HOST_WIDE_INT bit_offset, bool sign)
{
  machine_mode mode;
  rtx addr_lo, addr_hi;
  rtx mem_lo, mem_hi, hi;
  rtx mema, wide_result;
  int last_byte_offset;
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;

  mode = GET_MODE (dest_reg);

  hi = gen_reg_rtx (mode);

  if (bitsize == 2 * BITS_PER_UNIT && (bit_offset % BITS_PER_UNIT) == 0)
    {
      rtx lo;

      /* When just loading a two byte value, we can load the two bytes
	 individually and combine them efficiently.  */

      mem_lo = adjust_address (mem, QImode, byte_offset);
      mem_hi = adjust_address (mem, QImode, byte_offset + 1);

      lo = gen_reg_rtx (mode);
      emit_insn (gen_zero_extendqisi2 (lo, mem_lo));

      if (sign)
	{
	  rtx tmp = gen_reg_rtx (mode);

	  /* Do a signed load of the second byte then shift and OR it
	     in.  */
	  emit_insn (gen_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
	  emit_insn (gen_ashlsi3 (gen_lowpart (SImode, tmp),
				  gen_lowpart (SImode, hi), GEN_INT (8)));
	  emit_insn (gen_iorsi3 (gen_lowpart (SImode, dest_reg),
				 gen_lowpart (SImode, lo),
				 gen_lowpart (SImode, tmp)));
	}
      else
	{
	  /* Do two unsigned loads and use intlb to interleave
	     them.  */
	  emit_insn (gen_zero_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
	  emit_insn (gen_insn_intlb (gen_lowpart (SImode, dest_reg),
				     gen_lowpart (SImode, hi),
				     gen_lowpart (SImode, lo)));
	}

      return;
    }

  mema = XEXP (mem, 0);

  /* AND addresses cannot be in any alias set, since they may
     implicitly alias surrounding code.  Ideally we'd have some alias
     set that covered all types except those with alignment 8 or
     higher.  */
  addr_lo = force_reg (Pmode, plus_constant (Pmode, mema, byte_offset));
  mem_lo = change_address (mem, mode,
			   gen_rtx_AND (Pmode, addr_lo, GEN_INT (-4)));
  set_mem_alias_set (mem_lo, 0);

  /* Load the high word at an address that will not fault if the low
     address is aligned and at the very end of a page.  */
  last_byte_offset = (bit_offset + bitsize - 1) / BITS_PER_UNIT;
  addr_hi = force_reg (Pmode, plus_constant (Pmode, mema, last_byte_offset));
  mem_hi = change_address (mem, mode,
			   gen_rtx_AND (Pmode, addr_hi, GEN_INT (-4)));
  set_mem_alias_set (mem_hi, 0);

  if (bitsize == 32)
    {
      /* Full-word load: combine directly into the destination, after
	 making sure the address does not live in dest_reg itself.  */
      addr_lo = make_safe_from (addr_lo, dest_reg);
      wide_result = dest_reg;
    }
  else
    {
      wide_result = gen_reg_rtx (mode);
    }

  /* Load hi first in case dest_reg is used in mema.  */
  emit_move_insn (hi, mem_hi);
  emit_move_insn (wide_result, mem_lo);

  /* Combine the two aligned words, selecting bytes according to the
     low bits of addr_lo.  */
  emit_insn (gen_insn_dword_align (gen_lowpart (SImode, wide_result),
				   gen_lowpart (SImode, wide_result),
				   gen_lowpart (SImode, hi), addr_lo));

  if (bitsize != 32)
    {
      /* Extract the desired sub-word field from the combined word.  */
      rtx extracted =
	extract_bit_field (gen_lowpart (SImode, wide_result),
			   bitsize, bit_offset % BITS_PER_UNIT,
			   !sign, gen_lowpart (SImode, dest_reg),
			   SImode, SImode, false, NULL);

      if (extracted != dest_reg)
	emit_move_insn (dest_reg, gen_lowpart (SImode, extracted));
    }
}
1696 1.1 mrg
1697 1.1 mrg
1698 1.1 mrg /* Expand unaligned stores. */
1699 1.1 mrg static void
1700 1.1 mrg tilepro_expand_unaligned_store (rtx mem, rtx src, HOST_WIDE_INT bitsize,
1701 1.1 mrg HOST_WIDE_INT bit_offset)
1702 1.1 mrg {
1703 1.1 mrg HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
1704 1.1 mrg HOST_WIDE_INT bytesize = bitsize / BITS_PER_UNIT;
1705 1.1 mrg HOST_WIDE_INT shift_amt;
1706 1.1 mrg HOST_WIDE_INT i;
1707 1.1 mrg rtx mem_addr;
1708 1.1 mrg rtx store_val;
1709 1.1 mrg
1710 1.1 mrg for (i = 0, shift_amt = 0; i < bytesize; i++, shift_amt += BITS_PER_UNIT)
1711 1.1 mrg {
1712 1.1 mrg mem_addr = adjust_address (mem, QImode, byte_offset + i);
1713 1.1 mrg
1714 1.1 mrg if (shift_amt)
1715 1.1 mrg {
1716 1.1 mrg store_val = expand_simple_binop (SImode, LSHIFTRT,
1717 1.1 mrg gen_lowpart (SImode, src),
1718 1.1 mrg GEN_INT (shift_amt), NULL, 1,
1719 1.1 mrg OPTAB_LIB_WIDEN);
1720 1.1 mrg store_val = gen_lowpart (QImode, store_val);
1721 1.1 mrg }
1722 1.1 mrg else
1723 1.1 mrg {
1724 1.1 mrg store_val = gen_lowpart (QImode, src);
1725 1.1 mrg }
1726 1.1 mrg
1727 1.1 mrg emit_move_insn (mem_addr, store_val);
1728 1.1 mrg }
1729 1.1 mrg }
1730 1.1 mrg
1731 1.1 mrg
1732 1.1 mrg /* Implement the movmisalign patterns. One of the operands is a
1733 1.1 mrg memory that is not naturally aligned. Emit instructions to load
1734 1.1 mrg it. */
1735 1.1 mrg void
1736 1.1 mrg tilepro_expand_movmisalign (machine_mode mode, rtx *operands)
1737 1.1 mrg {
1738 1.1 mrg if (MEM_P (operands[1]))
1739 1.1 mrg {
1740 1.1 mrg rtx tmp;
1741 1.1 mrg
1742 1.1 mrg if (register_operand (operands[0], mode))
1743 1.1 mrg tmp = operands[0];
1744 1.1 mrg else
1745 1.1 mrg tmp = gen_reg_rtx (mode);
1746 1.1 mrg
1747 1.1 mrg tilepro_expand_unaligned_load (tmp, operands[1],
1748 1.1 mrg GET_MODE_BITSIZE (mode), 0, true);
1749 1.1 mrg
1750 1.1 mrg if (tmp != operands[0])
1751 1.1 mrg emit_move_insn (operands[0], tmp);
1752 1.1 mrg }
1753 1.1 mrg else if (MEM_P (operands[0]))
1754 1.1 mrg {
1755 1.1 mrg if (!reg_or_0_operand (operands[1], mode))
1756 1.1 mrg operands[1] = force_reg (mode, operands[1]);
1757 1.1 mrg
1758 1.1 mrg tilepro_expand_unaligned_store (operands[0], operands[1],
1759 1.1 mrg GET_MODE_BITSIZE (mode), 0);
1760 1.1 mrg }
1761 1.1 mrg else
1762 1.1 mrg gcc_unreachable ();
1763 1.1 mrg }
1764 1.1 mrg
1765 1.1 mrg
1766 1.1 mrg /* Implement the addsi3 pattern. */
1767 1.1 mrg bool
1768 1.1 mrg tilepro_expand_addsi (rtx op0, rtx op1, rtx op2)
1769 1.1 mrg {
1770 1.1 mrg rtx temp;
1771 1.1 mrg HOST_WIDE_INT n;
1772 1.1 mrg HOST_WIDE_INT high;
1773 1.1 mrg
1774 1.1 mrg /* Skip anything that only takes one instruction. */
1775 1.1 mrg if (add_operand (op2, SImode))
1776 1.1 mrg return false;
1777 1.1 mrg
1778 1.1 mrg /* We can only optimize ints here (it should be impossible to get
1779 1.1 mrg here with any other type, but it is harmless to check. */
1780 1.1 mrg if (!CONST_INT_P (op2))
1781 1.1 mrg return false;
1782 1.1 mrg
1783 1.1 mrg temp = create_temp_reg_if_possible (SImode, op0);
1784 1.1 mrg n = INTVAL (op2);
1785 1.1 mrg high = (n + (n & 0x8000)) & ~0xffff;
1786 1.1 mrg
1787 1.1 mrg emit_move_insn (temp, gen_rtx_PLUS (SImode, op1, gen_int_si (high)));
1788 1.1 mrg emit_move_insn (op0, gen_rtx_PLUS (SImode, temp, gen_int_si (n - high)));
1789 1.1 mrg
1790 1.1 mrg return true;
1791 1.1 mrg }
1792 1.1 mrg
1793 1.1 mrg
/* Implement the allocate_stack pattern (alloca): decrement the stack
   pointer by OP1 bytes, preserving the frame-chain word stored just
   above the stack pointer, and return the new space's address in
   OP0.  */
void
tilepro_allocate_stack (rtx op0, rtx op1)
{
  /* Technically the correct way to initialize chain_loc is with
   * gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
   * sets the alias_set to that of a frame reference.  Some of our
   * tests rely on some unsafe assumption about when the chaining
   * update is done, we need to be conservative about reordering the
   * chaining instructions.
   */
  rtx fp_addr = gen_reg_rtx (Pmode);
  rtx fp_value = gen_reg_rtx (Pmode);
  rtx fp_loc;

  /* Read the chain word at sp + UNITS_PER_WORD before moving sp.  */
  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_value, fp_loc);

  op1 = force_reg (Pmode, op1);

  /* Allocate: sp -= op1.  */
  emit_move_insn (stack_pointer_rtx,
		  gen_rtx_MINUS (Pmode, stack_pointer_rtx, op1));

  /* Rewrite the saved chain word at its new location above sp.  */
  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_loc, fp_value);

  emit_move_insn (op0, virtual_stack_dynamic_rtx);
}
1830 1.1 mrg
1831 1.1 mrg
1833 1.1 mrg
1834 1.1 mrg /* Multiplies */
1835 1.1 mrg
/* Returns the insn_code in ENTRY.  The table entry stores a small
   compressed opcode index, which is expanded to a real insn_code via
   the tilepro_multiply_insn_seq_decode_opcode lookup table.  */
static enum insn_code
tilepro_multiply_get_opcode (const struct tilepro_multiply_insn_seq_entry
			     *entry)
{
  return tilepro_multiply_insn_seq_decode_opcode[entry->compressed_opcode];
}
1843 1.1 mrg
1844 1.1 mrg
1845 1.1 mrg /* Returns the length of the 'op' array. */
1846 1.1 mrg static int
1847 1.1 mrg tilepro_multiply_get_num_ops (const struct tilepro_multiply_insn_seq *seq)
1848 1.1 mrg {
1849 1.1 mrg /* The array either uses all of its allocated slots or is terminated
1850 1.1 mrg by a bogus opcode. Either way, the array size is the index of the
1851 1.1 mrg last valid opcode plus one. */
1852 1.1 mrg int i;
1853 1.1 mrg for (i = tilepro_multiply_insn_seq_MAX_OPERATIONS - 1; i >= 0; i--)
1854 1.1 mrg if (tilepro_multiply_get_opcode (&seq->op[i]) != CODE_FOR_nothing)
1855 1.1 mrg return i + 1;
1856 1.1 mrg
1857 1.1 mrg /* An empty array is not allowed. */
1858 1.1 mrg gcc_unreachable ();
1859 1.1 mrg }
1860 1.1 mrg
1861 1.1 mrg
/* We precompute a number of expression trees for multiplying by
   constants.  This generates code for such an expression tree by
   walking through the nodes in the tree (which are conveniently
   pre-linearized) and emitting an instruction for each one.  RESULT
   receives the final product, SRC is the value being multiplied, and
   SEQ is the precomputed linearized instruction sequence.  */
static void
tilepro_expand_constant_multiply_given_sequence (rtx result, rtx src,
						 const struct
						 tilepro_multiply_insn_seq
						 *seq)
{
  int i;
  int num_ops;

  /* Keep track of the subexpressions computed so far, so later
     instructions can refer to them.  We seed the array with zero and
     the value being multiplied.  */
  int num_subexprs = 2;
  rtx subexprs[tilepro_multiply_insn_seq_MAX_OPERATIONS + 2];
  subexprs[0] = const0_rtx;
  subexprs[1] = src;

  /* Determine how many instructions we are going to generate.  */
  num_ops = tilepro_multiply_get_num_ops (seq);
  gcc_assert (num_ops > 0
	      && num_ops <= tilepro_multiply_insn_seq_MAX_OPERATIONS);

  for (i = 0; i < num_ops; i++)
    {
      const struct tilepro_multiply_insn_seq_entry *entry = &seq->op[i];

      /* Figure out where to store the output of this instruction.
	 The last instruction writes straight into RESULT; all others
	 get a fresh pseudo.  */
      const bool is_last_op = (i + 1 == num_ops);
      rtx out = is_last_op ? result : gen_reg_rtx (SImode);

      enum insn_code opcode = tilepro_multiply_get_opcode (entry);
      if (opcode == CODE_FOR_ashlsi3)
	{
	  /* Handle shift by immediate.  This is a special case because
	     the meaning of the second operand is a constant shift
	     count rather than an operand index.  */

	  /* Make sure the shift count is in range.  Zero should not
	     happen.  */
	  const int shift_count = entry->rhs;
	  gcc_assert (shift_count > 0 && shift_count < 32);

	  /* Emit the actual instruction.  */
	  emit_insn (GEN_FCN (opcode)
		     (out, subexprs[entry->lhs],
		      gen_rtx_CONST_INT (SImode, shift_count)));
	}
      else
	{
	  /* Handle a normal two-operand instruction, such as add or
	     s1a.  */

	  /* Make sure we are referring to a previously computed
	     subexpression.  */
	  gcc_assert (entry->rhs < num_subexprs);

	  /* Emit the actual instruction.  */
	  emit_insn (GEN_FCN (opcode)
		     (out, subexprs[entry->lhs], subexprs[entry->rhs]));
	}

      /* Record this subexpression for use by later expressions.  */
      subexprs[num_subexprs++] = out;
    }
}
1931 1.1 mrg
1932 1.1 mrg
1933 1.1 mrg /* bsearch helper function. */
1934 1.1 mrg static int
1935 1.1 mrg tilepro_compare_multipliers (const void *key, const void *t)
1936 1.1 mrg {
1937 1.1 mrg return *(const int *) key -
1938 1.1 mrg ((const struct tilepro_multiply_insn_seq *) t)->multiplier;
1939 1.1 mrg }
1940 1.1 mrg
1941 1.1 mrg
1942 1.1 mrg /* Returns the tilepro_multiply_insn_seq for multiplier, or NULL if
1943 1.1 mrg none exists. */
1944 1.1 mrg static const struct tilepro_multiply_insn_seq *
1945 1.1 mrg tilepro_find_multiply_insn_seq_for_constant (int multiplier)
1946 1.1 mrg {
1947 1.1 mrg return ((const struct tilepro_multiply_insn_seq *)
1948 1.1 mrg bsearch (&multiplier, tilepro_multiply_insn_seq_table,
1949 1.1 mrg tilepro_multiply_insn_seq_table_size,
1950 1.1 mrg sizeof tilepro_multiply_insn_seq_table[0],
1951 1.1 mrg tilepro_compare_multipliers));
1952 1.1 mrg }
1953 1.1 mrg
1954 1.1 mrg
1955 1.1 mrg /* Try to a expand constant multiply in SImode by looking it up in a
1956 1.1 mrg precompiled table. OP0 is the result operand, OP1 is the source
1957 1.1 mrg operand, and MULTIPLIER is the value of the constant. Return true
1958 1.1 mrg if it succeeds. */
1959 1.1 mrg static bool
1960 1.1 mrg tilepro_expand_const_mulsi (rtx op0, rtx op1, int multiplier)
1961 1.1 mrg {
1962 1.1 mrg /* See if we have precomputed an efficient way to multiply by this
1963 1.1 mrg constant. */
1964 1.1 mrg const struct tilepro_multiply_insn_seq *seq =
1965 1.1 mrg tilepro_find_multiply_insn_seq_for_constant (multiplier);
1966 1.1 mrg if (seq != NULL)
1967 1.1 mrg {
1968 1.1 mrg tilepro_expand_constant_multiply_given_sequence (op0, op1, seq);
1969 1.1 mrg return true;
1970 1.1 mrg }
1971 1.1 mrg else
1972 1.1 mrg return false;
1973 1.1 mrg }
1974 1.1 mrg
1975 1.1 mrg
1976 1.1 mrg /* Expand the mulsi pattern. */
1977 1.1 mrg bool
1978 1.1 mrg tilepro_expand_mulsi (rtx op0, rtx op1, rtx op2)
1979 1.1 mrg {
1980 1.1 mrg if (CONST_INT_P (op2))
1981 1.1 mrg {
1982 1.1 mrg HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op2), SImode);
1983 1.1 mrg return tilepro_expand_const_mulsi (op0, op1, n);
1984 1.1 mrg }
1985 1.1 mrg return false;
1986 1.1 mrg }
1987 1.1 mrg
1988 1.1 mrg
/* Expand a high multiply pattern in SImode.  RESULT, OP1, OP2 are the
   operands, and SIGN is true if it's a signed multiply, and false if
   it's an unsigned multiply.  The 64-bit product is built out of
   16x16-bit partial products; the carries out of the low 32 bits are
   recovered with unsigned compares and folded into the high word.  */
static void
tilepro_expand_high_multiply (rtx result, rtx op1, rtx op2, bool sign)
{
  rtx tmp0 = gen_reg_rtx (SImode);
  rtx tmp1 = gen_reg_rtx (SImode);
  rtx tmp2 = gen_reg_rtx (SImode);
  rtx tmp3 = gen_reg_rtx (SImode);
  rtx tmp4 = gen_reg_rtx (SImode);
  rtx tmp5 = gen_reg_rtx (SImode);
  rtx tmp6 = gen_reg_rtx (SImode);
  rtx tmp7 = gen_reg_rtx (SImode);
  rtx tmp8 = gen_reg_rtx (SImode);
  rtx tmp9 = gen_reg_rtx (SImode);
  rtx tmp10 = gen_reg_rtx (SImode);
  rtx tmp11 = gen_reg_rtx (SImode);
  rtx tmp12 = gen_reg_rtx (SImode);
  rtx tmp13 = gen_reg_rtx (SImode);
  rtx result_lo = gen_reg_rtx (SImode);

  /* Form the four 16x16 partial products: tmp0/tmp1 are the two
     cross (high half times low half) products, tmp2 is lo*lo, and
     tmp3 is hi*hi.  The signedness of each product matches the
     signedness of the halves involved.  */
  if (sign)
    {
      emit_insn (gen_insn_mulhl_su (tmp0, op1, op2));
      emit_insn (gen_insn_mulhl_su (tmp1, op2, op1));
      emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
      emit_insn (gen_insn_mulhh_ss (tmp3, op1, op2));
    }
  else
    {
      emit_insn (gen_insn_mulhl_uu (tmp0, op1, op2));
      emit_insn (gen_insn_mulhl_uu (tmp1, op2, op1));
      emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
      emit_insn (gen_insn_mulhh_uu (tmp3, op1, op2));
    }

  /* Add the low halves of the cross products (shifted into position)
     and the lo*lo product to form the low word of the result.  */
  emit_move_insn (tmp4, (gen_rtx_ASHIFT (SImode, tmp0, GEN_INT (16))));

  emit_move_insn (tmp5, (gen_rtx_ASHIFT (SImode, tmp1, GEN_INT (16))));

  emit_move_insn (tmp6, (gen_rtx_PLUS (SImode, tmp4, tmp5)));
  emit_move_insn (result_lo, (gen_rtx_PLUS (SImode, tmp2, tmp6)));

  /* tmp7 and tmp8 recover the carry bits out of the two low-word
     additions above (an unsigned sum wrapped iff it is less than one
     of its addends).  */
  emit_move_insn (tmp7, gen_rtx_LTU (SImode, tmp6, tmp4));
  emit_move_insn (tmp8, gen_rtx_LTU (SImode, result_lo, tmp2));

  /* High halves of the cross products; arithmetic shift in the
     signed case so they are sign-extended.  */
  if (sign)
    {
      emit_move_insn (tmp9, (gen_rtx_ASHIFTRT (SImode, tmp0, GEN_INT (16))));
      emit_move_insn (tmp10, (gen_rtx_ASHIFTRT (SImode, tmp1, GEN_INT (16))));
    }
  else
    {
      emit_move_insn (tmp9, (gen_rtx_LSHIFTRT (SImode, tmp0, GEN_INT (16))));
      emit_move_insn (tmp10, (gen_rtx_LSHIFTRT (SImode, tmp1, GEN_INT (16))));
    }

  /* Accumulate the high word: hi*hi plus the two carries plus the
     high halves of the cross products.  */
  emit_move_insn (tmp11, (gen_rtx_PLUS (SImode, tmp3, tmp7)));
  emit_move_insn (tmp12, (gen_rtx_PLUS (SImode, tmp8, tmp9)));
  emit_move_insn (tmp13, (gen_rtx_PLUS (SImode, tmp11, tmp12)));
  emit_move_insn (result, (gen_rtx_PLUS (SImode, tmp13, tmp10)));
}
2052 1.1 mrg
2053 1.1 mrg
/* Implement smulsi3_highpart.  OP0 receives the high 32 bits of the
   signed 32x32-bit product of OP1 and OP2.  */
void
tilepro_expand_smulsi3_highpart (rtx op0, rtx op1, rtx op2)
{
  tilepro_expand_high_multiply (op0, op1, op2, true);
}
2060 1.1 mrg
2061 1.1 mrg
/* Implement umulsi3_highpart.  OP0 receives the high 32 bits of the
   unsigned 32x32-bit product of OP1 and OP2.  */
void
tilepro_expand_umulsi3_highpart (rtx op0, rtx op1, rtx op2)
{
  tilepro_expand_high_multiply (op0, op1, op2, false);
}
2068 1.1 mrg
2069 1.1 mrg
2071 1.1 mrg
2072 1.1 mrg /* Compare and branches */
2073 1.1 mrg
/* Helper function to handle DImode for tilepro_emit_setcc_internal.
   Emits code computing the SImode boolean RES = (OP0 CODE OP1) where
   the operands are DImode, by splitting them into SImode halves.
   Always returns true for the codes it accepts; any other code
   aborts.  */
static bool
tilepro_emit_setcc_internal_di (rtx res, enum rtx_code code, rtx op0, rtx op1)
{
  rtx operands[2], lo_half[2], hi_half[2];
  rtx tmp, tmp0, tmp1, tmp2;
  bool swap = false;

  /* Reduce the number of cases we need to handle by reversing the
     operands.  */
  switch (code)
    {
    case EQ:
    case NE:
    case LE:
    case LT:
    case LEU:
    case LTU:
      /* We handle these compares directly.  */
      break;

    case GE:
    case GT:
    case GEU:
    case GTU:
      /* Reverse the operands.  */
      swap = true;
      break;

    default:
      /* We should not have called this with any other code.  */
      gcc_unreachable ();
    }

  if (swap)
    {
      /* E.g. (a > b) becomes (b < a).  */
      code = swap_condition (code);
      tmp = op0, op0 = op1, op1 = tmp;
    }

  operands[0] = op0;
  operands[1] = op1;

  /* Split each DImode operand into its low and high SImode words.  */
  split_di (operands, 2, lo_half, hi_half);

  /* Legitimize the halves: the first operand must be a register (or
     zero), the second a register or immediate.  */
  if (!reg_or_0_operand (lo_half[0], SImode))
    lo_half[0] = force_reg (SImode, lo_half[0]);

  if (!reg_or_0_operand (hi_half[0], SImode))
    hi_half[0] = force_reg (SImode, hi_half[0]);

  if (!CONST_INT_P (lo_half[1]) && !register_operand (lo_half[1], SImode))
    lo_half[1] = force_reg (SImode, lo_half[1]);

  if (!CONST_INT_P (hi_half[1]) && !register_operand (hi_half[1], SImode))
    hi_half[1] = force_reg (SImode, hi_half[1]);

  tmp0 = gen_reg_rtx (SImode);
  tmp1 = gen_reg_rtx (SImode);
  tmp2 = gen_reg_rtx (SImode);

  switch (code)
    {
    case EQ:
      /* Equal iff both halves are equal.  */
      emit_insn (gen_insn_seq (tmp0, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_andsi3 (res, tmp0, tmp1));
      return true;
    case NE:
      /* Not-equal iff either half differs.  */
      emit_insn (gen_insn_sne (tmp0, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_sne (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_iorsi3 (res, tmp0, tmp1));
      return true;
    case LE:
      /* Compare the high halves; when they are equal (tmp1 nonzero)
	 the mvnz picks the unsigned low-half compare instead.  */
      emit_insn (gen_insn_slte (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
      return true;
    case LT:
      if (operands[1] == const0_rtx)
	{
	  /* (x < 0) is just the sign bit of the high word.  */
	  emit_insn (gen_lshrsi3 (res, hi_half[0], GEN_INT (31)));
	  return true;
	}
      else
	{
	  /* Same scheme as LE: high-half compare, low-half compare
	     selected when the high halves are equal.  */
	  emit_insn (gen_insn_slt (tmp0, hi_half[0], hi_half[1]));
	  emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
	  emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
	  emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
	}
      return true;
    case LEU:
      emit_insn (gen_insn_slte_u (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
      return true;
    case LTU:
      emit_insn (gen_insn_slt_u (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
      return true;
    default:
      gcc_unreachable ();
    }

  /* Not reached; every case above returns.  */
  return false;
}
2185 1.1 mrg
2186 1.1 mrg
/* Emit code computing the SImode boolean RES = (OP0 CODE OP1), where
   the operands have mode CMP_MODE (SImode or DImode).  Comparison
   codes the hardware lacks are handled by swapping the operands.
   Returns true (the DImode helper also always succeeds for the codes
   reachable here).  */
static bool
tilepro_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
			     machine_mode cmp_mode)
{
  rtx tmp;
  bool swap = false;

  /* DImode comparisons are handled by a dedicated helper that splits
     the operands into word halves.  */
  if (cmp_mode == DImode)
    {
      return tilepro_emit_setcc_internal_di (res, code, op0, op1);
    }

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */

  switch (code)
    {
    case EQ:
    case NE:
    case LE:
    case LT:
    case LEU:
    case LTU:
      /* We have these compares.  */
      break;

    case GE:
    case GT:
    case GEU:
    case GTU:
      /* We do not have these compares, so we reverse the
	 operands.  */
      swap = true;
      break;

    default:
      /* We should not have called this with any other code.  */
      gcc_unreachable ();
    }

  if (swap)
    {
      /* E.g. (a > b) becomes (b < a).  */
      code = swap_condition (code);
      tmp = op0, op0 = op1, op1 = tmp;
    }

  /* Legitimize the operands for the setcc pattern.  */
  if (!reg_or_0_operand (op0, SImode))
    op0 = force_reg (SImode, op0);

  if (!CONST_INT_P (op1) && !register_operand (op1, SImode))
    op1 = force_reg (SImode, op1);

  /* Return the setcc comparison.  */
  emit_insn (gen_rtx_SET (res, gen_rtx_fmt_ee (code, SImode, op0, op1)));

  return true;
}
2247 1.1 mrg
2248 1.1 mrg
/* Implement cstore patterns.  OPERANDS[0] is the destination,
   OPERANDS[1] the comparison rtx (its code is used), and
   OPERANDS[2]/OPERANDS[3] the values being compared, in mode
   CMP_MODE.  Returns true on success.  */
bool
tilepro_emit_setcc (rtx operands[], machine_mode cmp_mode)
{
  return
    tilepro_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
				 operands[2], operands[3], cmp_mode);
}
2257 1.1 mrg
2258 1.1 mrg
2259 1.1 mrg /* Return whether CODE is a signed comparison. */
2260 1.1 mrg static bool
2261 1.1 mrg signed_compare_p (enum rtx_code code)
2262 1.1 mrg {
2263 1.1 mrg return (code == EQ || code == NE || code == LT || code == LE
2264 1.1 mrg || code == GT || code == GE);
2265 1.1 mrg }
2266 1.1 mrg
2267 1.1 mrg
/* Generate the comparison for an SImode conditional branch.  CODE is
   the comparison, OP0 and OP1 the operands in mode CMP_MODE, and
   EQ_NE_ONLY restricts the direct compare-against-zero shortcut to
   EQ/NE.  Returns the condition rtx (over a flag register when the
   compare cannot be branched on directly); any needed flag-computing
   insns are emitted as a side effect.  */
static rtx
tilepro_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
		      machine_mode cmp_mode, bool eq_ne_only)
{
  enum rtx_code branch_code;
  rtx temp;

  /* Check for a compare against zero using a comparison we can do
     directly.  */
  if (cmp_mode != DImode
      && op1 == const0_rtx
      && (code == EQ || code == NE
	  || (!eq_ne_only && signed_compare_p (code))))
    {
      op0 = force_reg (SImode, op0);
      return gen_rtx_fmt_ee (code, VOIDmode, op0, const0_rtx);
    }

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */
  switch (code)
    {
    case EQ:
    case LE:
    case LT:
    case LEU:
    case LTU:
      /* We have these compares.  */
      branch_code = NE;
      break;

    case NE:
    case GE:
    case GT:
    case GEU:
    case GTU:
      /* These must be reversed (except NE, but let's
	 canonicalize).  */
      code = reverse_condition (code);
      branch_code = EQ;
      break;

    default:
      gcc_unreachable ();
    }

  /* Try to avoid materializing a constant that does not fit in an
     immediate (or a LEU compare, which has no immediate form).  */
  if (cmp_mode != DImode
      && CONST_INT_P (op1) && (!satisfies_constraint_I (op1) || code == LEU))
    {
      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op1), SImode);

      switch (code)
	{
	case EQ:
	  /* Subtract off the value we want to compare against and see
	     if we get zero.  This is cheaper than creating a constant
	     in a register.  Except that subtracting -128 is more
	     expensive than seqi to -128, so we leave that alone.  */
	  /* ??? Don't do this when comparing against symbols,
	     otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
	     0), which will be declared false out of hand (at least
	     for non-weak).  */
	  if (!(symbolic_operand (op0, VOIDmode)
		|| (REG_P (op0) && REG_POINTER (op0))))
	    {
	      /* To compare against MIN_INT, we add MIN_INT and check
		 for 0.  (Negating MIN_INT would overflow.)  */
	      HOST_WIDE_INT add;
	      if (n != -2147483647 - 1)
		add = -n;
	      else
		add = n;

	      op0 = force_reg (SImode, op0);
	      temp = gen_reg_rtx (SImode);
	      emit_insn (gen_addsi3 (temp, op0, gen_int_si (add)));
	      return gen_rtx_fmt_ee (reverse_condition (branch_code),
				     VOIDmode, temp, const0_rtx);
	    }
	  break;

	case LEU:
	  /* (x <= -1U) is always true; leave it for the general
	     path.  */
	  if (n == -1)
	    break;
	  /* FALLTHRU */

	case LTU:
	  /* Change ((unsigned)x < 0x1000) into !((unsigned)x >> 12),
	     etc.  Works only when the bound is a power of two.  */
	  {
	    int first = exact_log2 (code == LTU ? n : n + 1);
	    if (first != -1)
	      {
		op0 = force_reg (SImode, op0);
		temp = gen_reg_rtx (SImode);
		emit_move_insn (temp,
				gen_rtx_LSHIFTRT (SImode, op0,
						  gen_int_si (first)));
		return gen_rtx_fmt_ee (reverse_condition (branch_code),
				       VOIDmode, temp, const0_rtx);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Compute a flag saying whether we should branch.  */
  temp = gen_reg_rtx (SImode);
  tilepro_emit_setcc_internal (temp, code, op0, op1, cmp_mode);

  /* Return the branch comparison.  */
  return gen_rtx_fmt_ee (branch_code, VOIDmode, temp, const0_rtx);
}
2385 1.1 mrg
2386 1.1 mrg
2387 1.1 mrg /* Generate the comparison for a conditional branch. */
2388 1.1 mrg void
2389 1.1 mrg tilepro_emit_conditional_branch (rtx operands[], machine_mode cmp_mode)
2390 1.1 mrg {
2391 1.1 mrg rtx cmp_rtx =
2392 1.1 mrg tilepro_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],
2393 1.1 mrg cmp_mode, false);
2394 1.1 mrg rtx branch_rtx = gen_rtx_SET (pc_rtx,
2395 1.1 mrg gen_rtx_IF_THEN_ELSE (VOIDmode, cmp_rtx,
2396 1.1 mrg gen_rtx_LABEL_REF
2397 1.1 mrg (VOIDmode,
2398 1.1 mrg operands[3]),
2399 1.1 mrg pc_rtx));
2400 1.1 mrg emit_jump_insn (branch_rtx);
2401 1.1 mrg }
2402 1.1 mrg
2403 1.1 mrg
2404 1.1 mrg /* Implement the movsicc pattern. */
2405 1.1 mrg rtx
2406 1.1 mrg tilepro_emit_conditional_move (rtx cmp)
2407 1.1 mrg {
2408 1.1 mrg return
2409 1.1 mrg tilepro_emit_cc_test (GET_CODE (cmp), XEXP (cmp, 0), XEXP (cmp, 1),
2410 1.1 mrg GET_MODE (XEXP (cmp, 0)), true);
2411 1.1 mrg }
2412 1.1 mrg
2413 1.1 mrg
2414 1.1 mrg /* Return true if INSN is annotated with a REG_BR_PROB note that
2415 1.1 mrg indicates it's a branch that's predicted taken. */
2416 1.1 mrg static bool
2417 1.1 mrg cbranch_predicted_p (rtx_insn *insn)
2418 1.1 mrg {
2419 1.1 mrg rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2420 1.1 mrg
2421 1.1 mrg if (x)
2422 1.1 mrg {
2423 1.1 mrg return profile_probability::from_reg_br_prob_note (XINT (x, 0))
2424 1.1 mrg >= profile_probability::even ();
2425 1.1 mrg }
2426 1.1 mrg
2427 1.1 mrg return false;
2428 1.1 mrg }
2429 1.1 mrg
2430 1.1 mrg
2431 1.1 mrg /* Output assembly code for a specific branch instruction, appending
2432 1.1 mrg the branch prediction flag to the opcode if appropriate. */
2433 1.1 mrg static const char *
2434 1.1 mrg tilepro_output_simple_cbranch_with_opcode (rtx_insn *insn, const char *opcode,
2435 1.1 mrg int regop, bool netreg_p,
2436 1.1 mrg bool reverse_predicted)
2437 1.1 mrg {
2438 1.1 mrg static char buf[64];
2439 1.1 mrg sprintf (buf, "%s%s\t%%%c%d, %%l0", opcode,
2440 1.1 mrg (cbranch_predicted_p (insn) ^ reverse_predicted) ? "t" : "",
2441 1.1 mrg netreg_p ? 'N' : 'r', regop);
2442 1.1 mrg return buf;
2443 1.1 mrg }
2444 1.1 mrg
2445 1.1 mrg
/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.  OPCODE
   is the branch to use when the target is in range; when it is not,
   a REV_OPCODE branch around an unconditional jump is emitted
   instead.  REGOP and NETREG_P are as for
   tilepro_output_simple_cbranch_with_opcode.  */
const char *
tilepro_output_cbranch_with_opcode (rtx_insn *insn, rtx *operands,
				    const char *opcode,
				    const char *rev_opcode,
				    int regop, bool netreg_p)
{
  const char *branch_if_false;
  rtx taken, not_taken;
  bool is_simple_branch;

  gcc_assert (LABEL_P (operands[0]));

  /* If insn addresses are known, check whether the displacement fits
     the conditional branch's range (presumably the hardware's
     signed branch-offset limits — TODO confirm against the ISA).  */
  is_simple_branch = true;
  if (INSN_ADDRESSES_SET_P ())
    {
      int from_addr = INSN_ADDRESSES (INSN_UID (insn));
      int to_addr = INSN_ADDRESSES (INSN_UID (operands[0]));
      int delta = to_addr - from_addr;
      is_simple_branch = IN_RANGE (delta, -524288, 524280);
    }

  if (is_simple_branch)
    {
      /* Just a simple conditional branch.  */
      return
	tilepro_output_simple_cbranch_with_opcode (insn, opcode, regop,
						   netreg_p, false);
    }

  /* Generate a reversed branch around a direct jump.  This fallback
     does not use branch-likely instructions.  */
  not_taken = gen_label_rtx ();
  taken = operands[0];

  /* Generate the reversed branch to NOT_TAKEN.  */
  operands[0] = not_taken;
  branch_if_false =
    tilepro_output_simple_cbranch_with_opcode (insn, rev_opcode, regop,
					       netreg_p, true);
  output_asm_insn (branch_if_false, operands);

  /* Unconditional jump to the real target.  */
  output_asm_insn ("j\t%l0", &taken);

  /* Output NOT_TAKEN.  */
  targetm.asm_out.internal_label (asm_out_file, "L",
				  CODE_LABEL_NUMBER (not_taken));
  return "";
}
2496 1.1 mrg
2497 1.1 mrg
2498 1.1 mrg /* Output assembly code for a conditional branch instruction. */
2499 1.1 mrg const char *
2500 1.1 mrg tilepro_output_cbranch (rtx_insn *insn, rtx *operands, bool reversed)
2501 1.1 mrg {
2502 1.1 mrg enum rtx_code code = GET_CODE (operands[1]);
2503 1.1 mrg const char *opcode;
2504 1.1 mrg const char *rev_opcode;
2505 1.1 mrg
2506 1.1 mrg if (reversed)
2507 1.1 mrg code = reverse_condition (code);
2508 1.1 mrg
2509 1.1 mrg switch (code)
2510 1.1 mrg {
2511 1.1 mrg case NE:
2512 1.1 mrg opcode = "bnz";
2513 1.1 mrg rev_opcode = "bz";
2514 1.1 mrg break;
2515 1.1 mrg case EQ:
2516 1.1 mrg opcode = "bz";
2517 1.1 mrg rev_opcode = "bnz";
2518 1.1 mrg break;
2519 1.1 mrg case GE:
2520 1.1 mrg opcode = "bgez";
2521 1.1 mrg rev_opcode = "blz";
2522 1.1 mrg break;
2523 1.1 mrg case GT:
2524 1.1 mrg opcode = "bgz";
2525 1.1 mrg rev_opcode = "blez";
2526 1.1 mrg break;
2527 1.1 mrg case LE:
2528 1.1 mrg opcode = "blez";
2529 1.1 mrg rev_opcode = "bgz";
2530 1.1 mrg break;
2531 1.1 mrg case LT:
2532 1.1 mrg opcode = "blz";
2533 1.1 mrg rev_opcode = "bgez";
2534 1.1 mrg break;
2535 1.1 mrg default:
2536 1.1 mrg gcc_unreachable ();
2537 1.1 mrg }
2538 1.1 mrg
2539 1.1 mrg return
2540 1.1 mrg tilepro_output_cbranch_with_opcode (insn, operands, opcode, rev_opcode,
2541 1.1 mrg 2, false);
2542 1.1 mrg }
2543 1.1 mrg
2544 1.1 mrg
/* Implement the tablejump pattern.  OP0 is the index (already scaled
   into a table offset by the middle end), OP1 the jump-table label.
   Under -fpic the table holds pc-relative entries, so the target
   address is computed relative to the text label before jumping.  */
void
tilepro_expand_tablejump (rtx op0, rtx op1)
{
  if (flag_pic)
    {
      rtx table = gen_rtx_LABEL_REF (Pmode, op1);
      rtx temp = gen_reg_rtx (Pmode);
      rtx text_label_symbol = tilepro_text_label_symbol ();
      rtx text_label_rtx = tilepro_text_label_rtx ();

      /* Materialize the table's address pc-relatively (low then high
	 half), then add the index to form the jump target.  */
      emit_insn (gen_addli_pcrel (temp, text_label_rtx,
				  table, text_label_symbol));
      emit_insn (gen_auli_pcrel (temp, temp, table, text_label_symbol));
      emit_move_insn (temp,
		      gen_rtx_PLUS (Pmode,
				    convert_to_mode (Pmode, op0, false),
				    temp));
      op0 = temp;
    }

  emit_jump_insn (gen_tablejump_aux (op0, op1));
}
2568 1.1 mrg
2569 1.1 mrg
2570 1.1 mrg /* Expand a builtin vector binary op, by calling gen function GEN with
2571 1.1 mrg operands in the proper modes. DEST is converted to DEST_MODE, and
2572 1.1 mrg src0 and src1 (if DO_SRC1 is true) is converted to SRC_MODE. */
2573 1.1 mrg void
2574 1.1 mrg tilepro_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
2575 1.1 mrg machine_mode dest_mode,
2576 1.1 mrg rtx dest,
2577 1.1 mrg machine_mode src_mode,
2578 1.1 mrg rtx src0, rtx src1, bool do_src1)
2579 1.1 mrg {
2580 1.1 mrg dest = gen_lowpart (dest_mode, dest);
2581 1.1 mrg
2582 1.1 mrg if (src0 == const0_rtx)
2583 1.1 mrg src0 = CONST0_RTX (src_mode);
2584 1.1 mrg else
2585 1.1 mrg src0 = gen_lowpart (src_mode, src0);
2586 1.1 mrg
2587 1.1 mrg if (do_src1)
2588 1.1 mrg {
2589 1.1 mrg if (src1 == const0_rtx)
2590 1.1 mrg src1 = CONST0_RTX (src_mode);
2591 1.1 mrg else
2592 1.1 mrg src1 = gen_lowpart (src_mode, src1);
2593 1.1 mrg }
2594 1.1 mrg
2595 1.1 mrg emit_insn ((*gen) (dest, src0, src1));
2596 1.1 mrg }
2597 1.1 mrg
2598 1.1 mrg
2600 1.1 mrg
2601 1.1 mrg /* Intrinsics */
2602 1.1 mrg
/* Pairs a builtin's expander pattern with its function declaration.
   The table below initializes every fndecl to NULL; presumably it is
   filled in when the builtins are registered — confirm against the
   builtin-init code.  */
struct tile_builtin_info
{
  enum insn_code icode;	/* Insn pattern used to expand the builtin.  */
  tree fndecl;		/* FUNCTION_DECL for the builtin, or NULL.  */
};
2608 1.1 mrg
2609 1.1 mrg static struct tile_builtin_info tilepro_builtin_info[TILEPRO_BUILTIN_max] = {
2610 1.1 mrg { CODE_FOR_addsi3, NULL }, /* add */
2611 1.1 mrg { CODE_FOR_insn_addb, NULL }, /* addb */
2612 1.1 mrg { CODE_FOR_insn_addbs_u, NULL }, /* addbs_u */
2613 1.1 mrg { CODE_FOR_insn_addh, NULL }, /* addh */
2614 1.1 mrg { CODE_FOR_insn_addhs, NULL }, /* addhs */
2615 1.1 mrg { CODE_FOR_insn_addib, NULL }, /* addib */
2616 1.1 mrg { CODE_FOR_insn_addih, NULL }, /* addih */
2617 1.1 mrg { CODE_FOR_insn_addlis, NULL }, /* addlis */
2618 1.1 mrg { CODE_FOR_ssaddsi3, NULL }, /* adds */
2619 1.1 mrg { CODE_FOR_insn_adiffb_u, NULL }, /* adiffb_u */
2620 1.1 mrg { CODE_FOR_insn_adiffh, NULL }, /* adiffh */
2621 1.1 mrg { CODE_FOR_andsi3, NULL }, /* and */
2622 1.1 mrg { CODE_FOR_insn_auli, NULL }, /* auli */
2623 1.1 mrg { CODE_FOR_insn_avgb_u, NULL }, /* avgb_u */
2624 1.1 mrg { CODE_FOR_insn_avgh, NULL }, /* avgh */
2625 1.1 mrg { CODE_FOR_insn_bitx, NULL }, /* bitx */
2626 1.1 mrg { CODE_FOR_bswapsi2, NULL }, /* bytex */
2627 1.1 mrg { CODE_FOR_clzsi2, NULL }, /* clz */
2628 1.1 mrg { CODE_FOR_insn_crc32_32, NULL }, /* crc32_32 */
2629 1.1 mrg { CODE_FOR_insn_crc32_8, NULL }, /* crc32_8 */
2630 1.1 mrg { CODE_FOR_ctzsi2, NULL }, /* ctz */
2631 1.1 mrg { CODE_FOR_insn_drain, NULL }, /* drain */
2632 1.1 mrg { CODE_FOR_insn_dtlbpr, NULL }, /* dtlbpr */
2633 1.1 mrg { CODE_FOR_insn_dword_align, NULL }, /* dword_align */
2634 1.1 mrg { CODE_FOR_insn_finv, NULL }, /* finv */
2635 1.1 mrg { CODE_FOR_insn_flush, NULL }, /* flush */
2636 1.1 mrg { CODE_FOR_insn_fnop, NULL }, /* fnop */
2637 1.1 mrg { CODE_FOR_insn_icoh, NULL }, /* icoh */
2638 1.1 mrg { CODE_FOR_insn_ill, NULL }, /* ill */
2639 1.1 mrg { CODE_FOR_insn_info, NULL }, /* info */
2640 1.1 mrg { CODE_FOR_insn_infol, NULL }, /* infol */
2641 1.1 mrg { CODE_FOR_insn_inthb, NULL }, /* inthb */
2642 1.1 mrg { CODE_FOR_insn_inthh, NULL }, /* inthh */
2643 1.1 mrg { CODE_FOR_insn_intlb, NULL }, /* intlb */
2644 1.1 mrg { CODE_FOR_insn_intlh, NULL }, /* intlh */
2645 1.1 mrg { CODE_FOR_insn_inv, NULL }, /* inv */
2646 1.1 mrg { CODE_FOR_insn_lb, NULL }, /* lb */
2647 1.1 mrg { CODE_FOR_insn_lb_u, NULL }, /* lb_u */
2648 1.1 mrg { CODE_FOR_insn_lh, NULL }, /* lh */
2649 1.1 mrg { CODE_FOR_insn_lh_u, NULL }, /* lh_u */
2650 1.1 mrg { CODE_FOR_insn_lnk, NULL }, /* lnk */
2651 1.1 mrg { CODE_FOR_insn_lw, NULL }, /* lw */
2652 1.1 mrg { CODE_FOR_insn_lw_na, NULL }, /* lw_na */
2653 1.1 mrg { CODE_FOR_insn_lb_L2, NULL }, /* lb_L2 */
2654 1.1 mrg { CODE_FOR_insn_lb_u_L2, NULL }, /* lb_u_L2 */
2655 1.1 mrg { CODE_FOR_insn_lh_L2, NULL }, /* lh_L2 */
2656 1.1 mrg { CODE_FOR_insn_lh_u_L2, NULL }, /* lh_u_L2 */
2657 1.1 mrg { CODE_FOR_insn_lw_L2, NULL }, /* lw_L2 */
2658 1.1 mrg { CODE_FOR_insn_lw_na_L2, NULL }, /* lw_na_L2 */
2659 1.1 mrg { CODE_FOR_insn_lb_miss, NULL }, /* lb_miss */
2660 1.1 mrg { CODE_FOR_insn_lb_u_miss, NULL }, /* lb_u_miss */
2661 1.1 mrg { CODE_FOR_insn_lh_miss, NULL }, /* lh_miss */
2662 1.1 mrg { CODE_FOR_insn_lh_u_miss, NULL }, /* lh_u_miss */
2663 1.1 mrg { CODE_FOR_insn_lw_miss, NULL }, /* lw_miss */
2664 1.1 mrg { CODE_FOR_insn_lw_na_miss, NULL }, /* lw_na_miss */
2665 1.1 mrg { CODE_FOR_insn_maxb_u, NULL }, /* maxb_u */
2666 1.1 mrg { CODE_FOR_insn_maxh, NULL }, /* maxh */
2667 1.1 mrg { CODE_FOR_insn_maxib_u, NULL }, /* maxib_u */
2668 1.1 mrg { CODE_FOR_insn_maxih, NULL }, /* maxih */
2669 1.1 mrg { CODE_FOR_memory_barrier, NULL }, /* mf */
2670 1.1 mrg { CODE_FOR_insn_mfspr, NULL }, /* mfspr */
2671 1.1 mrg { CODE_FOR_insn_minb_u, NULL }, /* minb_u */
2672 1.1 mrg { CODE_FOR_insn_minh, NULL }, /* minh */
2673 1.1 mrg { CODE_FOR_insn_minib_u, NULL }, /* minib_u */
2674 1.1 mrg { CODE_FOR_insn_minih, NULL }, /* minih */
2675 1.1 mrg { CODE_FOR_insn_mm, NULL }, /* mm */
2676 1.1 mrg { CODE_FOR_insn_mnz, NULL }, /* mnz */
2677 1.1 mrg { CODE_FOR_insn_mnzb, NULL }, /* mnzb */
2678 1.1 mrg { CODE_FOR_insn_mnzh, NULL }, /* mnzh */
2679 1.1 mrg { CODE_FOR_movsi, NULL }, /* move */
2680 1.1 mrg { CODE_FOR_insn_movelis, NULL }, /* movelis */
2681 1.1 mrg { CODE_FOR_insn_mtspr, NULL }, /* mtspr */
2682 1.1 mrg { CODE_FOR_insn_mulhh_ss, NULL }, /* mulhh_ss */
2683 1.1 mrg { CODE_FOR_insn_mulhh_su, NULL }, /* mulhh_su */
2684 1.1 mrg { CODE_FOR_insn_mulhh_uu, NULL }, /* mulhh_uu */
2685 1.1 mrg { CODE_FOR_insn_mulhha_ss, NULL }, /* mulhha_ss */
2686 1.1 mrg { CODE_FOR_insn_mulhha_su, NULL }, /* mulhha_su */
2687 1.1 mrg { CODE_FOR_insn_mulhha_uu, NULL }, /* mulhha_uu */
2688 1.1 mrg { CODE_FOR_insn_mulhhsa_uu, NULL }, /* mulhhsa_uu */
2689 1.1 mrg { CODE_FOR_insn_mulhl_ss, NULL }, /* mulhl_ss */
2690 1.1 mrg { CODE_FOR_insn_mulhl_su, NULL }, /* mulhl_su */
2691 1.1 mrg { CODE_FOR_insn_mulhl_us, NULL }, /* mulhl_us */
2692 1.1 mrg { CODE_FOR_insn_mulhl_uu, NULL }, /* mulhl_uu */
2693 1.1 mrg { CODE_FOR_insn_mulhla_ss, NULL }, /* mulhla_ss */
2694 1.1 mrg { CODE_FOR_insn_mulhla_su, NULL }, /* mulhla_su */
2695 1.1 mrg { CODE_FOR_insn_mulhla_us, NULL }, /* mulhla_us */
2696 1.1 mrg { CODE_FOR_insn_mulhla_uu, NULL }, /* mulhla_uu */
2697 1.1 mrg { CODE_FOR_insn_mulhlsa_uu, NULL }, /* mulhlsa_uu */
2698 1.1 mrg { CODE_FOR_insn_mulll_ss, NULL }, /* mulll_ss */
2699 1.1 mrg { CODE_FOR_insn_mulll_su, NULL }, /* mulll_su */
2700 1.1 mrg { CODE_FOR_insn_mulll_uu, NULL }, /* mulll_uu */
2701 1.1 mrg { CODE_FOR_insn_mullla_ss, NULL }, /* mullla_ss */
2702 1.1 mrg { CODE_FOR_insn_mullla_su, NULL }, /* mullla_su */
2703 1.1 mrg { CODE_FOR_insn_mullla_uu, NULL }, /* mullla_uu */
2704 1.1 mrg { CODE_FOR_insn_mulllsa_uu, NULL }, /* mulllsa_uu */
2705 1.1 mrg { CODE_FOR_insn_mvnz, NULL }, /* mvnz */
2706 1.1 mrg { CODE_FOR_insn_mvz, NULL }, /* mvz */
2707 1.1 mrg { CODE_FOR_insn_mz, NULL }, /* mz */
2708 1.1 mrg { CODE_FOR_insn_mzb, NULL }, /* mzb */
2709 1.1 mrg { CODE_FOR_insn_mzh, NULL }, /* mzh */
2710 1.1 mrg { CODE_FOR_insn_nap, NULL }, /* nap */
2711 1.1 mrg { CODE_FOR_nop, NULL }, /* nop */
2712 1.1 mrg { CODE_FOR_insn_nor, NULL }, /* nor */
2713 1.1 mrg { CODE_FOR_iorsi3, NULL }, /* or */
2714 1.1 mrg { CODE_FOR_insn_packbs_u, NULL }, /* packbs_u */
2715 1.1 mrg { CODE_FOR_insn_packhb, NULL }, /* packhb */
2716 1.1 mrg { CODE_FOR_insn_packhs, NULL }, /* packhs */
2717 1.1 mrg { CODE_FOR_insn_packlb, NULL }, /* packlb */
2718 1.1 mrg { CODE_FOR_popcountsi2, NULL }, /* pcnt */
2719 1.1 mrg { CODE_FOR_insn_prefetch, NULL }, /* prefetch */
2720 1.1 mrg { CODE_FOR_insn_prefetch_L1, NULL }, /* prefetch_L1 */
2721 1.1 mrg { CODE_FOR_rotlsi3, NULL }, /* rl */
2722 1.1 mrg { CODE_FOR_insn_s1a, NULL }, /* s1a */
2723 1.1 mrg { CODE_FOR_insn_s2a, NULL }, /* s2a */
2724 1.1 mrg { CODE_FOR_insn_s3a, NULL }, /* s3a */
2725 1.1 mrg { CODE_FOR_insn_sadab_u, NULL }, /* sadab_u */
2726 1.1 mrg { CODE_FOR_insn_sadah, NULL }, /* sadah */
2727 1.1 mrg { CODE_FOR_insn_sadah_u, NULL }, /* sadah_u */
2728 1.1 mrg { CODE_FOR_insn_sadb_u, NULL }, /* sadb_u */
2729 1.1 mrg { CODE_FOR_insn_sadh, NULL }, /* sadh */
2730 1.1 mrg { CODE_FOR_insn_sadh_u, NULL }, /* sadh_u */
2731 1.1 mrg { CODE_FOR_insn_sb, NULL }, /* sb */
2732 1.1 mrg { CODE_FOR_insn_seq, NULL }, /* seq */
2733 1.1 mrg { CODE_FOR_insn_seqb, NULL }, /* seqb */
2734 1.1 mrg { CODE_FOR_insn_seqh, NULL }, /* seqh */
2735 1.1 mrg { CODE_FOR_insn_seqib, NULL }, /* seqib */
2736 1.1 mrg { CODE_FOR_insn_seqih, NULL }, /* seqih */
2737 1.1 mrg { CODE_FOR_insn_sh, NULL }, /* sh */
2738 1.1 mrg { CODE_FOR_ashlsi3, NULL }, /* shl */
2739 1.1 mrg { CODE_FOR_insn_shlb, NULL }, /* shlb */
2740 1.1 mrg { CODE_FOR_insn_shlh, NULL }, /* shlh */
2741 1.1 mrg { CODE_FOR_insn_shlb, NULL }, /* shlib */
2742 1.1 mrg { CODE_FOR_insn_shlh, NULL }, /* shlih */
2743 1.1 mrg { CODE_FOR_lshrsi3, NULL }, /* shr */
2744 1.1 mrg { CODE_FOR_insn_shrb, NULL }, /* shrb */
2745 1.1 mrg { CODE_FOR_insn_shrh, NULL }, /* shrh */
2746 1.1 mrg { CODE_FOR_insn_shrb, NULL }, /* shrib */
2747 1.1 mrg { CODE_FOR_insn_shrh, NULL }, /* shrih */
2748 1.1 mrg { CODE_FOR_insn_slt, NULL }, /* slt */
2749 1.1 mrg { CODE_FOR_insn_slt_u, NULL }, /* slt_u */
2750 1.1 mrg { CODE_FOR_insn_sltb, NULL }, /* sltb */
2751 1.1 mrg { CODE_FOR_insn_sltb_u, NULL }, /* sltb_u */
2752 1.1 mrg { CODE_FOR_insn_slte, NULL }, /* slte */
2753 1.1 mrg { CODE_FOR_insn_slte_u, NULL }, /* slte_u */
2754 1.1 mrg { CODE_FOR_insn_slteb, NULL }, /* slteb */
2755 1.1 mrg { CODE_FOR_insn_slteb_u, NULL }, /* slteb_u */
2756 1.1 mrg { CODE_FOR_insn_slteh, NULL }, /* slteh */
2757 1.1 mrg { CODE_FOR_insn_slteh_u, NULL }, /* slteh_u */
2758 1.1 mrg { CODE_FOR_insn_slth, NULL }, /* slth */
2759 1.1 mrg { CODE_FOR_insn_slth_u, NULL }, /* slth_u */
2760 1.1 mrg { CODE_FOR_insn_sltib, NULL }, /* sltib */
2761 1.1 mrg { CODE_FOR_insn_sltib_u, NULL }, /* sltib_u */
2762 1.1 mrg { CODE_FOR_insn_sltih, NULL }, /* sltih */
2763 1.1 mrg { CODE_FOR_insn_sltih_u, NULL }, /* sltih_u */
2764 1.1 mrg { CODE_FOR_insn_sne, NULL }, /* sne */
2765 1.1 mrg { CODE_FOR_insn_sneb, NULL }, /* sneb */
2766 1.1 mrg { CODE_FOR_insn_sneh, NULL }, /* sneh */
2767 1.1 mrg { CODE_FOR_ashrsi3, NULL }, /* sra */
2768 1.1 mrg { CODE_FOR_insn_srab, NULL }, /* srab */
2769 1.1 mrg { CODE_FOR_insn_srah, NULL }, /* srah */
2770 1.1 mrg { CODE_FOR_insn_srab, NULL }, /* sraib */
2771 1.1 mrg { CODE_FOR_insn_srah, NULL }, /* sraih */
2772 1.1 mrg { CODE_FOR_subsi3, NULL }, /* sub */
2773 1.1 mrg { CODE_FOR_insn_subb, NULL }, /* subb */
2774 1.1 mrg { CODE_FOR_insn_subbs_u, NULL }, /* subbs_u */
2775 1.1 mrg { CODE_FOR_insn_subh, NULL }, /* subh */
2776 1.1 mrg { CODE_FOR_insn_subhs, NULL }, /* subhs */
2777 1.1 mrg { CODE_FOR_sssubsi3, NULL }, /* subs */
2778 1.1 mrg { CODE_FOR_insn_sw, NULL }, /* sw */
2779 1.1 mrg { CODE_FOR_insn_tblidxb0, NULL }, /* tblidxb0 */
2780 1.1 mrg { CODE_FOR_insn_tblidxb1, NULL }, /* tblidxb1 */
2781 1.1 mrg { CODE_FOR_insn_tblidxb2, NULL }, /* tblidxb2 */
2782 1.1 mrg { CODE_FOR_insn_tblidxb3, NULL }, /* tblidxb3 */
2783 1.1 mrg { CODE_FOR_insn_tns, NULL }, /* tns */
2784 1.1 mrg { CODE_FOR_insn_wh64, NULL }, /* wh64 */
2785 1.1 mrg { CODE_FOR_xorsi3, NULL }, /* xor */
2786 1.1 mrg { CODE_FOR_tilepro_network_barrier, NULL }, /* network_barrier */
2787 1.1 mrg { CODE_FOR_tilepro_idn0_receive, NULL }, /* idn0_receive */
2788 1.1 mrg { CODE_FOR_tilepro_idn1_receive, NULL }, /* idn1_receive */
2789 1.1 mrg { CODE_FOR_tilepro_idn_send, NULL }, /* idn_send */
2790 1.1 mrg { CODE_FOR_tilepro_sn_receive, NULL }, /* sn_receive */
2791 1.1 mrg { CODE_FOR_tilepro_sn_send, NULL }, /* sn_send */
2792 1.1 mrg { CODE_FOR_tilepro_udn0_receive, NULL }, /* udn0_receive */
2793 1.1 mrg { CODE_FOR_tilepro_udn1_receive, NULL }, /* udn1_receive */
2794 1.1 mrg { CODE_FOR_tilepro_udn2_receive, NULL }, /* udn2_receive */
2795 1.1 mrg { CODE_FOR_tilepro_udn3_receive, NULL }, /* udn3_receive */
2796 1.1 mrg { CODE_FOR_tilepro_udn_send, NULL }, /* udn_send */
2797 1.1 mrg };
2798 1.1 mrg
2799 1.1 mrg
/* Description of one TILEPro builtin function, used by
   tilepro_init_builtins to register the builtin with the middle
   end.  */
struct tilepro_builtin_def
{
  /* User-visible name, e.g. "__insn_add" or "__tile_udn_send".  */
  const char *name;
  /* Builtin code; indexes tilepro_builtin_info.  */
  enum tilepro_builtin code;
  /* True if the builtin is a pure function of its arguments; such
     decls are marked TREE_READONLY.  */
  bool is_const;
  /* The first character is the return type.  Subsequent characters
     are the argument types.  See char_to_type.  */
  const char *type;
};
2809 1.1 mrg
2810 1.1 mrg
/* Table of all TILEPro builtins, in alphabetical order within each
   prefix.  Several source-level pseudo-ops (immediate forms such as
   "addi"/"addli", or "movei"/"moveli") deliberately share a builtin
   code with their register form, since the expander accepts either
   operand kind.  */
static const struct tilepro_builtin_def tilepro_builtins[] = {
  { "__insn_add", TILEPRO_INSN_ADD, true, "lll" },
  { "__insn_addb", TILEPRO_INSN_ADDB, true, "lll" },
  { "__insn_addbs_u", TILEPRO_INSN_ADDBS_U, false, "lll" },
  { "__insn_addh", TILEPRO_INSN_ADDH, true, "lll" },
  { "__insn_addhs", TILEPRO_INSN_ADDHS, false, "lll" },
  { "__insn_addi", TILEPRO_INSN_ADD, true, "lll" },
  { "__insn_addib", TILEPRO_INSN_ADDIB, true, "lll" },
  { "__insn_addih", TILEPRO_INSN_ADDIH, true, "lll" },
  { "__insn_addli", TILEPRO_INSN_ADD, true, "lll" },
  { "__insn_addlis", TILEPRO_INSN_ADDLIS, false, "lll" },
  { "__insn_adds", TILEPRO_INSN_ADDS, false, "lll" },
  { "__insn_adiffb_u", TILEPRO_INSN_ADIFFB_U, true, "lll" },
  { "__insn_adiffh", TILEPRO_INSN_ADIFFH, true, "lll" },
  { "__insn_and", TILEPRO_INSN_AND, true, "lll" },
  { "__insn_andi", TILEPRO_INSN_AND, true, "lll" },
  { "__insn_auli", TILEPRO_INSN_AULI, true, "lll" },
  { "__insn_avgb_u", TILEPRO_INSN_AVGB_U, true, "lll" },
  { "__insn_avgh", TILEPRO_INSN_AVGH, true, "lll" },
  { "__insn_bitx", TILEPRO_INSN_BITX, true, "ll" },
  { "__insn_bytex", TILEPRO_INSN_BYTEX, true, "ll" },
  { "__insn_clz", TILEPRO_INSN_CLZ, true, "ll" },
  { "__insn_crc32_32", TILEPRO_INSN_CRC32_32, true, "lll" },
  { "__insn_crc32_8", TILEPRO_INSN_CRC32_8, true, "lll" },
  { "__insn_ctz", TILEPRO_INSN_CTZ, true, "ll" },
  { "__insn_drain", TILEPRO_INSN_DRAIN, false, "v" },
  { "__insn_dtlbpr", TILEPRO_INSN_DTLBPR, false, "vl" },
  { "__insn_dword_align", TILEPRO_INSN_DWORD_ALIGN, true, "lllk" },
  { "__insn_finv", TILEPRO_INSN_FINV, false, "vk" },
  { "__insn_flush", TILEPRO_INSN_FLUSH, false, "vk" },
  { "__insn_fnop", TILEPRO_INSN_FNOP, false, "v" },
  { "__insn_icoh", TILEPRO_INSN_ICOH, false, "vk" },
  { "__insn_ill", TILEPRO_INSN_ILL, false, "v" },
  { "__insn_info", TILEPRO_INSN_INFO, false, "vl" },
  { "__insn_infol", TILEPRO_INSN_INFOL, false, "vl" },
  { "__insn_inthb", TILEPRO_INSN_INTHB, true, "lll" },
  { "__insn_inthh", TILEPRO_INSN_INTHH, true, "lll" },
  { "__insn_intlb", TILEPRO_INSN_INTLB, true, "lll" },
  { "__insn_intlh", TILEPRO_INSN_INTLH, true, "lll" },
  { "__insn_inv", TILEPRO_INSN_INV, false, "vp" },
  { "__insn_lb", TILEPRO_INSN_LB, false, "lk" },
  { "__insn_lb_u", TILEPRO_INSN_LB_U, false, "lk" },
  { "__insn_lh", TILEPRO_INSN_LH, false, "lk" },
  { "__insn_lh_u", TILEPRO_INSN_LH_U, false, "lk" },
  { "__insn_lnk", TILEPRO_INSN_LNK, true, "l" },
  { "__insn_lw", TILEPRO_INSN_LW, false, "lk" },
  { "__insn_lw_na", TILEPRO_INSN_LW_NA, false, "lk" },
  { "__insn_lb_L2", TILEPRO_INSN_LB_L2, false, "lk" },
  { "__insn_lb_u_L2", TILEPRO_INSN_LB_U_L2, false, "lk" },
  { "__insn_lh_L2", TILEPRO_INSN_LH_L2, false, "lk" },
  { "__insn_lh_u_L2", TILEPRO_INSN_LH_U_L2, false, "lk" },
  { "__insn_lw_L2", TILEPRO_INSN_LW_L2, false, "lk" },
  { "__insn_lw_na_L2", TILEPRO_INSN_LW_NA_L2, false, "lk" },
  { "__insn_lb_miss", TILEPRO_INSN_LB_MISS, false, "lk" },
  { "__insn_lb_u_miss", TILEPRO_INSN_LB_U_MISS, false, "lk" },
  { "__insn_lh_miss", TILEPRO_INSN_LH_MISS, false, "lk" },
  { "__insn_lh_u_miss", TILEPRO_INSN_LH_U_MISS, false, "lk" },
  { "__insn_lw_miss", TILEPRO_INSN_LW_MISS, false, "lk" },
  { "__insn_lw_na_miss", TILEPRO_INSN_LW_NA_MISS, false, "lk" },
  { "__insn_maxb_u", TILEPRO_INSN_MAXB_U, true, "lll" },
  { "__insn_maxh", TILEPRO_INSN_MAXH, true, "lll" },
  { "__insn_maxib_u", TILEPRO_INSN_MAXIB_U, true, "lll" },
  { "__insn_maxih", TILEPRO_INSN_MAXIH, true, "lll" },
  { "__insn_mf", TILEPRO_INSN_MF, false, "v" },
  { "__insn_mfspr", TILEPRO_INSN_MFSPR, false, "ll" },
  { "__insn_minb_u", TILEPRO_INSN_MINB_U, true, "lll" },
  { "__insn_minh", TILEPRO_INSN_MINH, true, "lll" },
  { "__insn_minib_u", TILEPRO_INSN_MINIB_U, true, "lll" },
  { "__insn_minih", TILEPRO_INSN_MINIH, true, "lll" },
  { "__insn_mm", TILEPRO_INSN_MM, true, "lllll" },
  { "__insn_mnz", TILEPRO_INSN_MNZ, true, "lll" },
  { "__insn_mnzb", TILEPRO_INSN_MNZB, true, "lll" },
  { "__insn_mnzh", TILEPRO_INSN_MNZH, true, "lll" },
  { "__insn_move", TILEPRO_INSN_MOVE, true, "ll" },
  { "__insn_movei", TILEPRO_INSN_MOVE, true, "ll" },
  { "__insn_moveli", TILEPRO_INSN_MOVE, true, "ll" },
  { "__insn_movelis", TILEPRO_INSN_MOVELIS, false, "ll" },
  { "__insn_mtspr", TILEPRO_INSN_MTSPR, false, "vll" },
  { "__insn_mulhh_ss", TILEPRO_INSN_MULHH_SS, true, "lll" },
  { "__insn_mulhh_su", TILEPRO_INSN_MULHH_SU, true, "lll" },
  { "__insn_mulhh_uu", TILEPRO_INSN_MULHH_UU, true, "lll" },
  { "__insn_mulhha_ss", TILEPRO_INSN_MULHHA_SS, true, "llll" },
  { "__insn_mulhha_su", TILEPRO_INSN_MULHHA_SU, true, "llll" },
  { "__insn_mulhha_uu", TILEPRO_INSN_MULHHA_UU, true, "llll" },
  { "__insn_mulhhsa_uu", TILEPRO_INSN_MULHHSA_UU, true, "llll" },
  { "__insn_mulhl_ss", TILEPRO_INSN_MULHL_SS, true, "lll" },
  { "__insn_mulhl_su", TILEPRO_INSN_MULHL_SU, true, "lll" },
  { "__insn_mulhl_us", TILEPRO_INSN_MULHL_US, true, "lll" },
  { "__insn_mulhl_uu", TILEPRO_INSN_MULHL_UU, true, "lll" },
  { "__insn_mulhla_ss", TILEPRO_INSN_MULHLA_SS, true, "llll" },
  { "__insn_mulhla_su", TILEPRO_INSN_MULHLA_SU, true, "llll" },
  { "__insn_mulhla_us", TILEPRO_INSN_MULHLA_US, true, "llll" },
  { "__insn_mulhla_uu", TILEPRO_INSN_MULHLA_UU, true, "llll" },
  { "__insn_mulhlsa_uu", TILEPRO_INSN_MULHLSA_UU, true, "llll" },
  { "__insn_mulll_ss", TILEPRO_INSN_MULLL_SS, true, "lll" },
  { "__insn_mulll_su", TILEPRO_INSN_MULLL_SU, true, "lll" },
  { "__insn_mulll_uu", TILEPRO_INSN_MULLL_UU, true, "lll" },
  { "__insn_mullla_ss", TILEPRO_INSN_MULLLA_SS, true, "llll" },
  { "__insn_mullla_su", TILEPRO_INSN_MULLLA_SU, true, "llll" },
  { "__insn_mullla_uu", TILEPRO_INSN_MULLLA_UU, true, "llll" },
  { "__insn_mulllsa_uu", TILEPRO_INSN_MULLLSA_UU, true, "llll" },
  { "__insn_mvnz", TILEPRO_INSN_MVNZ, true, "llll" },
  { "__insn_mvz", TILEPRO_INSN_MVZ, true, "llll" },
  { "__insn_mz", TILEPRO_INSN_MZ, true, "lll" },
  { "__insn_mzb", TILEPRO_INSN_MZB, true, "lll" },
  { "__insn_mzh", TILEPRO_INSN_MZH, true, "lll" },
  { "__insn_nap", TILEPRO_INSN_NAP, false, "v" },
  { "__insn_nop", TILEPRO_INSN_NOP, true, "v" },
  { "__insn_nor", TILEPRO_INSN_NOR, true, "lll" },
  { "__insn_or", TILEPRO_INSN_OR, true, "lll" },
  { "__insn_ori", TILEPRO_INSN_OR, true, "lll" },
  { "__insn_packbs_u", TILEPRO_INSN_PACKBS_U, false, "lll" },
  { "__insn_packhb", TILEPRO_INSN_PACKHB, true, "lll" },
  { "__insn_packhs", TILEPRO_INSN_PACKHS, false, "lll" },
  { "__insn_packlb", TILEPRO_INSN_PACKLB, true, "lll" },
  { "__insn_pcnt", TILEPRO_INSN_PCNT, true, "ll" },
  { "__insn_prefetch", TILEPRO_INSN_PREFETCH, false, "vk" },
  { "__insn_prefetch_L1", TILEPRO_INSN_PREFETCH_L1, false, "vk" },
  { "__insn_rl", TILEPRO_INSN_RL, true, "lll" },
  { "__insn_rli", TILEPRO_INSN_RL, true, "lll" },
  { "__insn_s1a", TILEPRO_INSN_S1A, true, "lll" },
  { "__insn_s2a", TILEPRO_INSN_S2A, true, "lll" },
  { "__insn_s3a", TILEPRO_INSN_S3A, true, "lll" },
  { "__insn_sadab_u", TILEPRO_INSN_SADAB_U, true, "llll" },
  { "__insn_sadah", TILEPRO_INSN_SADAH, true, "llll" },
  { "__insn_sadah_u", TILEPRO_INSN_SADAH_U, true, "llll" },
  { "__insn_sadb_u", TILEPRO_INSN_SADB_U, true, "lll" },
  { "__insn_sadh", TILEPRO_INSN_SADH, true, "lll" },
  { "__insn_sadh_u", TILEPRO_INSN_SADH_U, true, "lll" },
  { "__insn_sb", TILEPRO_INSN_SB, false, "vpl" },
  { "__insn_seq", TILEPRO_INSN_SEQ, true, "lll" },
  { "__insn_seqb", TILEPRO_INSN_SEQB, true, "lll" },
  { "__insn_seqh", TILEPRO_INSN_SEQH, true, "lll" },
  { "__insn_seqi", TILEPRO_INSN_SEQ, true, "lll" },
  { "__insn_seqib", TILEPRO_INSN_SEQIB, true, "lll" },
  { "__insn_seqih", TILEPRO_INSN_SEQIH, true, "lll" },
  { "__insn_sh", TILEPRO_INSN_SH, false, "vpl" },
  { "__insn_shl", TILEPRO_INSN_SHL, true, "lll" },
  { "__insn_shlb", TILEPRO_INSN_SHLB, true, "lll" },
  { "__insn_shlh", TILEPRO_INSN_SHLH, true, "lll" },
  { "__insn_shli", TILEPRO_INSN_SHL, true, "lll" },
  { "__insn_shlib", TILEPRO_INSN_SHLIB, true, "lll" },
  { "__insn_shlih", TILEPRO_INSN_SHLIH, true, "lll" },
  { "__insn_shr", TILEPRO_INSN_SHR, true, "lll" },
  { "__insn_shrb", TILEPRO_INSN_SHRB, true, "lll" },
  { "__insn_shrh", TILEPRO_INSN_SHRH, true, "lll" },
  { "__insn_shri", TILEPRO_INSN_SHR, true, "lll" },
  { "__insn_shrib", TILEPRO_INSN_SHRIB, true, "lll" },
  { "__insn_shrih", TILEPRO_INSN_SHRIH, true, "lll" },
  { "__insn_slt", TILEPRO_INSN_SLT, true, "lll" },
  { "__insn_slt_u", TILEPRO_INSN_SLT_U, true, "lll" },
  { "__insn_sltb", TILEPRO_INSN_SLTB, true, "lll" },
  { "__insn_sltb_u", TILEPRO_INSN_SLTB_U, true, "lll" },
  { "__insn_slte", TILEPRO_INSN_SLTE, true, "lll" },
  { "__insn_slte_u", TILEPRO_INSN_SLTE_U, true, "lll" },
  { "__insn_slteb", TILEPRO_INSN_SLTEB, true, "lll" },
  { "__insn_slteb_u", TILEPRO_INSN_SLTEB_U, true, "lll" },
  { "__insn_slteh", TILEPRO_INSN_SLTEH, true, "lll" },
  { "__insn_slteh_u", TILEPRO_INSN_SLTEH_U, true, "lll" },
  { "__insn_slth", TILEPRO_INSN_SLTH, true, "lll" },
  { "__insn_slth_u", TILEPRO_INSN_SLTH_U, true, "lll" },
  { "__insn_slti", TILEPRO_INSN_SLT, true, "lll" },
  { "__insn_slti_u", TILEPRO_INSN_SLT_U, true, "lll" },
  { "__insn_sltib", TILEPRO_INSN_SLTIB, true, "lll" },
  { "__insn_sltib_u", TILEPRO_INSN_SLTIB_U, true, "lll" },
  { "__insn_sltih", TILEPRO_INSN_SLTIH, true, "lll" },
  { "__insn_sltih_u", TILEPRO_INSN_SLTIH_U, true, "lll" },
  { "__insn_sne", TILEPRO_INSN_SNE, true, "lll" },
  { "__insn_sneb", TILEPRO_INSN_SNEB, true, "lll" },
  { "__insn_sneh", TILEPRO_INSN_SNEH, true, "lll" },
  { "__insn_sra", TILEPRO_INSN_SRA, true, "lll" },
  { "__insn_srab", TILEPRO_INSN_SRAB, true, "lll" },
  { "__insn_srah", TILEPRO_INSN_SRAH, true, "lll" },
  { "__insn_srai", TILEPRO_INSN_SRA, true, "lll" },
  { "__insn_sraib", TILEPRO_INSN_SRAIB, true, "lll" },
  { "__insn_sraih", TILEPRO_INSN_SRAIH, true, "lll" },
  { "__insn_sub", TILEPRO_INSN_SUB, true, "lll" },
  { "__insn_subb", TILEPRO_INSN_SUBB, true, "lll" },
  { "__insn_subbs_u", TILEPRO_INSN_SUBBS_U, false, "lll" },
  { "__insn_subh", TILEPRO_INSN_SUBH, true, "lll" },
  { "__insn_subhs", TILEPRO_INSN_SUBHS, false, "lll" },
  { "__insn_subs", TILEPRO_INSN_SUBS, false, "lll" },
  { "__insn_sw", TILEPRO_INSN_SW, false, "vpl" },
  { "__insn_tblidxb0", TILEPRO_INSN_TBLIDXB0, true, "lll" },
  { "__insn_tblidxb1", TILEPRO_INSN_TBLIDXB1, true, "lll" },
  { "__insn_tblidxb2", TILEPRO_INSN_TBLIDXB2, true, "lll" },
  { "__insn_tblidxb3", TILEPRO_INSN_TBLIDXB3, true, "lll" },
  { "__insn_tns", TILEPRO_INSN_TNS, false, "lp" },
  { "__insn_wh64", TILEPRO_INSN_WH64, false, "vp" },
  { "__insn_xor", TILEPRO_INSN_XOR, true, "lll" },
  { "__insn_xori", TILEPRO_INSN_XOR, true, "lll" },
  /* Network (IDN/SN/UDN) intrinsics use the "__tile_" prefix.  */
  { "__tile_network_barrier", TILEPRO_NETWORK_BARRIER, false, "v" },
  { "__tile_idn0_receive", TILEPRO_IDN0_RECEIVE, false, "l" },
  { "__tile_idn1_receive", TILEPRO_IDN1_RECEIVE, false, "l" },
  { "__tile_idn_send", TILEPRO_IDN_SEND, false, "vl" },
  { "__tile_sn_receive", TILEPRO_SN_RECEIVE, false, "l" },
  { "__tile_sn_send", TILEPRO_SN_SEND, false, "vl" },
  { "__tile_udn0_receive", TILEPRO_UDN0_RECEIVE, false, "l" },
  { "__tile_udn1_receive", TILEPRO_UDN1_RECEIVE, false, "l" },
  { "__tile_udn2_receive", TILEPRO_UDN2_RECEIVE, false, "l" },
  { "__tile_udn3_receive", TILEPRO_UDN3_RECEIVE, false, "l" },
  { "__tile_udn_send", TILEPRO_UDN_SEND, false, "vl" },
};
3014 1.1 mrg
3015 1.1 mrg
3016 1.1 mrg /* Convert a character in a builtin type string to a tree type. */
3017 1.1 mrg static tree
3018 1.1 mrg char_to_type (char c)
3019 1.1 mrg {
3020 1.1 mrg static tree volatile_ptr_type_node = NULL;
3021 1.1 mrg static tree volatile_const_ptr_type_node = NULL;
3022 1.1 mrg
3023 1.1 mrg if (volatile_ptr_type_node == NULL)
3024 1.1 mrg {
3025 1.1 mrg volatile_ptr_type_node =
3026 1.1 mrg build_pointer_type (build_qualified_type (void_type_node,
3027 1.1 mrg TYPE_QUAL_VOLATILE));
3028 1.1 mrg volatile_const_ptr_type_node =
3029 1.1 mrg build_pointer_type (build_qualified_type (void_type_node,
3030 1.1 mrg TYPE_QUAL_CONST
3031 1.1 mrg | TYPE_QUAL_VOLATILE));
3032 1.1 mrg }
3033 1.1 mrg
3034 1.1 mrg switch (c)
3035 1.1 mrg {
3036 1.1 mrg case 'v':
3037 1.1 mrg return void_type_node;
3038 1.1 mrg case 'l':
3039 1.1 mrg return long_unsigned_type_node;
3040 1.1 mrg case 'p':
3041 1.1 mrg return volatile_ptr_type_node;
3042 1.1 mrg case 'k':
3043 1.1 mrg return volatile_const_ptr_type_node;
3044 1.1 mrg default:
3045 1.1 mrg gcc_unreachable ();
3046 1.1 mrg }
3047 1.1 mrg }
3048 1.1 mrg
3049 1.1 mrg
3050 1.1 mrg /* Implement TARGET_INIT_BUILTINS. */
3051 1.1 mrg static void
3052 1.1 mrg tilepro_init_builtins (void)
3053 1.1 mrg {
3054 1.1 mrg size_t i;
3055 1.1 mrg
3056 1.1 mrg for (i = 0; i < ARRAY_SIZE (tilepro_builtins); i++)
3057 1.1 mrg {
3058 1.1 mrg const struct tilepro_builtin_def *p = &tilepro_builtins[i];
3059 1.1 mrg tree ftype, ret_type, arg_type_list = void_list_node;
3060 1.1 mrg tree decl;
3061 1.1 mrg int j;
3062 1.1 mrg
3063 1.1 mrg for (j = strlen (p->type) - 1; j > 0; j--)
3064 1.1 mrg {
3065 1.1 mrg arg_type_list =
3066 1.1 mrg tree_cons (NULL_TREE, char_to_type (p->type[j]), arg_type_list);
3067 1.1 mrg }
3068 1.1 mrg
3069 1.1 mrg ret_type = char_to_type (p->type[0]);
3070 1.1 mrg
3071 1.1 mrg ftype = build_function_type (ret_type, arg_type_list);
3072 1.1 mrg
3073 1.1 mrg decl = add_builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
3074 1.1 mrg NULL, NULL);
3075 1.1 mrg
3076 1.1 mrg if (p->is_const)
3077 1.1 mrg TREE_READONLY (decl) = 1;
3078 1.1 mrg TREE_NOTHROW (decl) = 1;
3079 1.1 mrg
3080 1.1 mrg if (tilepro_builtin_info[p->code].fndecl == NULL)
3081 1.1 mrg tilepro_builtin_info[p->code].fndecl = decl;
3082 1.1 mrg }
3083 1.1 mrg }
3084 1.1 mrg
3085 1.1 mrg
/* Implement TARGET_EXPAND_BUILTIN.  Expand the call EXP of a TILEPro
   builtin into RTL, returning the result rtx (TARGET if usable) for
   value-returning builtins and const0_rtx otherwise.  Returns
   NULL_RTX if the call is malformed.  */
static rtx
tilepro_expand_builtin (tree exp,
			rtx target,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
#define MAX_BUILTIN_ARGS 4

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_MD_FUNCTION_CODE (fndecl);
  tree arg;
  call_expr_arg_iterator iter;
  enum insn_code icode;
  /* op[0] is the result when the builtin returns a value; arguments
     follow.  Hence MAX_BUILTIN_ARGS + 1 slots.  */
  rtx op[MAX_BUILTIN_ARGS + 1], pat;
  int opnum;
  bool nonvoid;
  insn_gen_fn fn;

  /* Sanity-check the builtin code against the table filled in by
     tilepro_init_builtins.  */
  if (fcode >= TILEPRO_BUILTIN_max)
    internal_error ("bad builtin fcode");
  icode = tilepro_builtin_info[fcode].icode;
  if (icode == 0)
    internal_error ("bad builtin icode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  /* Insn operand 0 is the result; call arguments start at operand 1
     for value-returning builtins, operand 0 otherwise.  */
  opnum = nonvoid;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
    {
      const struct insn_operand_data *insn_op;

      if (arg == error_mark_node)
	return NULL_RTX;
      if (opnum > MAX_BUILTIN_ARGS)
	return NULL_RTX;

      insn_op = &insn_data[icode].operand[opnum];

      op[opnum] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

      /* If the operand doesn't satisfy the insn's predicate, try
	 forcing it into a register first.  */
      if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
	op[opnum] = copy_to_mode_reg (insn_op->mode, op[opnum]);

      if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
	{
	  /* We still failed to meet the predicate even after moving
	     into a register. Assume we needed an immediate.  */
	  error_at (EXPR_LOCATION (exp),
		    "operand must be an immediate of the right size");
	  return const0_rtx;
	}

      opnum++;
    }

  if (nonvoid)
    {
      /* Reuse TARGET for the result if it has the right mode and
	 satisfies operand 0's predicate; otherwise grab a fresh
	 pseudo.  */
      machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      op[0] = target;
    }

  /* Dispatch on the total operand count (result + arguments).  */
  fn = GEN_FCN (icode);
  switch (opnum)
    {
    case 0:
      pat = fn (NULL_RTX);
      break;
    case 1:
      pat = fn (op[0]);
      break;
    case 2:
      pat = fn (op[0], op[1]);
      break;
    case 3:
      pat = fn (op[0], op[1], op[2]);
      break;
    case 4:
      pat = fn (op[0], op[1], op[2], op[3]);
      break;
    case 5:
      pat = fn (op[0], op[1], op[2], op[3], op[4]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;

  /* If we are generating a prefetch, tell the scheduler not to move
     it around.  */
  if (GET_CODE (pat) == PREFETCH)
    PREFETCH_SCHEDULE_BARRIER_P (pat) = true;

  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}
3192 1.1 mrg
3193 1.1 mrg
3194 1.1 mrg /* Implement TARGET_BUILTIN_DECL. */
3195 1.1 mrg static tree
3196 1.1 mrg tilepro_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
3197 1.1 mrg {
3198 1.1 mrg if (code >= TILEPRO_BUILTIN_max)
3199 1.1 mrg return error_mark_node;
3200 1.1 mrg
3201 1.1 mrg return tilepro_builtin_info[code].fndecl;
3202 1.1 mrg }
3203 1.1 mrg
3204 1.1 mrg
3206 1.1 mrg
3207 1.1 mrg /* Stack frames */
3208 1.1 mrg
3209 1.1 mrg /* Return whether REGNO needs to be saved in the stack frame. */
3210 1.1 mrg static bool
3211 1.1 mrg need_to_save_reg (unsigned int regno)
3212 1.1 mrg {
3213 1.1 mrg if (!call_used_or_fixed_reg_p (regno)
3214 1.1 mrg && df_regs_ever_live_p (regno))
3215 1.1 mrg return true;
3216 1.1 mrg
3217 1.1 mrg if (flag_pic
3218 1.1 mrg && (regno == PIC_OFFSET_TABLE_REGNUM
3219 1.1 mrg || regno == TILEPRO_PIC_TEXT_LABEL_REGNUM)
3220 1.1 mrg && (crtl->uses_pic_offset_table || crtl->saves_all_registers))
3221 1.1 mrg return true;
3222 1.1 mrg
3223 1.1 mrg if (crtl->calls_eh_return)
3224 1.1 mrg {
3225 1.1 mrg unsigned i;
3226 1.1 mrg for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; i++)
3227 1.1 mrg {
3228 1.1 mrg if (regno == EH_RETURN_DATA_REGNO (i))
3229 1.1 mrg return true;
3230 1.1 mrg }
3231 1.1 mrg }
3232 1.1 mrg
3233 1.1 mrg return false;
3234 1.1 mrg }
3235 1.1 mrg
3236 1.1 mrg
3237 1.1 mrg /* Return the size of the register savev area. This function is only
3238 1.1 mrg correct starting with local register allocation */
3239 1.1 mrg static int
3240 1.1 mrg tilepro_saved_regs_size (void)
3241 1.1 mrg {
3242 1.1 mrg int reg_save_size = 0;
3243 1.1 mrg int regno;
3244 1.1 mrg int offset_to_frame;
3245 1.1 mrg int align_mask;
3246 1.1 mrg
3247 1.1 mrg for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
3248 1.1 mrg if (need_to_save_reg (regno))
3249 1.1 mrg reg_save_size += UNITS_PER_WORD;
3250 1.1 mrg
3251 1.1 mrg /* Pad out the register save area if necessary to make
3252 1.1 mrg frame_pointer_rtx be as aligned as the stack pointer. */
3253 1.1 mrg offset_to_frame = crtl->args.pretend_args_size + reg_save_size;
3254 1.1 mrg align_mask = (STACK_BOUNDARY / BITS_PER_UNIT) - 1;
3255 1.1 mrg reg_save_size += (-offset_to_frame) & align_mask;
3256 1.1 mrg
3257 1.1 mrg return reg_save_size;
3258 1.1 mrg }
3259 1.1 mrg
3260 1.1 mrg
3261 1.1 mrg /* Round up frame size SIZE. */
3262 1.1 mrg static int
3263 1.1 mrg round_frame_size (int size)
3264 1.1 mrg {
3265 1.1 mrg return ((size + STACK_BOUNDARY / BITS_PER_UNIT - 1)
3266 1.1 mrg & -STACK_BOUNDARY / BITS_PER_UNIT);
3267 1.1 mrg }
3268 1.1 mrg
3269 1.1 mrg
3270 1.1 mrg /* Emit a store in the stack frame to save REGNO at address ADDR, and
3271 1.1 mrg emit the corresponding REG_CFA_OFFSET note described by CFA and
3272 1.1 mrg CFA_OFFSET. Return the emitted insn. */
3273 1.1 mrg static rtx
3274 1.1 mrg frame_emit_store (int regno, int regno_note, rtx addr, rtx cfa,
3275 1.1 mrg int cfa_offset)
3276 1.1 mrg {
3277 1.1 mrg rtx reg = gen_rtx_REG (Pmode, regno);
3278 1.1 mrg rtx mem = gen_frame_mem (Pmode, addr);
3279 1.1 mrg rtx mov = gen_movsi (mem, reg);
3280 1.1 mrg
3281 1.1 mrg /* Describe what just happened in a way that dwarf understands. We
3282 1.1 mrg use temporary registers to hold the address to make scheduling
3283 1.1 mrg easier, and use the REG_CFA_OFFSET to describe the address as an
3284 1.1 mrg offset from the CFA. */
3285 1.1 mrg rtx reg_note = gen_rtx_REG (Pmode, regno_note);
3286 1.1 mrg rtx cfa_relative_addr = gen_rtx_PLUS (Pmode, cfa, gen_int_si (cfa_offset));
3287 1.1 mrg rtx cfa_relative_mem = gen_frame_mem (Pmode, cfa_relative_addr);
3288 1.1 mrg rtx real = gen_rtx_SET (cfa_relative_mem, reg_note);
3289 1.1 mrg add_reg_note (mov, REG_CFA_OFFSET, real);
3290 1.1 mrg
3291 1.1 mrg return emit_insn (mov);
3292 1.1 mrg }
3293 1.1 mrg
3294 1.1 mrg
3295 1.1 mrg /* Emit a load in the stack frame to load REGNO from address ADDR.
3296 1.1 mrg Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
3297 1.1 mrg non-null. Return the emitted insn. */
3298 1.1 mrg static rtx_insn *
3299 1.1 mrg frame_emit_load (int regno, rtx addr, rtx *cfa_restores)
3300 1.1 mrg {
3301 1.1 mrg rtx reg = gen_rtx_REG (Pmode, regno);
3302 1.1 mrg rtx mem = gen_frame_mem (Pmode, addr);
3303 1.1 mrg if (cfa_restores)
3304 1.1 mrg *cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, *cfa_restores);
3305 1.1 mrg return emit_insn (gen_movsi (reg, mem));
3306 1.1 mrg }
3307 1.1 mrg
3308 1.1 mrg
3309 1.1 mrg /* Helper function to set RTX_FRAME_RELATED_P on instructions,
3310 1.1 mrg including sequences. */
3311 1.1 mrg static rtx_insn *
3312 1.1 mrg set_frame_related_p (void)
3313 1.1 mrg {
3314 1.1 mrg rtx_insn *seq = get_insns ();
3315 1.1 mrg rtx_insn *insn;
3316 1.1 mrg
3317 1.1 mrg end_sequence ();
3318 1.1 mrg
3319 1.1 mrg if (!seq)
3320 1.1 mrg return NULL;
3321 1.1 mrg
3322 1.1 mrg if (INSN_P (seq))
3323 1.1 mrg {
3324 1.1 mrg insn = seq;
3325 1.1 mrg while (insn != NULL_RTX)
3326 1.1 mrg {
3327 1.1 mrg RTX_FRAME_RELATED_P (insn) = 1;
3328 1.1 mrg insn = NEXT_INSN (insn);
3329 1.1 mrg }
3330 1.1 mrg seq = emit_insn (seq);
3331 1.1 mrg }
3332 1.1 mrg else
3333 1.1 mrg {
3334 1.1 mrg seq = emit_insn (seq);
3335 1.1 mrg RTX_FRAME_RELATED_P (seq) = 1;
3336 1.1 mrg }
3337 1.1 mrg return seq;
3338 1.1 mrg }
3339 1.1 mrg
3340 1.1 mrg
/* Evaluate EXP inside a fresh insn sequence and mark every insn it
   emits as frame-related (see set_frame_related_p above).  */
#define FRP(exp) (start_sequence (), exp, set_frame_related_p ())
3342 1.1 mrg
/* This emits code for 'sp += offset'.

   The ABI only allows us to modify 'sp' in a single 'addi' or
   'addli', so the backtracer understands it.  Larger amounts cannot
   use those instructions, so are added by placing the offset into a
   large register and using 'add'.

   This happens after reload, so we need to expand it ourselves.
   NEXT_SCRATCH_REGNO is post-decremented when a scratch register is
   consumed.  REG_NOTES is installed verbatim on the adjusting insn;
   when FRAME_RELATED, a REG_CFA_ADJUST_CFA note is added as well.
   Returns the emitted sp-adjust insn.  */
static rtx_insn *
emit_sp_adjust (int offset, int *next_scratch_regno, bool frame_related,
		rtx reg_notes)
{
  rtx to_add;
  rtx imm_rtx = gen_int_si (offset);

  rtx_insn *insn;
  if (satisfies_constraint_J (imm_rtx))
    {
      /* We can add this using a single addi or addli.  */
      to_add = imm_rtx;
    }
  else
    {
      /* Offset too large for an immediate: materialize it in a
	 scratch register and add that.  */
      rtx tmp = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
      tilepro_expand_set_const32 (tmp, imm_rtx);
      to_add = tmp;
    }

  /* Actually adjust the stack pointer.  */
  insn = emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
				   to_add));
  REG_NOTES (insn) = reg_notes;

  /* Describe what just happened in a way that dwarf understands.  The
     note always uses the immediate form, even when the adjustment
     above went through a scratch register.  */
  if (frame_related)
    {
      rtx real = gen_rtx_SET (stack_pointer_rtx,
			      gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					    imm_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      add_reg_note (insn, REG_CFA_ADJUST_CFA, real);
    }

  return insn;
}
3388 1.1 mrg
3389 1.1 mrg
3390 1.1 mrg /* Return whether the current function is leaf. This takes into
3391 1.1 mrg account whether the function calls tls_get_addr. */
3392 1.1 mrg static bool
3393 1.1 mrg tilepro_current_function_is_leaf (void)
3394 1.1 mrg {
3395 1.1 mrg return crtl->is_leaf && !cfun->machine->calls_tls_get_addr;
3396 1.1 mrg }
3397 1.1 mrg
3398 1.1 mrg
3399 1.1 mrg /* Return the frame size. */
3400 1.1 mrg static int
3401 1.1 mrg compute_total_frame_size (void)
3402 1.1 mrg {
3403 1.1 mrg int total_size = (get_frame_size () + tilepro_saved_regs_size ()
3404 1.1 mrg + crtl->outgoing_args_size
3405 1.1 mrg + crtl->args.pretend_args_size);
3406 1.1 mrg
3407 1.1 mrg if (!tilepro_current_function_is_leaf () || cfun->calls_alloca)
3408 1.1 mrg {
3409 1.1 mrg /* Make room for save area in callee. */
3410 1.1 mrg total_size += STACK_POINTER_OFFSET;
3411 1.1 mrg }
3412 1.1 mrg
3413 1.1 mrg return round_frame_size (total_size);
3414 1.1 mrg }
3415 1.1 mrg
3416 1.1 mrg
3417 1.1 mrg /* Return nonzero if this function is known to have a null epilogue.
3418 1.1 mrg This allows the optimizer to omit jumps to jumps if no stack was
3419 1.1 mrg created. */
3420 1.1 mrg bool
3421 1.1 mrg tilepro_can_use_return_insn_p (void)
3422 1.1 mrg {
3423 1.1 mrg return (reload_completed
3424 1.1 mrg && cfun->static_chain_decl == 0
3425 1.1 mrg && compute_total_frame_size () == 0
3426 1.1 mrg && tilepro_current_function_is_leaf ()
3427 1.1 mrg && !crtl->profile && !df_regs_ever_live_p (TILEPRO_LINK_REGNUM));
3428 1.1 mrg }
3429 1.1 mrg
3430 1.1 mrg
/* Returns an rtx for a stack slot at 'FP + offset_from_fp'.  If there
   is a frame pointer, it computes the value relative to
   that.  Otherwise it uses the stack pointer.  When the address is
   not simply the base register, it is computed into a fresh scratch
   register (consuming *NEXT_SCRATCH_REGNO, post-decremented).  */
static rtx
compute_frame_addr (int offset_from_fp, int *next_scratch_regno)
{
  rtx base_reg_rtx, tmp_reg_rtx, offset_rtx;
  int offset_from_base;

  if (frame_pointer_needed)
    {
      base_reg_rtx = hard_frame_pointer_rtx;
      offset_from_base = offset_from_fp;
    }
  else
    {
      /* Without a hard frame pointer, slots are addressed from SP,
	 which sits compute_total_frame_size () bytes below the soft
	 FP.  */
      int offset_from_sp = compute_total_frame_size () + offset_from_fp;
      base_reg_rtx = stack_pointer_rtx;
      offset_from_base = offset_from_sp;
    }

  if (offset_from_base == 0)
    return base_reg_rtx;

  /* Compute the address into a scratch register.  */
  tmp_reg_rtx = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
  offset_rtx = gen_int_si (offset_from_base);

  if (!tilepro_expand_addsi (tmp_reg_rtx, base_reg_rtx, offset_rtx))
    {
      /* tilepro_expand_addsi did not handle it; fall back to a plain
	 PLUS set.  */
      emit_insn (gen_rtx_SET (tmp_reg_rtx,
			      gen_rtx_PLUS (Pmode, base_reg_rtx,
					    offset_rtx)));
    }

  return tmp_reg_rtx;
}
3468 1.1 mrg
3469 1.1 mrg
/* The stack frame looks like this:
         +-------------+
         |    ...      |
         |  incoming   |
         | stack args  |
   AP -> +-------------+
         | caller's HFP|
         +-------------+
         | lr save     |
  HFP -> +-------------+
         |  var args   |
         |  reg save   | crtl->args.pretend_args_size bytes
         +-------------+
         |    ...      |
         | saved regs  | tilepro_saved_regs_size() bytes
   FP -> +-------------+
         |    ...      |
         |    vars     | get_frame_size() bytes
         +-------------+
         |    ...      |
         |  outgoing   |
         | stack args  | crtl->outgoing_args_size bytes
         +-------------+
         | HFP         | 4 bytes (only here if nonleaf / alloca)
         +-------------+
         | callee lr   | 4 bytes (only here if nonleaf / alloca)
         | save        |
   SP -> +-------------+

   HFP == incoming SP.

   For functions with a frame larger than 32767 bytes, or which use
   alloca (), r52 is used as a frame pointer.  Otherwise there is no
   frame pointer.

   FP is saved at SP+4 before calling a subroutine so the
   callee can chain.  */
void
tilepro_expand_prologue (void)
{
#define ROUND_ROBIN_SIZE 4
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx insn, cfa;
  unsigned int which_scratch;
  int offset, start_offset, regno;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* A register that holds a copy of the incoming sp.  */
  int sp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  if (flag_stack_usage_info)
    current_function_static_stack_size = total_size;

  /* Save lr first in its special location because code after this
     might use the link register as a scratch register.  */
  if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM) || crtl->calls_eh_return)
    {
      /* FRP marks the store frame-related for dwarf; the blockage
	 keeps later code from being scheduled before the save.  */
      FRP (frame_emit_store (TILEPRO_LINK_REGNUM, TILEPRO_LINK_REGNUM,
			     stack_pointer_rtx, stack_pointer_rtx, 0));
      emit_insn (gen_blockage ());
    }

  if (total_size == 0)
    {
      /* Load the PIC register if needed.  */
      if (flag_pic && crtl->uses_pic_offset_table)
	load_pic_register (false);

      return;
    }

  cfa = stack_pointer_rtx;

  if (frame_pointer_needed)
    {
      fp_copy_regno = next_scratch_regno--;

      /* Copy the old frame pointer aside so we can save it later.  */
      insn = FRP (emit_move_insn (gen_rtx_REG (word_mode, fp_copy_regno),
				  hard_frame_pointer_rtx));
      add_reg_note (insn, REG_CFA_REGISTER, NULL_RTX);

      /* Set up the frame pointer.  */
      insn = FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
      add_reg_note (insn, REG_CFA_DEF_CFA, hard_frame_pointer_rtx);
      cfa = hard_frame_pointer_rtx;
      REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;

      /* fp holds a copy of the incoming sp, in case we need to store
	 it.  */
      sp_copy_regno = HARD_FRAME_POINTER_REGNUM;
    }
  else if (!tilepro_current_function_is_leaf ())
    {
      /* Copy the old stack pointer aside so we can save it later.  */
      sp_copy_regno = next_scratch_regno--;
      emit_move_insn (gen_rtx_REG (Pmode, sp_copy_regno),
		      stack_pointer_rtx);
    }

  if (tilepro_current_function_is_leaf ())
    {
      /* No need to store chain pointer to caller's frame.  */
      emit_sp_adjust (-total_size, &next_scratch_regno,
		      !frame_pointer_needed, NULL_RTX);
    }
  else
    {
      /* Save the frame pointer (incoming sp value) to support
         backtracing.  First we need to create an rtx with the store
         address.  */
      rtx chain_addr = gen_rtx_REG (Pmode, next_scratch_regno--);
      rtx size_rtx = gen_int_si (-(total_size - UNITS_PER_WORD));

      if (add_operand (size_rtx, Pmode))
	{
	  /* Expose more parallelism by computing this value from the
	     original stack pointer, not the one after we have pushed
	     the frame.  */
	  rtx p = gen_rtx_PLUS (Pmode, stack_pointer_rtx, size_rtx);
	  emit_insn (gen_rtx_SET (chain_addr, p));
	  emit_sp_adjust (-total_size, &next_scratch_regno,
			  !frame_pointer_needed, NULL_RTX);
	}
      else
	{
	  /* The stack frame is large, so just store the incoming sp
	     value at *(new_sp + UNITS_PER_WORD).  */
	  rtx p;
	  emit_sp_adjust (-total_size, &next_scratch_regno,
			  !frame_pointer_needed, NULL_RTX);
	  p = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
			    GEN_INT (UNITS_PER_WORD));
	  emit_insn (gen_rtx_SET (chain_addr, p));
	}

      /* Save our frame pointer for backtrace chaining.  */
      emit_insn (gen_movsi (gen_frame_mem (SImode, chain_addr),
			    gen_rtx_REG (SImode, sp_copy_regno)));
    }

  /* Compute where to start storing registers we need to save.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  /* Store all registers that need saving.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
	rtx r = reg_save_addr[which_scratch];
	int from_regno;
	int cfa_offset = frame_pointer_needed ? offset : total_size + offset;

	if (r == NULL_RTX)
	  {
	    /* First use of this round-robin slot: compute the slot
	       address into a fresh scratch register.  */
	    rtx p = compute_frame_addr (offset, &next_scratch_regno);
	    r = gen_rtx_REG (word_mode, next_scratch_regno--);
	    reg_save_addr[which_scratch] = r;

	    emit_insn (gen_rtx_SET (r, p));
	  }
	else
	  {
	    /* Advance to the next stack slot to store this register.  */
	    int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
	    rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
	    emit_insn (gen_rtx_SET (r, p));
	  }

	/* Save this register to the stack (but use the old fp value
	   we copied aside if appropriate).  */
	from_regno = (fp_copy_regno >= 0
		      && regno ==
		      HARD_FRAME_POINTER_REGNUM) ? fp_copy_regno : regno;
	FRP (frame_emit_store (from_regno, regno, r, cfa, cfa_offset));

	offset -= UNITS_PER_WORD;
	which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  /* If profiling, force that to happen after the frame is set up.  */
  if (crtl->profile)
    emit_insn (gen_blockage ());

  /* Load the PIC register if needed.  */
  if (flag_pic && crtl->uses_pic_offset_table)
    load_pic_register (false);
}
3671 1.1 mrg
3672 1.1 mrg
/* Implement the epilogue and sibcall_epilogue patterns.  SIBCALL_P is
   true for a sibcall_epilogue pattern, and false for an epilogue
   pattern.  Restores saved registers, pops the frame, and emits
   either a return insn or (for sibcalls) a use of lr.  */
void
tilepro_expand_epilogue (bool sibcall_p)
{
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx_insn *last_insn, *insn;
  unsigned int which_scratch;
  int offset, start_offset, regno;
  rtx cfa_restores = NULL_RTX;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  /* Remember where we start emitting, so everything from here on can
     be marked frame-related at the end.  */
  last_insn = get_last_insn ();

  /* Load lr first since we are going to need it first.  */
  insn = NULL;
  if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM))
    {
      insn = frame_emit_load (TILEPRO_LINK_REGNUM,
			      compute_frame_addr (0, &next_scratch_regno),
			      &cfa_restores);
    }

  if (total_size == 0)
    {
      /* Nothing to pop; attach any pending CFA restore notes to the
	 lr reload and go straight to the return.  */
      if (insn)
	{
	  RTX_FRAME_RELATED_P (insn) = 1;
	  REG_NOTES (insn) = cfa_restores;
	}
      goto done;
    }

  /* Compute where to start restoring registers.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  if (frame_pointer_needed)
    fp_copy_regno = next_scratch_regno--;

  /* Restore all callee-saved registers.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
	rtx r = reg_save_addr[which_scratch];
	if (r == NULL_RTX)
	  {
	    r = compute_frame_addr (offset, &next_scratch_regno);
	    reg_save_addr[which_scratch] = r;
	  }
	else
	  {
	    /* Advance to the next stack slot to store this
	       register.  */
	    int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
	    rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
	    emit_insn (gen_rtx_SET (r, p));
	  }

	/* The old fp is reloaded into a scratch copy and moved back
	   into the hard frame pointer only after the frame is
	   popped.  */
	if (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
	  frame_emit_load (fp_copy_regno, r, NULL);
	else
	  frame_emit_load (regno, r, &cfa_restores);

	offset -= UNITS_PER_WORD;
	which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  if (!tilepro_current_function_is_leaf ())
    cfa_restores =
      alloc_reg_note (REG_CFA_RESTORE, stack_pointer_rtx, cfa_restores);

  emit_insn (gen_blockage ());

  if (frame_pointer_needed)
    {
      /* Restore the old stack pointer by copying from the frame
	 pointer.  */
      insn = emit_insn (gen_sp_restore (stack_pointer_rtx,
					hard_frame_pointer_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) = cfa_restores;
      add_reg_note (insn, REG_CFA_DEF_CFA, stack_pointer_rtx);
    }
  else
    {
      insn = emit_sp_adjust (total_size, &next_scratch_regno, true,
			     cfa_restores);
    }

  if (crtl->calls_eh_return)
    emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
			      EH_RETURN_STACKADJ_RTX));

  /* Restore the old frame pointer.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx,
			     gen_rtx_REG (Pmode, fp_copy_regno));
      add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
    }

  /* Mark the pic registers as live outside of the function.  */
  if (flag_pic)
    {
      emit_use (cfun->machine->text_label_rtx);
      emit_use (cfun->machine->got_rtx);
    }

done:
  if (!sibcall_p)
    {
      /* Emit the actual 'return' instruction.  */
      emit_jump_insn (gen__return ());
    }
  else
    {
      emit_use (gen_rtx_REG (Pmode, TILEPRO_LINK_REGNUM));
    }

  /* Mark all insns we just emitted as frame-related.  */
  for (; last_insn != NULL_RTX; last_insn = next_insn (last_insn))
    RTX_FRAME_RELATED_P (last_insn) = 1;
}
3811 1.1 mrg
3812 1.1 mrg #undef ROUND_ROBIN_SIZE
3813 1.1 mrg
3814 1.1 mrg
3815 1.1 mrg /* Implement INITIAL_ELIMINATION_OFFSET. */
3816 1.1 mrg int
3817 1.1 mrg tilepro_initial_elimination_offset (int from, int to)
3818 1.1 mrg {
3819 1.1 mrg int total_size = compute_total_frame_size ();
3820 1.1 mrg
3821 1.1 mrg if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
3822 1.1 mrg {
3823 1.1 mrg return (total_size - crtl->args.pretend_args_size
3824 1.1 mrg - tilepro_saved_regs_size ());
3825 1.1 mrg }
3826 1.1 mrg else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
3827 1.1 mrg {
3828 1.1 mrg return -(crtl->args.pretend_args_size + tilepro_saved_regs_size ());
3829 1.1 mrg }
3830 1.1 mrg else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
3831 1.1 mrg {
3832 1.1 mrg return STACK_POINTER_OFFSET + total_size;
3833 1.1 mrg }
3834 1.1 mrg else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
3835 1.1 mrg {
3836 1.1 mrg return STACK_POINTER_OFFSET;
3837 1.1 mrg }
3838 1.1 mrg else
3839 1.1 mrg gcc_unreachable ();
3840 1.1 mrg }
3841 1.1 mrg
3842 1.1 mrg
3843 1.1 mrg /* Return an RTX indicating where the return address to the
3844 1.1 mrg calling function can be found. */
3845 1.1 mrg rtx
3846 1.1 mrg tilepro_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
3847 1.1 mrg {
3848 1.1 mrg if (count != 0)
3849 1.1 mrg return const0_rtx;
3850 1.1 mrg
3851 1.1 mrg return get_hard_reg_initial_val (Pmode, TILEPRO_LINK_REGNUM);
3852 1.1 mrg }
3853 1.1 mrg
3854 1.1 mrg
3855 1.1 mrg /* Implement EH_RETURN_HANDLER_RTX. */
3856 1.1 mrg rtx
3857 1.1 mrg tilepro_eh_return_handler_rtx (void)
3858 1.1 mrg {
3859 1.1 mrg /* The MEM needs to be volatile to prevent it from being
3860 1.1 mrg deleted. */
3861 1.1 mrg rtx tmp = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
3862 1.1 mrg MEM_VOLATILE_P (tmp) = true;
3863 1.1 mrg return tmp;
3864 1.1 mrg }
3865 1.1 mrg
3866 1.1 mrg
3868 1.1 mrg
3869 1.1 mrg /* Registers */
3870 1.1 mrg
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */
static void
tilepro_conditional_register_usage (void)
{
  global_regs[TILEPRO_NETORDER_REGNUM] = 1;
  /* TILEPRO_PIC_TEXT_LABEL_REGNUM is conditionally used.  */
  if (TILEPRO_PIC_TEXT_LABEL_REGNUM != INVALID_REGNUM)
    fixed_regs[TILEPRO_PIC_TEXT_LABEL_REGNUM] = 1;
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
}
3882 1.1 mrg
3883 1.1 mrg
3884 1.1 mrg /* Implement TARGET_FRAME_POINTER_REQUIRED. */
3885 1.1 mrg static bool
3886 1.1 mrg tilepro_frame_pointer_required (void)
3887 1.1 mrg {
3888 1.1 mrg return crtl->calls_eh_return || cfun->calls_alloca;
3889 1.1 mrg }
3890 1.1 mrg
3891 1.1 mrg
3893 1.1 mrg
3894 1.1 mrg /* Scheduling and reorg */
3895 1.1 mrg
3896 1.1 mrg /* Return the length of INSN. LENGTH is the initial length computed
3897 1.1 mrg by attributes in the machine-description file. This is where we
3898 1.1 mrg account for bundles. */
3899 1.1 mrg int
3900 1.1 mrg tilepro_adjust_insn_length (rtx_insn *insn, int length)
3901 1.1 mrg {
3902 1.1 mrg machine_mode mode = GET_MODE (insn);
3903 1.1 mrg
3904 1.1 mrg /* A non-termininating instruction in a bundle has length 0. */
3905 1.1 mrg if (mode == SImode)
3906 1.1 mrg return 0;
3907 1.1 mrg
3908 1.1 mrg /* By default, there is not length adjustment. */
3909 1.1 mrg return length;
3910 1.1 mrg }
3911 1.1 mrg
3912 1.1 mrg
/* Implement TARGET_SCHED_ISSUE_RATE.  Up to three instructions can be
   issued per cycle.  */
static int
tilepro_issue_rate (void)
{
  return 3;
}
3919 1.1 mrg
3920 1.1 mrg
3921 1.1 mrg /* Return the rtx for the jump target. */
3922 1.1 mrg static rtx
3923 1.1 mrg get_jump_target (rtx branch)
3924 1.1 mrg {
3925 1.1 mrg if (CALL_P (branch))
3926 1.1 mrg {
3927 1.1 mrg rtx call;
3928 1.1 mrg call = PATTERN (branch);
3929 1.1 mrg
3930 1.1 mrg if (GET_CODE (call) == PARALLEL)
3931 1.1 mrg call = XVECEXP (call, 0, 0);
3932 1.1 mrg
3933 1.1 mrg if (GET_CODE (call) == SET)
3934 1.1 mrg call = SET_SRC (call);
3935 1.1 mrg
3936 1.1 mrg if (GET_CODE (call) == CALL)
3937 1.1 mrg return XEXP (XEXP (call, 0), 0);
3938 1.1 mrg }
3939 1.1 mrg return 0;
3940 1.1 mrg }
3941 1.1 mrg
/* Implement TARGET_SCHED_ADJUST_COST.  */
static int
tilepro_sched_adjust_cost (rtx_insn *insn, int dep_type, rtx_insn *dep_insn,
			   int cost, unsigned int)
{
  /* If we have a true dependence, INSN is a call, and DEP_INSN
     defines a register that is needed by the call (argument or stack
     pointer), set its latency to 0 so that it can be bundled with
     the call.  Explicitly check for and exclude the case when
     DEP_INSN defines the target of the jump.  */
  if (CALL_P (insn) && dep_type == REG_DEP_TRUE)
    {
      rtx target = get_jump_target (insn);
      if (!REG_P (target) || !set_of (target, dep_insn))
	return 0;
    }

  /* All other dependences keep their attribute-computed cost.  */
  return cost;
}
3961 1.1 mrg
3962 1.1 mrg
3963 1.1 mrg /* Skip over irrelevant NOTEs and such and look for the next insn we
3964 1.1 mrg would consider bundling. */
3965 1.1 mrg static rtx_insn *
3966 1.1 mrg next_insn_to_bundle (rtx_insn *r, rtx_insn *end)
3967 1.1 mrg {
3968 1.1 mrg for (; r != end; r = NEXT_INSN (r))
3969 1.1 mrg {
3970 1.1 mrg if (NONDEBUG_INSN_P (r)
3971 1.1 mrg && GET_CODE (PATTERN (r)) != USE
3972 1.1 mrg && GET_CODE (PATTERN (r)) != CLOBBER)
3973 1.1 mrg return r;
3974 1.1 mrg }
3975 1.1 mrg
3976 1.1 mrg return NULL;
3977 1.1 mrg }
3978 1.1 mrg
3979 1.1 mrg
/* Go through all insns, and use the information generated during
   scheduling to generate SEQUENCEs to represent bundles of
   instructions issued simultaneously.  The insn mode is reused as a
   bundling mark: QImode ends a bundle, SImode continues one (see
   tilepro_adjust_insn_length).  */
static void
tilepro_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      rtx_insn *end = NEXT_INSN (BB_END (bb));

      for (insn = next_insn_to_bundle (BB_HEAD (bb), end); insn; insn = next)
	{
	  next = next_insn_to_bundle (NEXT_INSN (insn), end);

	  /* Never wrap {} around inline asm.  */
	  if (GET_CODE (PATTERN (insn)) != ASM_INPUT)
	    {
	      /* TImode on the following insn marks the start of a new
		 issue group, so the current insn must close its
		 bundle.  */
	      if (next == NULL_RTX || GET_MODE (next) == TImode
		  /* NOTE: The scheduler incorrectly believes a call
		     insn can execute in the same cycle as the insn
		     after the call.  This is of course impossible.
		     Really we need to fix the scheduler somehow, so
		     the code after the call gets scheduled
		     optimally.  */
		  || CALL_P (insn))
		{
		  /* Mark current insn as the end of a bundle.  */
		  PUT_MODE (insn, QImode);
		}
	      else
		{
		  /* Mark it as part of a bundle.  */
		  PUT_MODE (insn, SImode);
		}
	    }
	}
    }
}
4020 1.1 mrg
4021 1.1 mrg
/* Helper function for tilepro_fixup_pcrel_references.  Replace INSN,
   one half of a two-insn pc-relative address computation described by
   OPNDS (dest, addend reg, symbol, text label), with the equivalent
   GOT-based sequence.  FIRST_INSN_P says whether INSN is the first
   insn of the pc-relative pair.  See the comment above
   tilepro_fixup_pcrel_references for the exact replacement
   sequences.  */
static void
replace_pc_relative_symbol_ref (rtx_insn *insn, rtx opnds[4], bool first_insn_p)
{
  rtx_insn *new_insns;

  start_sequence ();

  if (flag_pic == 1)
    {
      /* -fpic: one got16 relocation suffices, so the first insn of
	 the pair gets no replacement (it is just deleted below) and
	 the second becomes an add + lw through the GOT.  */
      if (!first_insn_p)
	{
	  emit_insn (gen_add_got16 (opnds[0], tilepro_got_rtx (),
				    opnds[2]));
	  emit_insn (gen_insn_lw (opnds[0], opnds[0]));
	}
    }
  else
    {
      /* -fPIC: a two-part got_ha16 / got_lo16 sequence is needed.  */
      if (first_insn_p)
	{
	  emit_insn (gen_addhi_got32 (opnds[0], tilepro_got_rtx (),
				      opnds[2]));
	}
      else
	{
	  emit_insn (gen_addlo_got32 (opnds[0], opnds[1], opnds[2]));
	  emit_insn (gen_insn_lw (opnds[0], opnds[0]));
	}
    }

  new_insns = get_insns ();
  end_sequence ();

  if (new_insns)
    emit_insn_before (new_insns, insn);

  /* The original pc-relative insn is always removed.  */
  delete_insn (insn);
}
4061 1.1 mrg
4062 1.1 mrg
4063 1.1 mrg /* Returns whether INSN is a pc-relative addli insn. */
4064 1.1 mrg static bool
4065 1.1 mrg match_addli_pcrel (rtx_insn *insn)
4066 1.1 mrg {
4067 1.1 mrg rtx pattern = PATTERN (insn);
4068 1.1 mrg rtx unspec;
4069 1.1 mrg
4070 1.1 mrg if (GET_CODE (pattern) != SET)
4071 1.1 mrg return false;
4072 1.1 mrg
4073 1.1 mrg if (GET_CODE (SET_SRC (pattern)) != LO_SUM)
4074 1.1 mrg return false;
4075 1.1 mrg
4076 1.1 mrg if (GET_CODE (XEXP (SET_SRC (pattern), 1)) != CONST)
4077 1.1 mrg return false;
4078 1.1 mrg
4079 1.1 mrg unspec = XEXP (XEXP (SET_SRC (pattern), 1), 0);
4080 1.1 mrg
4081 1.1 mrg return (GET_CODE (unspec) == UNSPEC
4082 1.1 mrg && XINT (unspec, 1) == UNSPEC_PCREL_SYM);
4083 1.1 mrg }
4084 1.1 mrg
4085 1.1 mrg
/* Helper function for tilepro_fixup_pcrel_references.  Decompose a
   pc-relative addli INSN (as recognized by match_addli_pcrel) into
   its operands and replace it with a GOT-based sequence.  */
static void
replace_addli_pcrel (rtx_insn *insn)
{
  rtx pattern = PATTERN (insn);
  rtx set_src;
  rtx unspec;
  rtx opnds[4];
  bool first_insn_p;

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  set_src = SET_SRC (pattern);
  gcc_assert (GET_CODE (set_src) == LO_SUM);
  gcc_assert (GET_CODE (XEXP (set_src, 1)) == CONST);
  opnds[1] = XEXP (set_src, 0);

  unspec = XEXP (XEXP (set_src, 1), 0);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_PCREL_SYM);
  opnds[2] = XVECEXP (unspec, 0, 0);	/* The symbol.  */
  opnds[3] = XVECEXP (unspec, 0, 1);	/* The text label.  */

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[2]) != SYMBOL_REF)
    return;

  /* The first insn of the pair is the one whose addend is the common
     text label register.  */
  first_insn_p = (opnds[1] == tilepro_text_label_rtx ());

  replace_pc_relative_symbol_ref (insn, opnds, first_insn_p);
}
4118 1.1 mrg
4119 1.1 mrg
4120 1.1 mrg /* Returns whether INSN is a pc-relative auli insn. */
4121 1.1 mrg static bool
4122 1.1 mrg match_auli_pcrel (rtx_insn *insn)
4123 1.1 mrg {
4124 1.1 mrg rtx pattern = PATTERN (insn);
4125 1.1 mrg rtx high;
4126 1.1 mrg rtx unspec;
4127 1.1 mrg
4128 1.1 mrg if (GET_CODE (pattern) != SET)
4129 1.1 mrg return false;
4130 1.1 mrg
4131 1.1 mrg if (GET_CODE (SET_SRC (pattern)) != PLUS)
4132 1.1 mrg return false;
4133 1.1 mrg
4134 1.1 mrg high = XEXP (SET_SRC (pattern), 1);
4135 1.1 mrg
4136 1.1 mrg if (GET_CODE (high) != HIGH
4137 1.1 mrg || GET_CODE (XEXP (high, 0)) != CONST)
4138 1.1 mrg return false;
4139 1.1 mrg
4140 1.1 mrg unspec = XEXP (XEXP (high, 0), 0);
4141 1.1 mrg
4142 1.1 mrg return (GET_CODE (unspec) == UNSPEC
4143 1.1 mrg && XINT (unspec, 1) == UNSPEC_PCREL_SYM);
4144 1.1 mrg }
4145 1.1 mrg
4146 1.1 mrg
/* Helper function for tilepro_fixup_pcrel_references.  Decompose a
   pc-relative auli INSN (as recognized by match_auli_pcrel) into its
   operands and replace it with a GOT-based sequence.  */
static void
replace_auli_pcrel (rtx_insn *insn)
{
  rtx pattern = PATTERN (insn);
  rtx set_src;
  rtx high;
  rtx unspec;
  rtx opnds[4];
  bool first_insn_p;

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  set_src = SET_SRC (pattern);
  gcc_assert (GET_CODE (set_src) == PLUS);
  opnds[1] = XEXP (set_src, 0);

  high = XEXP (set_src, 1);
  gcc_assert (GET_CODE (high) == HIGH);
  gcc_assert (GET_CODE (XEXP (high, 0)) == CONST);

  unspec = XEXP (XEXP (high, 0), 0);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_PCREL_SYM);
  opnds[2] = XVECEXP (unspec, 0, 0);	/* The symbol.  */
  opnds[3] = XVECEXP (unspec, 0, 1);	/* The text label.  */

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[2]) != SYMBOL_REF)
    return;

  /* The first insn of the pair is the one whose addend is the common
     text label register.  */
  first_insn_p = (opnds[1] == tilepro_text_label_rtx ());

  replace_pc_relative_symbol_ref (insn, opnds, first_insn_p);
}
4183 1.1 mrg
4184 1.1 mrg
4185 1.1 mrg /* We generate PC relative SYMBOL_REFs as an optimization, to avoid
4186 1.1 mrg going through the GOT when the symbol is local to the compilation
4187 1.1 mrg unit. But such a symbol requires that the common text_label that
4188 1.1 mrg we generate at the beginning of the function be in the same section
4189 1.1 mrg as the reference to the SYMBOL_REF. This may not be true if we
4190 1.1 mrg generate hot/cold sections. This function looks for such cases and
4191 1.1 mrg replaces such references with the longer sequence going through the
4192 1.1 mrg GOT.
4193 1.1 mrg
4194 1.1 mrg We expect one of the following two instruction sequences:
4195 1.1 mrg addli tmp1, txt_label_reg, lo16(sym - txt_label)
4196 1.1 mrg auli tmp2, tmp1, ha16(sym - txt_label)
4197 1.1 mrg
4198 1.1 mrg auli tmp1, txt_label_reg, ha16(sym - txt_label)
4199 1.1 mrg addli tmp2, tmp1, lo16(sym - txt_label)
4200 1.1 mrg
4201 1.1 mrg If we're compiling -fpic, we replace the first instruction with
4202 1.1 mrg nothing, and the second instruction with:
4203 1.1 mrg
4204 1.1 mrg addli tmp2, got_rtx, got(sym)
4205 1.1 mrg lw tmp2, tmp2
4206 1.1 mrg
4207 1.1 mrg If we're compiling -fPIC, we replace the first instruction with:
4208 1.1 mrg
4209 1.1 mrg auli tmp1, got_rtx, got_ha16(sym)
4210 1.1 mrg
4211 1.1 mrg and the second instruction with:
4212 1.1 mrg
4213 1.1 mrg addli tmp2, tmp1, got_lo16(sym)
4214 1.1 mrg lw tmp2, tmp2
4215 1.1 mrg
4216 1.1 mrg Note that we're careful to disturb the instruction sequence as
4217 1.1 mrg little as possible, since it's very late in the compilation
4218 1.1 mrg process.
4219 1.1 mrg */
4220 1.1 mrg static void
4221 1.1 mrg tilepro_fixup_pcrel_references (void)
4222 1.1 mrg {
4223 1.1 mrg rtx_insn *insn, *next_insn;
4224 1.1 mrg bool same_section_as_entry = true;
4225 1.1 mrg
4226 1.1 mrg for (insn = get_insns (); insn; insn = next_insn)
4227 1.1 mrg {
4228 1.1 mrg next_insn = NEXT_INSN (insn);
4229 1.1 mrg
4230 1.1 mrg if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
4231 1.1 mrg {
4232 1.1 mrg same_section_as_entry = !same_section_as_entry;
4233 1.1 mrg continue;
4234 1.1 mrg }
4235 1.1 mrg
4236 1.1 mrg if (same_section_as_entry)
4237 1.1 mrg continue;
4238 1.1 mrg
4239 1.1 mrg if (!(INSN_P (insn)
4240 1.1 mrg && GET_CODE (PATTERN (insn)) != USE
4241 1.1 mrg && GET_CODE (PATTERN (insn)) != CLOBBER))
4242 1.1 mrg continue;
4243 1.1 mrg
4244 1.1 mrg if (match_addli_pcrel (insn))
4245 1.1 mrg replace_addli_pcrel (insn);
4246 1.1 mrg else if (match_auli_pcrel (insn))
4247 1.1 mrg replace_auli_pcrel (insn);
4248 1.1 mrg }
4249 1.1 mrg }
4250 1.1 mrg
4251 1.1 mrg
/* Ensure that no var tracking notes are emitted in the middle of a
   three-instruction bundle.

   Bundles are marked by insn mode (see tilepro_gen_bundles /
   tilepro_asm_output_opcode): SImode on an insn that starts a bundle,
   QImode on the insn that ends one.  Any NOTE_INSN_VAR_LOCATION found
   between those two markers is unlinked from the insn chain, pushed
   onto a per-block LIFO queue (linked through PREV_INSN), and
   re-inserted after the bundle-ending insn.  */
static void
reorder_var_tracking_notes (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      rtx_insn *queue = NULL;      /* LIFO of deferred var-location notes.  */
      bool in_bundle = false;      /* Inside an open bundle?  */

      for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
	{
	  next = NEXT_INSN (insn);

	  if (INSN_P (insn))
	    {
	      /* Emit queued up notes at the last instruction of a bundle.  */
	      if (GET_MODE (insn) == QImode)
		{
		  /* Splice each queued note back in right after INSN.
		     Because the queue is LIFO, this restores the notes'
		     original relative order.  */
		  while (queue)
		    {
		      rtx_insn *next_queue = PREV_INSN (queue);
		      SET_PREV_INSN (NEXT_INSN (insn)) = queue;
		      SET_NEXT_INSN (queue) = NEXT_INSN (insn);
		      SET_NEXT_INSN (insn) = queue;
		      SET_PREV_INSN (queue) = insn;
		      queue = next_queue;
		    }
		  in_bundle = false;
		}
	      else if (GET_MODE (insn) == SImode)
		in_bundle = true;
	    }
	  else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
	    {
	      if (in_bundle)
		{
		  /* Unlink the note from the chain...  */
		  rtx_insn *prev = PREV_INSN (insn);
		  SET_PREV_INSN (next) = prev;
		  SET_NEXT_INSN (prev) = next;

		  /* ...and push it onto the queue, linked via PREV_INSN.  */
		  SET_PREV_INSN (insn) = queue;
		  queue = insn;
		}
	    }
	}
    }
}
4302 1.1 mrg
4303 1.1 mrg
/* Perform machine dependent operations on the rtl chain INSNS.
   Implements TARGET_MACHINE_DEPENDENT_REORG.  Because this target
   sets TARGET_DELAY_SCHED2 and TARGET_DELAY_VARTRACK, the second
   scheduling pass and variable tracking both run here, after
   bundling-relevant transformations.  The pass order below is
   significant.  */
static void
tilepro_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it
     now.  */
  compute_bb_for_insn ();

  /* Hot/cold partitioning can separate pc-relative references from
     the function's text label; rewrite those to go through the GOT.  */
  if (flag_reorder_blocks_and_partition)
    {
      tilepro_fixup_pcrel_references ();
    }

  if (flag_schedule_insns_after_reload)
    {
      split_all_insns ();

      /* Delayed second scheduling pass (TARGET_DELAY_SCHED2).  */
      timevar_push (TV_SCHED2);
      schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule to group into bundles.  */
      tilepro_gen_bundles ();
    }

  df_analyze ();

  /* Delayed variable tracking (TARGET_DELAY_VARTRACK); afterwards
     move var-location notes out of the middle of bundles.  */
  if (flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);
}
4342 1.1 mrg
4343 1.1 mrg
4345 1.1 mrg
4346 1.1 mrg /* Assembly */
4347 1.1 mrg
4348 1.1 mrg /* Select a format to encode pointers in exception handling data.
4349 1.1 mrg CODE is 0 for data, 1 for code labels, 2 for function pointers.
4350 1.1 mrg GLOBAL is true if the symbol may be affected by dynamic
4351 1.1 mrg relocations. */
4352 1.1 mrg int
4353 1.1 mrg tilepro_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED, int global)
4354 1.1 mrg {
4355 1.1 mrg return (global ? DW_EH_PE_indirect : 0) | DW_EH_PE_pcrel | DW_EH_PE_sdata4;
4356 1.1 mrg }
4357 1.1 mrg
4358 1.1 mrg
/* Implement TARGET_ASM_OUTPUT_MI_THUNK.  Emit the assembly for a
   vcall thunk: adjust the incoming "this" pointer by DELTA (and, if
   VCALL_OFFSET is nonzero, by a vtable-loaded offset), then tail-call
   FUNCTION.  The RTL is generated and immediately run through the
   final pass into FILE.  */
static void
tilepro_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
			     HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
			     tree function)
{
  /* NOTE(review): thunk_fndecl is marked ATTRIBUTE_UNUSED but is in
     fact used below (fnname, assemble_start/end_function).  */
  const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
  rtx this_rtx, funexp;
  rtx_insn *insn;

  /* Pretend to be a post-reload pass while generating rtl.  */
  reload_completed = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $1.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this_rtx = gen_rtx_REG (Pmode, 1);
  else
    this_rtx = gen_rtx_REG (Pmode, 0);

  /* Add DELTA to THIS_RTX.  */
  emit_insn (gen_addsi3 (this_rtx, this_rtx, GEN_INT (delta)));

  /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.  */
  if (vcall_offset)
    {
      rtx tmp;

      /* Register 29 serves as the scratch here — presumably a
	 call-clobbered register safe to use in a thunk; confirm
	 against the register conventions.  */
      tmp = gen_rtx_REG (Pmode, 29);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));

      emit_insn (gen_addsi3 (tmp, tmp, GEN_INT (vcall_offset)));

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));

      emit_insn (gen_addsi3 (this_rtx, this_rtx, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
  insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.

     We don't currently bundle, but the instruction sequence is all
     serial except for the tail call, so we're only wasting one cycle.
   */
  insn = get_insns ();
  shorten_branches (insn);
  assemble_start_function (thunk_fndecl, fnname);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();
  assemble_end_function (thunk_fndecl, fnname);

  /* Stop pretending to be a post-reload pass.  */
  reload_completed = 0;
}
4429 1.1 mrg
4430 1.1 mrg
4431 1.1 mrg /* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE. */
4432 1.1 mrg static void
4433 1.1 mrg tilepro_asm_trampoline_template (FILE *file)
4434 1.1 mrg {
4435 1.1 mrg fprintf (file, "\tlnk r10\n");
4436 1.1 mrg fprintf (file, "\taddi r10, r10, 32\n");
4437 1.1 mrg fprintf (file, "\tlwadd r11, r10, %d\n", GET_MODE_SIZE (ptr_mode));
4438 1.1 mrg fprintf (file, "\tlw r10, r10\n");
4439 1.1 mrg fprintf (file, "\tjr r11\n");
4440 1.1 mrg fprintf (file, "\t.word 0 # <function address>\n");
4441 1.1 mrg fprintf (file, "\t.word 0 # <static chain value>\n");
4442 1.1 mrg }
4443 1.1 mrg
4444 1.1 mrg
4445 1.1 mrg /* Implement TARGET_TRAMPOLINE_INIT. */
4446 1.1 mrg static void
4447 1.1 mrg tilepro_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4448 1.1 mrg {
4449 1.1 mrg rtx fnaddr, chaddr;
4450 1.1 mrg rtx mem;
4451 1.1 mrg rtx begin_addr, end_addr;
4452 1.1 mrg int ptr_mode_size = GET_MODE_SIZE (ptr_mode);
4453 1.1 mrg
4454 1.1 mrg fnaddr = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
4455 1.1 mrg chaddr = copy_to_reg (static_chain);
4456 1.1 mrg
4457 1.1 mrg emit_block_move (m_tramp, assemble_trampoline_template (),
4458 1.1 mrg GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
4459 1.1 mrg
4460 1.1 mrg mem = adjust_address (m_tramp, ptr_mode,
4461 1.1 mrg TRAMPOLINE_SIZE - 2 * ptr_mode_size);
4462 1.1 mrg emit_move_insn (mem, fnaddr);
4463 1.1 mrg mem = adjust_address (m_tramp, ptr_mode,
4464 1.1 mrg TRAMPOLINE_SIZE - ptr_mode_size);
4465 1.1 mrg emit_move_insn (mem, chaddr);
4466 1.1 mrg
4467 1.1 mrg /* Get pointers to the beginning and end of the code block. */
4468 1.1 mrg begin_addr = force_reg (Pmode, XEXP (m_tramp, 0));
4469 1.1 mrg end_addr = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0),
4470 1.1 mrg TRAMPOLINE_SIZE));
4471 1.1 mrg
4472 1.1 mrg maybe_emit_call_builtin___clear_cache (begin_addr, end_addr);
4473 1.1 mrg }
4474 1.1 mrg
4475 1.1 mrg
4476 1.1 mrg /* Implement TARGET_PRINT_OPERAND. */
4477 1.1 mrg static void
4478 1.1 mrg tilepro_print_operand (FILE *file, rtx x, int code)
4479 1.1 mrg {
4480 1.1 mrg switch (code)
4481 1.1 mrg {
4482 1.1 mrg case 'c':
4483 1.1 mrg /* Print the compare operator opcode for conditional moves. */
4484 1.1 mrg switch (GET_CODE (x))
4485 1.1 mrg {
4486 1.1 mrg case EQ:
4487 1.1 mrg fputs ("z", file);
4488 1.1 mrg break;
4489 1.1 mrg case NE:
4490 1.1 mrg fputs ("nz", file);
4491 1.1 mrg break;
4492 1.1 mrg default:
4493 1.1 mrg output_operand_lossage ("invalid %%c operand");
4494 1.1 mrg }
4495 1.1 mrg return;
4496 1.1 mrg
4497 1.1 mrg case 'C':
4498 1.1 mrg /* Print the compare operator opcode for conditional moves. */
4499 1.1 mrg switch (GET_CODE (x))
4500 1.1 mrg {
4501 1.1 mrg case EQ:
4502 1.1 mrg fputs ("nz", file);
4503 1.1 mrg break;
4504 1.1 mrg case NE:
4505 1.1 mrg fputs ("z", file);
4506 1.1 mrg break;
4507 1.1 mrg default:
4508 1.1 mrg output_operand_lossage ("invalid %%C operand");
4509 1.1 mrg }
4510 1.1 mrg return;
4511 1.1 mrg
4512 1.1 mrg case 'h':
4513 1.1 mrg {
4514 1.1 mrg /* Print the high 16 bits of a 32-bit constant. */
4515 1.1 mrg HOST_WIDE_INT i;
4516 1.1 mrg if (CONST_INT_P (x))
4517 1.1 mrg i = INTVAL (x);
4518 1.1 mrg else if (GET_CODE (x) == CONST_DOUBLE)
4519 1.1 mrg i = CONST_DOUBLE_LOW (x);
4520 1.1 mrg else
4521 1.1 mrg {
4522 1.1 mrg output_operand_lossage ("invalid %%h operand");
4523 1.1 mrg return;
4524 1.1 mrg }
4525 1.1 mrg i = trunc_int_for_mode (i >> 16, HImode);
4526 1.1 mrg fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
4527 1.1 mrg return;
4528 1.1 mrg }
4529 1.1 mrg
4530 1.1 mrg case 'H':
4531 1.1 mrg {
4532 1.1 mrg rtx addr = NULL;
4533 1.1 mrg const char *opstr = NULL;
4534 1.1 mrg bool pcrel = false;
4535 1.1 mrg if (GET_CODE (x) == CONST
4536 1.1 mrg && GET_CODE (XEXP (x, 0)) == UNSPEC)
4537 1.1 mrg {
4538 1.1 mrg addr = XVECEXP (XEXP (x, 0), 0, 0);
4539 1.1 mrg switch (XINT (XEXP (x, 0), 1))
4540 1.1 mrg {
4541 1.1 mrg case UNSPEC_GOT32_SYM:
4542 1.1 mrg opstr = "got_ha16";
4543 1.1 mrg break;
4544 1.1 mrg case UNSPEC_PCREL_SYM:
4545 1.1 mrg opstr = "ha16";
4546 1.1 mrg pcrel = true;
4547 1.1 mrg break;
4548 1.1 mrg case UNSPEC_TLS_GD:
4549 1.1 mrg opstr = "tls_gd_ha16";
4550 1.1 mrg break;
4551 1.1 mrg case UNSPEC_TLS_IE:
4552 1.1 mrg opstr = "tls_ie_ha16";
4553 1.1 mrg break;
4554 1.1 mrg case UNSPEC_TLS_LE:
4555 1.1 mrg opstr = "tls_le_ha16";
4556 1.1 mrg break;
4557 1.1 mrg default:
4558 1.1 mrg output_operand_lossage ("invalid %%H operand");
4559 1.1 mrg }
4560 1.1 mrg }
4561 1.1 mrg else
4562 1.1 mrg {
4563 1.1 mrg addr = x;
4564 1.1 mrg opstr = "ha16";
4565 1.1 mrg }
4566 1.1 mrg
4567 1.1 mrg fputs (opstr, file);
4568 1.1 mrg fputc ('(', file);
4569 1.1 mrg output_addr_const (file, addr);
4570 1.1 mrg
4571 1.1 mrg if (pcrel)
4572 1.1 mrg {
4573 1.1 mrg rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
4574 1.1 mrg fputs (" - " , file);
4575 1.1 mrg output_addr_const (file, addr2);
4576 1.1 mrg }
4577 1.1 mrg
4578 1.1 mrg fputc (')', file);
4579 1.1 mrg return;
4580 1.1 mrg }
4581 1.1 mrg
4582 1.1 mrg case 'I':
4583 1.1 mrg /* Print an auto-inc memory operand. */
4584 1.1 mrg if (!MEM_P (x))
4585 1.1 mrg {
4586 1.1 mrg output_operand_lossage ("invalid %%I operand");
4587 1.1 mrg return;
4588 1.1 mrg }
4589 1.1 mrg
4590 1.1 mrg output_memory_autoinc_first = true;
4591 1.1 mrg output_address (GET_MODE (x), XEXP (x, 0));
4592 1.1 mrg return;
4593 1.1 mrg
4594 1.1 mrg case 'i':
4595 1.1 mrg /* Print an auto-inc memory operand. */
4596 1.1 mrg if (!MEM_P (x))
4597 1.1 mrg {
4598 1.1 mrg output_operand_lossage ("invalid %%i operand");
4599 1.1 mrg return;
4600 1.1 mrg }
4601 1.1 mrg
4602 1.1 mrg output_memory_autoinc_first = false;
4603 1.1 mrg output_address (GET_MODE (x), XEXP (x, 0));
4604 1.1 mrg return;
4605 1.1 mrg
4606 1.1 mrg case 'j':
4607 1.1 mrg {
4608 1.1 mrg /* Print the low 8 bits of a constant. */
4609 1.1 mrg HOST_WIDE_INT i;
4610 1.1 mrg if (CONST_INT_P (x))
4611 1.1 mrg i = INTVAL (x);
4612 1.1 mrg else if (GET_CODE (x) == CONST_DOUBLE)
4613 1.1 mrg i = CONST_DOUBLE_LOW (x);
4614 1.1 mrg else if (GET_CODE (x) == CONST_VECTOR
4615 1.1 mrg && CONST_INT_P (CONST_VECTOR_ELT (x, 0)))
4616 1.1 mrg i = INTVAL (CONST_VECTOR_ELT (x, 0));
4617 1.1 mrg else
4618 1.1 mrg {
4619 1.1 mrg output_operand_lossage ("invalid %%j operand");
4620 1.1 mrg return;
4621 1.1 mrg }
4622 1.1 mrg i = trunc_int_for_mode (i, QImode);
4623 1.1 mrg fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
4624 1.1 mrg return;
4625 1.1 mrg }
4626 1.1 mrg
4627 1.1 mrg case 'L':
4628 1.1 mrg {
4629 1.1 mrg rtx addr = NULL;
4630 1.1 mrg const char *opstr = NULL;
4631 1.1 mrg bool pcrel = false;
4632 1.1 mrg if (GET_CODE (x) == CONST
4633 1.1 mrg && GET_CODE (XEXP (x, 0)) == UNSPEC)
4634 1.1 mrg {
4635 1.1 mrg addr = XVECEXP (XEXP (x, 0), 0, 0);
4636 1.1 mrg switch (XINT (XEXP (x, 0), 1))
4637 1.1 mrg {
4638 1.1 mrg case UNSPEC_GOT16_SYM:
4639 1.1 mrg opstr = "got";
4640 1.1 mrg break;
4641 1.1 mrg case UNSPEC_GOT32_SYM:
4642 1.1 mrg opstr = "got_lo16";
4643 1.1 mrg break;
4644 1.1 mrg case UNSPEC_PCREL_SYM:
4645 1.1 mrg opstr = "lo16";
4646 1.1 mrg pcrel = true;
4647 1.1 mrg break;
4648 1.1 mrg case UNSPEC_TLS_GD:
4649 1.1 mrg opstr = "tls_gd_lo16";
4650 1.1 mrg break;
4651 1.1 mrg case UNSPEC_TLS_IE:
4652 1.1 mrg opstr = "tls_ie_lo16";
4653 1.1 mrg break;
4654 1.1 mrg case UNSPEC_TLS_LE:
4655 1.1 mrg opstr = "tls_le_lo16";
4656 1.1 mrg break;
4657 1.1 mrg default:
4658 1.1 mrg output_operand_lossage ("invalid %%L operand");
4659 1.1 mrg }
4660 1.1 mrg }
4661 1.1 mrg else
4662 1.1 mrg {
4663 1.1 mrg addr = x;
4664 1.1 mrg opstr = "lo16";
4665 1.1 mrg }
4666 1.1 mrg
4667 1.1 mrg fputs (opstr, file);
4668 1.1 mrg fputc ('(', file);
4669 1.1 mrg output_addr_const (file, addr);
4670 1.1 mrg
4671 1.1 mrg if (pcrel)
4672 1.1 mrg {
4673 1.1 mrg rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
4674 1.1 mrg fputs (" - " , file);
4675 1.1 mrg output_addr_const (file, addr2);
4676 1.1 mrg }
4677 1.1 mrg
4678 1.1 mrg fputc (')', file);
4679 1.1 mrg return;
4680 1.1 mrg }
4681 1.1 mrg
4682 1.1 mrg case 'p':
4683 1.1 mrg if (GET_CODE (x) == SYMBOL_REF)
4684 1.1 mrg {
4685 1.1 mrg if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
4686 1.1 mrg fprintf (file, "plt(");
4687 1.1 mrg output_addr_const (file, x);
4688 1.1 mrg if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
4689 1.1 mrg fprintf (file, ")");
4690 1.1 mrg }
4691 1.1 mrg else
4692 1.1 mrg output_addr_const (file, x);
4693 1.1 mrg return;
4694 1.1 mrg
4695 1.1 mrg case 'P':
4696 1.1 mrg {
4697 1.1 mrg /* Print a 32-bit constant plus one. */
4698 1.1 mrg HOST_WIDE_INT i;
4699 1.1 mrg if (!CONST_INT_P (x))
4700 1.1 mrg {
4701 1.1 mrg output_operand_lossage ("invalid %%P operand");
4702 1.1 mrg return;
4703 1.1 mrg }
4704 1.1 mrg i = trunc_int_for_mode (INTVAL (x) + 1, SImode);
4705 1.1 mrg fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
4706 1.1 mrg return;
4707 1.1 mrg }
4708 1.1 mrg
4709 1.1 mrg case 'M':
4710 1.1 mrg {
4711 1.1 mrg /* Print an mm-style bit range. */
4712 1.1 mrg int first_bit, last_bit;
4713 1.1 mrg
4714 1.1 mrg if (!CONST_INT_P (x)
4715 1.1 mrg || !tilepro_bitfield_operand_p (INTVAL (x), &first_bit,
4716 1.1 mrg &last_bit))
4717 1.1 mrg {
4718 1.1 mrg output_operand_lossage ("invalid %%M operand");
4719 1.1 mrg return;
4720 1.1 mrg }
4721 1.1 mrg
4722 1.1 mrg fprintf (file, "%d, %d", first_bit, last_bit);
4723 1.1 mrg return;
4724 1.1 mrg }
4725 1.1 mrg
4726 1.1 mrg case 'N':
4727 1.1 mrg {
4728 1.1 mrg const char *reg = NULL;
4729 1.1 mrg
4730 1.1 mrg /* Print a network register. */
4731 1.1 mrg if (!CONST_INT_P (x))
4732 1.1 mrg {
4733 1.1 mrg output_operand_lossage ("invalid %%N operand");
4734 1.1 mrg return;
4735 1.1 mrg }
4736 1.1 mrg
4737 1.1 mrg switch (INTVAL (x))
4738 1.1 mrg {
4739 1.1 mrg case TILEPRO_NETREG_IDN0: reg = "idn0"; break;
4740 1.1 mrg case TILEPRO_NETREG_IDN1: reg = "idn1"; break;
4741 1.1 mrg case TILEPRO_NETREG_SN: reg = "sn"; break;
4742 1.1 mrg case TILEPRO_NETREG_UDN0: reg = "udn0"; break;
4743 1.1 mrg case TILEPRO_NETREG_UDN1: reg = "udn1"; break;
4744 1.1 mrg case TILEPRO_NETREG_UDN2: reg = "udn2"; break;
4745 1.1 mrg case TILEPRO_NETREG_UDN3: reg = "udn3"; break;
4746 1.1 mrg default: gcc_unreachable ();
4747 1.1 mrg }
4748 1.1 mrg
4749 1.1 mrg fprintf (file, reg);
4750 1.1 mrg return;
4751 1.1 mrg }
4752 1.1 mrg
4753 1.1 mrg case 't':
4754 1.1 mrg {
4755 1.1 mrg /* Log base 2 of a power of two. */
4756 1.1 mrg HOST_WIDE_INT i;
4757 1.1 mrg HOST_WIDE_INT n;
4758 1.1 mrg
4759 1.1 mrg if (!CONST_INT_P (x))
4760 1.1 mrg {
4761 1.1 mrg output_operand_lossage ("invalid %%t operand");
4762 1.1 mrg return;
4763 1.1 mrg }
4764 1.1 mrg n = trunc_int_for_mode (INTVAL (x), SImode);
4765 1.1 mrg i = exact_log2 (n);
4766 1.1 mrg if (i < 0)
4767 1.1 mrg {
4768 1.1 mrg output_operand_lossage ("invalid %%t operand");
4769 1.1 mrg return;
4770 1.1 mrg }
4771 1.1 mrg
4772 1.1 mrg fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
4773 1.1 mrg return;
4774 1.1 mrg }
4775 1.1 mrg break;
4776 1.1 mrg
4777 1.1 mrg case 'r':
4778 1.1 mrg /* In this case we need a register. Use 'zero' if the
4779 1.1 mrg operand is const0_rtx. */
4780 1.1 mrg if (x == const0_rtx
4781 1.1 mrg || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
4782 1.1 mrg {
4783 1.1 mrg fputs ("zero", file);
4784 1.1 mrg return;
4785 1.1 mrg }
4786 1.1 mrg else if (!REG_P (x))
4787 1.1 mrg {
4788 1.1 mrg output_operand_lossage ("invalid %%r operand");
4789 1.1 mrg return;
4790 1.1 mrg }
4791 1.1 mrg /* FALLTHRU */
4792 1.1 mrg
4793 1.1 mrg case 0:
4794 1.1 mrg if (REG_P (x))
4795 1.1 mrg {
4796 1.1 mrg fprintf (file, "%s", reg_names[REGNO (x)]);
4797 1.1 mrg return;
4798 1.1 mrg }
4799 1.1 mrg else if (MEM_P (x))
4800 1.1 mrg {
4801 1.1 mrg output_address (VOIDmode, XEXP (x, 0));
4802 1.1 mrg return;
4803 1.1 mrg }
4804 1.1 mrg else
4805 1.1 mrg {
4806 1.1 mrg output_addr_const (file, x);
4807 1.1 mrg return;
4808 1.1 mrg }
4809 1.1 mrg break;
4810 1.1 mrg }
4811 1.1 mrg
4812 1.1 mrg debug_rtx (x);
4813 1.1 mrg output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
4814 1.1 mrg code, code);
4815 1.1 mrg }
4816 1.1 mrg
4817 1.1 mrg
4818 1.1 mrg /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
4819 1.1 mrg static void
4820 1.1 mrg tilepro_print_operand_address (FILE *file, machine_mode mode, rtx addr)
4821 1.1 mrg {
4822 1.1 mrg if (GET_CODE (addr) == POST_DEC
4823 1.1 mrg || GET_CODE (addr) == POST_INC)
4824 1.1 mrg {
4825 1.1 mrg int offset = GET_MODE_SIZE (mode);
4826 1.1 mrg
4827 1.1 mrg gcc_assert (mode != VOIDmode);
4828 1.1 mrg
4829 1.1 mrg if (output_memory_autoinc_first)
4830 1.1 mrg fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
4831 1.1 mrg else
4832 1.1 mrg fprintf (file, "%d",
4833 1.1 mrg GET_CODE (addr) == POST_DEC ? -offset : offset);
4834 1.1 mrg }
4835 1.1 mrg else if (GET_CODE (addr) == POST_MODIFY)
4836 1.1 mrg {
4837 1.1 mrg gcc_assert (mode != VOIDmode);
4838 1.1 mrg
4839 1.1 mrg gcc_assert (GET_CODE (XEXP (addr, 1)) == PLUS);
4840 1.1 mrg
4841 1.1 mrg if (output_memory_autoinc_first)
4842 1.1 mrg fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
4843 1.1 mrg else
4844 1.1 mrg fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4845 1.1 mrg INTVAL (XEXP (XEXP (addr, 1), 1)));
4846 1.1 mrg }
4847 1.1 mrg else
4848 1.1 mrg tilepro_print_operand (file, addr, 'r');
4849 1.1 mrg }
4850 1.1 mrg
4851 1.1 mrg
/* Machine mode of current insn, for determining curly brace
   placement.  SImode marks the start of a bundle and QImode its end
   (see tilepro_asm_output_opcode).  */
static machine_mode insn_mode;


/* Implement FINAL_PRESCAN_INSN.  This is used to emit bundles.  */
void
tilepro_final_prescan_insn (rtx_insn *insn)
{
  /* Record this for tilepro_asm_output_opcode to examine.  */
  insn_mode = GET_MODE (insn);
}
4864 1.1 mrg
4865 1.1 mrg
4866 1.1 mrg /* While emitting asm, are we currently inside '{' for a bundle? */
4867 1.1 mrg static bool tilepro_in_bundle = false;
4868 1.1 mrg
4869 1.1 mrg /* Implement ASM_OUTPUT_OPCODE. Prepend/append curly braces as
4870 1.1 mrg appropriate given the bundling information recorded by
4871 1.1 mrg tilepro_gen_bundles. */
4872 1.1 mrg const char *
4873 1.1 mrg tilepro_asm_output_opcode (FILE *stream, const char *code)
4874 1.1 mrg {
4875 1.1 mrg bool pseudo = !strcmp (code, "pseudo");
4876 1.1 mrg
4877 1.1 mrg if (!tilepro_in_bundle && insn_mode == SImode)
4878 1.1 mrg {
4879 1.1 mrg /* Start a new bundle. */
4880 1.1 mrg fprintf (stream, "{\n\t");
4881 1.1 mrg tilepro_in_bundle = true;
4882 1.1 mrg }
4883 1.1 mrg
4884 1.1 mrg if (tilepro_in_bundle && insn_mode == QImode)
4885 1.1 mrg {
4886 1.1 mrg /* Close an existing bundle. */
4887 1.1 mrg static char buf[100];
4888 1.1 mrg
4889 1.1 mrg gcc_assert (strlen (code) + 3 + 1 < sizeof (buf));
4890 1.1 mrg
4891 1.1 mrg strcpy (buf, pseudo ? "" : code);
4892 1.1 mrg strcat (buf, "\n\t}");
4893 1.1 mrg tilepro_in_bundle = false;
4894 1.1 mrg
4895 1.1 mrg return buf;
4896 1.1 mrg }
4897 1.1 mrg else
4898 1.1 mrg {
4899 1.1 mrg return pseudo ? "" : code;
4900 1.1 mrg }
4901 1.1 mrg }
4902 1.1 mrg
4903 1.1 mrg
4904 1.1 mrg /* Output assembler code to FILE to increment profiler label # LABELNO
4905 1.1 mrg for profiling a function entry. */
4906 1.1 mrg void
4907 1.1 mrg tilepro_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
4908 1.1 mrg {
4909 1.1 mrg if (tilepro_in_bundle)
4910 1.1 mrg {
4911 1.1 mrg fprintf (file, "\t}\n");
4912 1.1 mrg }
4913 1.1 mrg
4914 1.1 mrg if (flag_pic)
4915 1.1 mrg {
4916 1.1 mrg fprintf (file,
4917 1.1 mrg "\t{\n"
4918 1.1 mrg "\tmove\tr10, lr\n"
4919 1.1 mrg "\tjal\tplt(%s)\n"
4920 1.1 mrg "\t}\n", MCOUNT_NAME);
4921 1.1 mrg }
4922 1.1 mrg else
4923 1.1 mrg {
4924 1.1 mrg fprintf (file,
4925 1.1 mrg "\t{\n"
4926 1.1 mrg "\tmove\tr10, lr\n"
4927 1.1 mrg "\tjal\t%s\n"
4928 1.1 mrg "\t}\n", MCOUNT_NAME);
4929 1.1 mrg }
4930 1.1 mrg
4931 1.1 mrg tilepro_in_bundle = false;
4932 1.1 mrg }
4933 1.1 mrg
4934 1.1 mrg
/* Implement TARGET_ASM_FILE_END.  Emit the non-executable-stack
   marker when the target configuration requires it.  */
static void
tilepro_file_end (void)
{
  if (NEED_INDICATE_EXEC_STACK)
    file_end_indicate_exec_stack ();
}
4942 1.1 mrg
4943 1.1 mrg
/* Target hook table.  Each #define below overrides the default in
   target-def.h, routing the hook to the TILEPro implementation
   defined earlier in this file (or to a suitable generic hook); the
   table is instantiated into TARGETM at the bottom.  */

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE tilepro_option_override

#ifdef TARGET_THREAD_SSP_OFFSET
#undef TARGET_STACK_PROTECT_GUARD
#define TARGET_STACK_PROTECT_GUARD hook_tree_void_null
#endif

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P tilepro_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P tile_vector_mode_supported_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM tilepro_cannot_force_const_mem

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL tilepro_function_ok_for_sibcall

/* Argument passing and return values.  */
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE tilepro_pass_by_reference

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY tilepro_return_in_memory

#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY tilepro_function_arg_boundary

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG tilepro_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE tilepro_function_arg_advance

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE tilepro_function_value

#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE tilepro_libcall_value

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P tilepro_function_value_regno_p

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE \
  default_promote_function_mode_always_promote

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false

/* Varargs.  */
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST tilepro_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START tilepro_va_start

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS tilepro_setup_incoming_varargs

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR tilepro_gimplify_va_arg_expr

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS tilepro_rtx_costs

/* Limit to what we can reach in one addli.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -32768
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 32767

/* Addressing and constants.  */
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P tilepro_legitimate_constant_p

#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P tilepro_legitimate_address_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS tilepro_legitimize_address

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS tilepro_delegitimize_address

/* Builtins.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS tilepro_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL tilepro_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN tilepro_expand_builtin

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE tilepro_conditional_register_usage

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED tilepro_frame_pointer_required

/* Sched2 and var-tracking are delayed so they run from
   tilepro_reorg, after bundling.  */
#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE tilepro_issue_rate

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST tilepro_sched_adjust_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG tilepro_reorg

/* Thunks, trampolines, and assembly output.  */
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK tilepro_asm_output_mi_thunk

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE tilepro_asm_trampoline_template

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT tilepro_trampoline_init

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND tilepro_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS tilepro_print_operand_address

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END tilepro_file_end

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings

struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated for this file.  */
#include "gt-tilepro.h"
5095