xtensa.cc revision 1.1 1 1.1 mrg /* Subroutines for insn-output.cc for Tensilica's Xtensa architecture.
2 1.1 mrg Copyright (C) 2001-2022 Free Software Foundation, Inc.
3 1.1 mrg Contributed by Bob Wilson (bwilson (at) tensilica.com) at Tensilica.
4 1.1 mrg
5 1.1 mrg This file is part of GCC.
6 1.1 mrg
7 1.1 mrg GCC is free software; you can redistribute it and/or modify it under
8 1.1 mrg the terms of the GNU General Public License as published by the Free
9 1.1 mrg Software Foundation; either version 3, or (at your option) any later
10 1.1 mrg version.
11 1.1 mrg
12 1.1 mrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 1.1 mrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 1.1 mrg FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 1.1 mrg for more details.
16 1.1 mrg
17 1.1 mrg You should have received a copy of the GNU General Public License
18 1.1 mrg along with GCC; see the file COPYING3. If not see
19 1.1 mrg <http://www.gnu.org/licenses/>. */
20 1.1 mrg
21 1.1 mrg #define IN_TARGET_CODE 1
22 1.1 mrg
23 1.1 mrg #include "config.h"
24 1.1 mrg #include "system.h"
25 1.1 mrg #include "coretypes.h"
26 1.1 mrg #include "backend.h"
27 1.1 mrg #include "target.h"
28 1.1 mrg #include "rtl.h"
29 1.1 mrg #include "tree.h"
30 1.1 mrg #include "gimple.h"
31 1.1 mrg #include "cfghooks.h"
32 1.1 mrg #include "df.h"
33 1.1 mrg #include "memmodel.h"
34 1.1 mrg #include "tm_p.h"
35 1.1 mrg #include "stringpool.h"
36 1.1 mrg #include "attribs.h"
37 1.1 mrg #include "optabs.h"
38 1.1 mrg #include "regs.h"
39 1.1 mrg #include "emit-rtl.h"
40 1.1 mrg #include "recog.h"
41 1.1 mrg #include "diagnostic-core.h"
42 1.1 mrg #include "cfgrtl.h"
43 1.1 mrg #include "output.h"
44 1.1 mrg #include "fold-const.h"
45 1.1 mrg #include "stor-layout.h"
46 1.1 mrg #include "calls.h"
47 1.1 mrg #include "varasm.h"
48 1.1 mrg #include "alias.h"
49 1.1 mrg #include "explow.h"
50 1.1 mrg #include "expr.h"
51 1.1 mrg #include "reload.h"
52 1.1 mrg #include "langhooks.h"
53 1.1 mrg #include "gimplify.h"
54 1.1 mrg #include "builtins.h"
55 1.1 mrg #include "dumpfile.h"
56 1.1 mrg #include "hw-doloop.h"
57 1.1 mrg #include "rtl-iter.h"
58 1.1 mrg
59 1.1 mrg /* This file should be included last. */
60 1.1 mrg #include "target-def.h"
61 1.1 mrg
/* Enumeration for all of the relational tests, so that we can build
   arrays indexed by the test type, and not worry about the order
   of EQ, NE, etc.  */

enum internal_test
{
  ITEST_EQ,
  ITEST_NE,
  ITEST_GT,
  ITEST_GE,
  ITEST_LT,
  ITEST_LE,
  ITEST_GTU,
  ITEST_GEU,
  ITEST_LTU,
  ITEST_LEU,
  /* Number of tests; also serves as "not a recognized test".  The order
     of the values above must match the rows of the info[] table in
     gen_int_relational, which is indexed by this enum.  */
  ITEST_MAX
};
80 1.1 mrg
/* Array giving truth value on whether or not a given hard register
   can support a given mode.  Consulted by xtensa_hard_regno_mode_ok;
   presumably filled in during target initialization (not visible in
   this chunk) — TODO confirm.  */
static char xtensa_hard_regno_mode_ok_p
  [(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];

/* Largest block move to handle in-line.  Used by xtensa_mem_offset to
   bound the worst-case BLKmode offset.  */
#define LARGEST_MOVE_RATIO 15
88 1.1 mrg
/* Define the structure for the machine field in struct function.
   Per-function state for the Xtensa backend.  */
struct GTY(()) machine_function
{
  /* Nonzero if the function accesses the previous (caller's) frame —
     presumably set when __builtin_return_address/frame_address is used;
     TODO confirm against the setters elsewhere in this file.  */
  int accesses_prev_frame;
  /* NOTE(review): the a7 fields below appear to track the windowed-ABI
     handling of incoming register a7 — confirm against xtensa_copy_incoming_a7
     and xtensa_builtin_saveregs.  */
  bool need_a7_copy;
  bool vararg_a7;
  rtx vararg_a7_copy;
  /* The insn that sets up the frame pointer, if any.  */
  rtx_insn *set_frame_ptr_insn;
  /* Current frame size calculated by compute_frame_size.  */
  unsigned current_frame_size;
  /* Callee-save area size in the current frame calculated by
     compute_frame_size.  */
  int callee_save_size;
  /* True once compute_frame_size has laid out the frame.  */
  bool frame_laid_out;
  /* True once the epilogue has been emitted.  */
  bool epilogue_done;
};
105 1.1 mrg
/* Vector, indexed by hard register number, which contains 1 for a
   register that is allowable in a candidate for leaf function
   treatment.  All registers are allowed here; the row grouping
   presumably mirrors the register-class layout in xtensa.h (AR regs,
   special regs, FP regs, condition code) — TODO confirm.  */

const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
{
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1
};
117 1.1 mrg
/* Forward declarations for the static helpers and target-hook
   implementations defined later in this file; most are wired into
   targetm via the TARGET_* macro table below.  */
static void xtensa_option_override (void);
static enum internal_test map_test_to_internal_test (enum rtx_code);
static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
static rtx gen_float_relational (enum rtx_code, rtx, rtx);
static rtx gen_conditional_move (enum rtx_code, machine_mode, rtx, rtx);
static rtx fixup_subreg_mem (rtx);
static struct machine_function * xtensa_init_machine_status (void);
static rtx xtensa_legitimize_tls_address (rtx);
static rtx xtensa_legitimize_address (rtx, rtx, machine_mode);
static bool xtensa_mode_dependent_address_p (const_rtx, addr_space_t);
static bool xtensa_return_in_msb (const_tree);
static void printx (FILE *, signed int);
static rtx xtensa_builtin_saveregs (void);
static bool xtensa_legitimate_address_p (machine_mode, rtx, bool);
static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
							int) ATTRIBUTE_UNUSED;
static section *xtensa_select_rtx_section (machine_mode, rtx,
					   unsigned HOST_WIDE_INT);
static bool xtensa_rtx_costs (rtx, machine_mode, int, int, int *, bool);
static int xtensa_register_move_cost (machine_mode, reg_class_t,
				      reg_class_t);
static int xtensa_memory_move_cost (machine_mode, reg_class_t, bool);
static tree xtensa_build_builtin_va_list (void);
static bool xtensa_return_in_memory (const_tree, const_tree);
static tree xtensa_gimplify_va_arg_expr (tree, tree, gimple_seq *,
					 gimple_seq *);
static void xtensa_function_arg_advance (cumulative_args_t,
					 const function_arg_info &);
static rtx xtensa_function_arg (cumulative_args_t, const function_arg_info &);
static rtx xtensa_function_incoming_arg (cumulative_args_t,
					 const function_arg_info &);
static rtx xtensa_function_value (const_tree, const_tree, bool);
static rtx xtensa_libcall_value (machine_mode, const_rtx);
static bool xtensa_function_value_regno_p (const unsigned int);
static unsigned int xtensa_function_arg_boundary (machine_mode,
						  const_tree);
static void xtensa_init_builtins (void);
static tree xtensa_fold_builtin (tree, int, tree *, bool);
static rtx xtensa_expand_builtin (tree, rtx, rtx, machine_mode, int);
static void xtensa_va_start (tree, rtx);
static bool xtensa_frame_pointer_required (void);
static rtx xtensa_static_chain (const_tree, bool);
static void xtensa_asm_trampoline_template (FILE *);
static void xtensa_trampoline_init (rtx, tree, rtx);
static bool xtensa_output_addr_const_extra (FILE *, rtx);
static bool xtensa_cannot_force_const_mem (machine_mode, rtx);

/* Reload-related hooks.  */
static reg_class_t xtensa_preferred_reload_class (rtx, reg_class_t);
static reg_class_t xtensa_preferred_output_reload_class (rtx, reg_class_t);
static reg_class_t xtensa_secondary_reload (bool, rtx, reg_class_t,
					    machine_mode,
					    struct secondary_reload_info *);

static bool constantpool_address_p (const_rtx addr);
static bool xtensa_legitimate_constant_p (machine_mode, rtx);
static void xtensa_reorg (void);
static bool xtensa_can_use_doloop_p (const widest_int &, const widest_int &,
				     unsigned int, bool);
static const char *xtensa_invalid_within_doloop (const rtx_insn *);

static bool xtensa_member_type_forces_blk (const_tree,
					   machine_mode mode);

static void xtensa_conditional_register_usage (void);
static unsigned int xtensa_hard_regno_nregs (unsigned int, machine_mode);
static bool xtensa_hard_regno_mode_ok (unsigned int, machine_mode);
static bool xtensa_modes_tieable_p (machine_mode, machine_mode);
static HOST_WIDE_INT xtensa_constant_alignment (const_tree, HOST_WIDE_INT);
static bool xtensa_can_eliminate (const int from ATTRIBUTE_UNUSED,
				  const int to);
static HOST_WIDE_INT xtensa_starting_frame_offset (void);
static unsigned HOST_WIDE_INT xtensa_asan_shadow_offset (void);

static rtx xtensa_delegitimize_address (rtx);
192 1.1 mrg
193 1.1 mrg
194 1.1 mrg
/* Target hook table: each #define below plugs one of the static
   implementations above into the targetm vector built by
   TARGET_INITIALIZER at the end of this block.  */

/* These hooks specify assembly directives for creating certain kinds
   of integer object.  */

#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS xtensa_legitimize_address
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P xtensa_mode_dependent_address_p

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST xtensa_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST xtensa_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xtensa_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef TARGET_MEMBER_TYPE_FORCES_BLK
#define TARGET_MEMBER_TYPE_FORCES_BLK xtensa_member_type_forces_blk

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xtensa_va_start

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE xtensa_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE xtensa_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P xtensa_function_value_regno_p

#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE xtensa_function_arg_advance
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG xtensa_function_arg
#undef TARGET_FUNCTION_INCOMING_ARG
#define TARGET_FUNCTION_INCOMING_ARG xtensa_function_incoming_arg
#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY xtensa_function_arg_boundary

#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xtensa_gimplify_va_arg_expr

#undef TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB xtensa_return_in_msb

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xtensa_init_builtins
#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN xtensa_fold_builtin
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xtensa_expand_builtin

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS xtensa_preferred_reload_class
#undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xtensa_preferred_output_reload_class

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD xtensa_secondary_reload

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM xtensa_cannot_force_const_mem

/* This port still uses the old (reload) register allocator.  */
#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P xtensa_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED xtensa_frame_pointer_required

#undef TARGET_STATIC_CHAIN
#define TARGET_STATIC_CHAIN xtensa_static_chain
#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE xtensa_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xtensa_trampoline_init

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE xtensa_option_override

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA xtensa_output_addr_const_extra

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P xtensa_legitimate_constant_p

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xtensa_reorg

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P xtensa_can_use_doloop_p

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP xtensa_invalid_within_doloop

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE xtensa_conditional_register_usage

#undef TARGET_HARD_REGNO_NREGS
#define TARGET_HARD_REGNO_NREGS xtensa_hard_regno_nregs
#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK xtensa_hard_regno_mode_ok

#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P xtensa_modes_tieable_p

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT xtensa_constant_alignment

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE xtensa_can_eliminate

#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET xtensa_starting_frame_offset

#undef TARGET_ASAN_SHADOW_OFFSET
#define TARGET_ASAN_SHADOW_OFFSET xtensa_asan_shadow_offset

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS xtensa_delegitimize_address

/* The single instance of the target hook vector for this backend.  */
struct gcc_target targetm = TARGET_INITIALIZER;
348 1.1 mrg
349 1.1 mrg
350 1.1 mrg /* Functions to test Xtensa immediate operand validity. */
352 1.1 mrg
353 1.1 mrg bool
354 1.1 mrg xtensa_simm8 (HOST_WIDE_INT v)
355 1.1 mrg {
356 1.1 mrg return v >= -128 && v <= 127;
357 1.1 mrg }
358 1.1 mrg
359 1.1 mrg
360 1.1 mrg bool
361 1.1 mrg xtensa_simm8x256 (HOST_WIDE_INT v)
362 1.1 mrg {
363 1.1 mrg return (v & 255) == 0 && (v >= -32768 && v <= 32512);
364 1.1 mrg }
365 1.1 mrg
366 1.1 mrg
367 1.1 mrg bool
368 1.1 mrg xtensa_simm12b (HOST_WIDE_INT v)
369 1.1 mrg {
370 1.1 mrg return v >= -2048 && v <= 2047;
371 1.1 mrg }
372 1.1 mrg
373 1.1 mrg
374 1.1 mrg static bool
375 1.1 mrg xtensa_uimm8 (HOST_WIDE_INT v)
376 1.1 mrg {
377 1.1 mrg return v >= 0 && v <= 255;
378 1.1 mrg }
379 1.1 mrg
380 1.1 mrg
381 1.1 mrg static bool
382 1.1 mrg xtensa_uimm8x2 (HOST_WIDE_INT v)
383 1.1 mrg {
384 1.1 mrg return (v & 1) == 0 && (v >= 0 && v <= 510);
385 1.1 mrg }
386 1.1 mrg
387 1.1 mrg
388 1.1 mrg static bool
389 1.1 mrg xtensa_uimm8x4 (HOST_WIDE_INT v)
390 1.1 mrg {
391 1.1 mrg return (v & 3) == 0 && (v >= 0 && v <= 1020);
392 1.1 mrg }
393 1.1 mrg
394 1.1 mrg
395 1.1 mrg static bool
396 1.1 mrg xtensa_b4const (HOST_WIDE_INT v)
397 1.1 mrg {
398 1.1 mrg switch (v)
399 1.1 mrg {
400 1.1 mrg case -1:
401 1.1 mrg case 1:
402 1.1 mrg case 2:
403 1.1 mrg case 3:
404 1.1 mrg case 4:
405 1.1 mrg case 5:
406 1.1 mrg case 6:
407 1.1 mrg case 7:
408 1.1 mrg case 8:
409 1.1 mrg case 10:
410 1.1 mrg case 12:
411 1.1 mrg case 16:
412 1.1 mrg case 32:
413 1.1 mrg case 64:
414 1.1 mrg case 128:
415 1.1 mrg case 256:
416 1.1 mrg return true;
417 1.1 mrg }
418 1.1 mrg return false;
419 1.1 mrg }
420 1.1 mrg
421 1.1 mrg
422 1.1 mrg bool
423 1.1 mrg xtensa_b4const_or_zero (HOST_WIDE_INT v)
424 1.1 mrg {
425 1.1 mrg if (v == 0)
426 1.1 mrg return true;
427 1.1 mrg return xtensa_b4const (v);
428 1.1 mrg }
429 1.1 mrg
430 1.1 mrg
431 1.1 mrg bool
432 1.1 mrg xtensa_b4constu (HOST_WIDE_INT v)
433 1.1 mrg {
434 1.1 mrg switch (v)
435 1.1 mrg {
436 1.1 mrg case 32768:
437 1.1 mrg case 65536:
438 1.1 mrg case 2:
439 1.1 mrg case 3:
440 1.1 mrg case 4:
441 1.1 mrg case 5:
442 1.1 mrg case 6:
443 1.1 mrg case 7:
444 1.1 mrg case 8:
445 1.1 mrg case 10:
446 1.1 mrg case 12:
447 1.1 mrg case 16:
448 1.1 mrg case 32:
449 1.1 mrg case 64:
450 1.1 mrg case 128:
451 1.1 mrg case 256:
452 1.1 mrg return true;
453 1.1 mrg }
454 1.1 mrg return false;
455 1.1 mrg }
456 1.1 mrg
457 1.1 mrg
458 1.1 mrg bool
459 1.1 mrg xtensa_mask_immediate (HOST_WIDE_INT v)
460 1.1 mrg {
461 1.1 mrg #define MAX_MASK_SIZE 16
462 1.1 mrg int mask_size;
463 1.1 mrg
464 1.1 mrg for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
465 1.1 mrg {
466 1.1 mrg if ((v & 1) == 0)
467 1.1 mrg return false;
468 1.1 mrg v = v >> 1;
469 1.1 mrg if (v == 0)
470 1.1 mrg return true;
471 1.1 mrg }
472 1.1 mrg
473 1.1 mrg return false;
474 1.1 mrg }
475 1.1 mrg
476 1.1 mrg
477 1.1 mrg /* This is just like the standard true_regnum() function except that it
478 1.1 mrg works even when reg_renumber is not initialized. */
479 1.1 mrg
480 1.1 mrg int
481 1.1 mrg xt_true_regnum (rtx x)
482 1.1 mrg {
483 1.1 mrg if (GET_CODE (x) == REG)
484 1.1 mrg {
485 1.1 mrg if (reg_renumber
486 1.1 mrg && REGNO (x) >= FIRST_PSEUDO_REGISTER
487 1.1 mrg && reg_renumber[REGNO (x)] >= 0)
488 1.1 mrg return reg_renumber[REGNO (x)];
489 1.1 mrg return REGNO (x);
490 1.1 mrg }
491 1.1 mrg if (GET_CODE (x) == SUBREG)
492 1.1 mrg {
493 1.1 mrg int base = xt_true_regnum (SUBREG_REG (x));
494 1.1 mrg if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
495 1.1 mrg return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
496 1.1 mrg GET_MODE (SUBREG_REG (x)),
497 1.1 mrg SUBREG_BYTE (x), GET_MODE (x));
498 1.1 mrg }
499 1.1 mrg return -1;
500 1.1 mrg }
501 1.1 mrg
502 1.1 mrg
503 1.1 mrg int
504 1.1 mrg xtensa_valid_move (machine_mode mode, rtx *operands)
505 1.1 mrg {
506 1.1 mrg /* Either the destination or source must be a register, and the
507 1.1 mrg MAC16 accumulator doesn't count. */
508 1.1 mrg
509 1.1 mrg if (register_operand (operands[0], mode))
510 1.1 mrg {
511 1.1 mrg int dst_regnum = xt_true_regnum (operands[0]);
512 1.1 mrg
513 1.1 mrg if (xtensa_tls_referenced_p (operands[1]))
514 1.1 mrg return FALSE;
515 1.1 mrg
516 1.1 mrg /* The stack pointer can only be assigned with a MOVSP opcode. */
517 1.1 mrg if (dst_regnum == STACK_POINTER_REGNUM)
518 1.1 mrg return !TARGET_WINDOWED_ABI
519 1.1 mrg || (mode == SImode
520 1.1 mrg && register_operand (operands[1], mode)
521 1.1 mrg && !ACC_REG_P (xt_true_regnum (operands[1])));
522 1.1 mrg
523 1.1 mrg if (!ACC_REG_P (dst_regnum))
524 1.1 mrg return true;
525 1.1 mrg }
526 1.1 mrg if (register_operand (operands[1], mode))
527 1.1 mrg {
528 1.1 mrg int src_regnum = xt_true_regnum (operands[1]);
529 1.1 mrg if (!ACC_REG_P (src_regnum))
530 1.1 mrg return true;
531 1.1 mrg }
532 1.1 mrg return FALSE;
533 1.1 mrg }
534 1.1 mrg
535 1.1 mrg
536 1.1 mrg int
537 1.1 mrg smalloffset_mem_p (rtx op)
538 1.1 mrg {
539 1.1 mrg if (GET_CODE (op) == MEM)
540 1.1 mrg {
541 1.1 mrg rtx addr = XEXP (op, 0);
542 1.1 mrg if (GET_CODE (addr) == REG)
543 1.1 mrg return BASE_REG_P (addr, 0);
544 1.1 mrg if (GET_CODE (addr) == PLUS)
545 1.1 mrg {
546 1.1 mrg rtx offset = XEXP (addr, 0);
547 1.1 mrg HOST_WIDE_INT val;
548 1.1 mrg if (GET_CODE (offset) != CONST_INT)
549 1.1 mrg offset = XEXP (addr, 1);
550 1.1 mrg if (GET_CODE (offset) != CONST_INT)
551 1.1 mrg return FALSE;
552 1.1 mrg
553 1.1 mrg val = INTVAL (offset);
554 1.1 mrg return (val & 3) == 0 && (val >= 0 && val <= 60);
555 1.1 mrg }
556 1.1 mrg }
557 1.1 mrg return FALSE;
558 1.1 mrg }
559 1.1 mrg
560 1.1 mrg
561 1.1 mrg static bool
562 1.1 mrg constantpool_address_p (const_rtx addr)
563 1.1 mrg {
564 1.1 mrg const_rtx sym = addr;
565 1.1 mrg
566 1.1 mrg if (GET_CODE (addr) == CONST)
567 1.1 mrg {
568 1.1 mrg rtx offset;
569 1.1 mrg
570 1.1 mrg /* Only handle (PLUS (SYM, OFFSET)) form. */
571 1.1 mrg addr = XEXP (addr, 0);
572 1.1 mrg if (GET_CODE (addr) != PLUS)
573 1.1 mrg return false;
574 1.1 mrg
575 1.1 mrg /* Make sure the address is word aligned. */
576 1.1 mrg offset = XEXP (addr, 1);
577 1.1 mrg if ((!CONST_INT_P (offset))
578 1.1 mrg || ((INTVAL (offset) & 3) != 0))
579 1.1 mrg return false;
580 1.1 mrg
581 1.1 mrg sym = XEXP (addr, 0);
582 1.1 mrg }
583 1.1 mrg
584 1.1 mrg if ((GET_CODE (sym) == SYMBOL_REF)
585 1.1 mrg && CONSTANT_POOL_ADDRESS_P (sym))
586 1.1 mrg return true;
587 1.1 mrg return false;
588 1.1 mrg }
589 1.1 mrg
590 1.1 mrg
591 1.1 mrg int
592 1.1 mrg constantpool_mem_p (rtx op)
593 1.1 mrg {
594 1.1 mrg if (GET_CODE (op) == SUBREG)
595 1.1 mrg op = SUBREG_REG (op);
596 1.1 mrg if (GET_CODE (op) == MEM)
597 1.1 mrg return constantpool_address_p (XEXP (op, 0));
598 1.1 mrg return FALSE;
599 1.1 mrg }
600 1.1 mrg
601 1.1 mrg
602 1.1 mrg /* Return TRUE if X is a thread-local symbol. */
603 1.1 mrg
604 1.1 mrg static bool
605 1.1 mrg xtensa_tls_symbol_p (rtx x)
606 1.1 mrg {
607 1.1 mrg if (! targetm.have_tls)
608 1.1 mrg return false;
609 1.1 mrg
610 1.1 mrg return GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0;
611 1.1 mrg }
612 1.1 mrg
613 1.1 mrg
614 1.1 mrg void
615 1.1 mrg xtensa_extend_reg (rtx dst, rtx src)
616 1.1 mrg {
617 1.1 mrg rtx temp = gen_reg_rtx (SImode);
618 1.1 mrg rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
619 1.1 mrg
620 1.1 mrg /* Generate paradoxical subregs as needed so that the modes match. */
621 1.1 mrg src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
622 1.1 mrg dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
623 1.1 mrg
624 1.1 mrg emit_insn (gen_ashlsi3 (temp, src, shift));
625 1.1 mrg emit_insn (gen_ashrsi3 (dst, temp, shift));
626 1.1 mrg }
627 1.1 mrg
628 1.1 mrg
629 1.1 mrg bool
630 1.1 mrg xtensa_mem_offset (unsigned v, machine_mode mode)
631 1.1 mrg {
632 1.1 mrg switch (mode)
633 1.1 mrg {
634 1.1 mrg case E_BLKmode:
635 1.1 mrg /* Handle the worst case for block moves. See xtensa_expand_block_move
636 1.1 mrg where we emit an optimized block move operation if the block can be
637 1.1 mrg moved in < "move_ratio" pieces. The worst case is when the block is
638 1.1 mrg aligned but has a size of (3 mod 4) (does this happen?) so that the
639 1.1 mrg last piece requires a byte load/store. */
640 1.1 mrg return (xtensa_uimm8 (v)
641 1.1 mrg && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
642 1.1 mrg
643 1.1 mrg case E_QImode:
644 1.1 mrg return xtensa_uimm8 (v);
645 1.1 mrg
646 1.1 mrg case E_HImode:
647 1.1 mrg return xtensa_uimm8x2 (v);
648 1.1 mrg
649 1.1 mrg case E_DImode:
650 1.1 mrg case E_DFmode:
651 1.1 mrg return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
652 1.1 mrg
653 1.1 mrg default:
654 1.1 mrg break;
655 1.1 mrg }
656 1.1 mrg
657 1.1 mrg return xtensa_uimm8x4 (v);
658 1.1 mrg }
659 1.1 mrg
660 1.1 mrg
661 1.1 mrg /* Make normal rtx_code into something we can index from an array. */
662 1.1 mrg
663 1.1 mrg static enum internal_test
664 1.1 mrg map_test_to_internal_test (enum rtx_code test_code)
665 1.1 mrg {
666 1.1 mrg enum internal_test test = ITEST_MAX;
667 1.1 mrg
668 1.1 mrg switch (test_code)
669 1.1 mrg {
670 1.1 mrg default: break;
671 1.1 mrg case EQ: test = ITEST_EQ; break;
672 1.1 mrg case NE: test = ITEST_NE; break;
673 1.1 mrg case GT: test = ITEST_GT; break;
674 1.1 mrg case GE: test = ITEST_GE; break;
675 1.1 mrg case LT: test = ITEST_LT; break;
676 1.1 mrg case LE: test = ITEST_LE; break;
677 1.1 mrg case GTU: test = ITEST_GTU; break;
678 1.1 mrg case GEU: test = ITEST_GEU; break;
679 1.1 mrg case LTU: test = ITEST_LTU; break;
680 1.1 mrg case LEU: test = ITEST_LEU; break;
681 1.1 mrg }
682 1.1 mrg
683 1.1 mrg return test;
684 1.1 mrg }
685 1.1 mrg
686 1.1 mrg
/* Generate the code to compare two integer values.  The return value is
   the comparison expression.  *P_INVERT is set when the caller must
   branch on the opposite sense of the returned comparison.  */

static rtx
gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
		    rtx cmp0, /* first operand to compare */
		    rtx cmp1, /* second operand to compare */
		    int *p_invert /* whether branch needs to reverse test */)
{
  /* Recipe for lowering one relational test onto the comparisons the
     hardware supports directly.  */
  struct cmp_info
  {
    enum rtx_code test_code;	/* test code to use in insn */
    bool (*const_range_p) (HOST_WIDE_INT); /* range check function */
    int const_add;		/* constant to add (convert LE -> LT) */
    int reverse_regs;		/* reverse registers in test */
    int invert_const;		/* != 0 if invert value if cmp1 is constant */
    int invert_reg;		/* != 0 if invert value if cmp1 is register */
    int unsignedp;		/* != 0 for unsigned comparisons.  */
  };

  /* Indexed by enum internal_test; the row order must match that enum.  */
  static struct cmp_info info[ (int)ITEST_MAX ] = {

    { EQ, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },	/* EQ  */
    { NE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },	/* NE  */

    { LT, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 },	/* GT  */
    { GE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },	/* GE  */
    { LT, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },	/* LT  */
    { GE, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 },	/* LE  */

    { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 },		/* GTU */
    { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 },		/* GEU */
    { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 },		/* LTU */
    { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 },		/* LEU */
  };

  enum internal_test test;
  machine_mode mode;
  struct cmp_info *p_info;

  test = map_test_to_internal_test (test_code);
  gcc_assert (test != ITEST_MAX);

  p_info = &info[ (int)test ];

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Make sure we can handle any constants given to us.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (cmp1);
      unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;

      /* if the immediate overflows or does not fit in the immediate field,
	 spill it to a register */

      /* NOTE(review): the signed arm of this overflow check relies on
	 wrapping signed addition, which ISO C leaves undefined; presumably
	 safe under the compiler options GCC builds itself with — confirm.  */
      if ((p_info->unsignedp ?
	   (uvalue + p_info->const_add > uvalue) :
	   (value + p_info->const_add > value)) != (p_info->const_add > 0))
	{
	  cmp1 = force_reg (mode, cmp1);
	}
      else if (!(p_info->const_range_p) (value + p_info->const_add))
	{
	  cmp1 = force_reg (mode, cmp1);
	}
    }
  else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
    {
      cmp1 = force_reg (mode, cmp1);
    }

  /* See if we need to invert the result.  */
  *p_invert = ((GET_CODE (cmp1) == CONST_INT)
	       ? p_info->invert_const
	       : p_info->invert_reg);

  /* Comparison to constants, may involve adding 1 to change a LT into LE.
     Comparison between two registers, may involve switching operands.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      if (p_info->const_add != 0)
	cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);

    }
  else if (p_info->reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
}
783 1.1 mrg
784 1.1 mrg
/* Generate the code to compare two float values.  The return value is
   the comparison expression.  Emits an FP compare insn that sets the
   FP condition-code register, then returns an EQ/NE test on it.  */

static rtx
gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
		      rtx cmp0, /* first operand to compare */
		      rtx cmp1 /* second operand to compare */)
{
  rtx (*gen_fn) (rtx, rtx, rtx);
  rtx brtmp;
  int reverse_regs, invert;

  /* Map the test onto the FP compare insns the port has (eq/lt/le and
     their unordered variants); GT/GE come from LT/LE with swapped
     operands, NE/LTGT/ORDERED from inverting an equality/unordered test.  */
  switch (test_code)
    {
    case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
    case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
    case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
    case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
    case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
    case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
    case UNEQ: reverse_regs = 0; invert = 0; gen_fn = gen_suneq_sf; break;
    case LTGT: reverse_regs = 0; invert = 1; gen_fn = gen_suneq_sf; break;
    case UNLE: reverse_regs = 0; invert = 0; gen_fn = gen_sunle_sf; break;
    case UNGT: reverse_regs = 1; invert = 0; gen_fn = gen_sunlt_sf; break;
    case UNLT: reverse_regs = 0; invert = 0; gen_fn = gen_sunlt_sf; break;
    case UNGE: reverse_regs = 1; invert = 0; gen_fn = gen_sunle_sf; break;
    case UNORDERED:
      reverse_regs = 0; invert = 0; gen_fn = gen_sunordered_sf; break;
    case ORDERED:
      reverse_regs = 0; invert = 1; gen_fn = gen_sunordered_sf; break;
    default:
      fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
      reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
    }

  if (reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  /* The compare result lands in the FP condition-code register.  */
  brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
  emit_insn (gen_fn (brtmp, cmp0, cmp1));

  return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
}
832 1.1 mrg
833 1.1 mrg
834 1.1 mrg void
835 1.1 mrg xtensa_expand_conditional_branch (rtx *operands, machine_mode mode)
836 1.1 mrg {
837 1.1 mrg enum rtx_code test_code = GET_CODE (operands[0]);
838 1.1 mrg rtx cmp0 = operands[1];
839 1.1 mrg rtx cmp1 = operands[2];
840 1.1 mrg rtx cmp;
841 1.1 mrg int invert;
842 1.1 mrg rtx label1, label2;
843 1.1 mrg
844 1.1 mrg switch (mode)
845 1.1 mrg {
846 1.1 mrg case E_DFmode:
847 1.1 mrg default:
848 1.1 mrg fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
849 1.1 mrg
850 1.1 mrg case E_SImode:
851 1.1 mrg invert = FALSE;
852 1.1 mrg cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
853 1.1 mrg break;
854 1.1 mrg
855 1.1 mrg case E_SFmode:
856 1.1 mrg if (!TARGET_HARD_FLOAT)
857 1.1 mrg fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode,
858 1.1 mrg cmp0, cmp1));
859 1.1 mrg invert = FALSE;
860 1.1 mrg cmp = gen_float_relational (test_code, cmp0, cmp1);
861 1.1 mrg break;
862 1.1 mrg }
863 1.1 mrg
864 1.1 mrg /* Generate the branch. */
865 1.1 mrg
866 1.1 mrg label1 = gen_rtx_LABEL_REF (VOIDmode, operands[3]);
867 1.1 mrg label2 = pc_rtx;
868 1.1 mrg
869 1.1 mrg if (invert)
870 1.1 mrg {
871 1.1 mrg label2 = label1;
872 1.1 mrg label1 = pc_rtx;
873 1.1 mrg }
874 1.1 mrg
875 1.1 mrg emit_jump_insn (gen_rtx_SET (pc_rtx,
876 1.1 mrg gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
877 1.1 mrg label1,
878 1.1 mrg label2)));
879 1.1 mrg }
880 1.1 mrg
881 1.1 mrg
/* Build the comparison RTX used to drive a conditional move.  CODE is
   the comparison, MODE the mode of operands OP0/OP1 (SImode, or SFmode
   with hard float).  Returns an RTX (CODE op0' op1') suitable for the
   movsicc/movsfcc patterns, emitting any setup insns required (e.g. a
   subtraction so an equality test becomes a test against zero), or 0
   if the comparison cannot be handled.  */

static rtx
gen_conditional_move (enum rtx_code code, machine_mode mode,
		      rtx op0, rtx op1)
{
  if (mode == SImode)
    {
      rtx cmp;

      /* Jump optimization calls get_condition() which canonicalizes
	 comparisons like (GE x <const>) to (GT x <const-1>).
	 Transform those comparisons back to GE, since that is the
	 comparison supported in Xtensa.  We shouldn't have to
	 transform <LE x const> comparisons, because neither
	 xtensa_expand_conditional_branch() nor get_condition() will
	 produce them.  */

      if ((code == GT) && (op1 == constm1_rtx))
	{
	  code = GE;
	  op1 = const0_rtx;
	}
      /* Dummy RTX whose only purpose is to classify CODE via the
	 boolean_operator/branch_operator predicates below.  */
      cmp = gen_rtx_fmt_ee (code, VOIDmode, pc_rtx, const0_rtx);

      if (boolean_operator (cmp, VOIDmode))
	{
	  /* Swap the operands to make const0 second.  */
	  if (op0 == const0_rtx)
	    {
	      op0 = op1;
	      op1 = const0_rtx;
	    }

	  /* If not comparing against zero, emit a comparison (subtract).  */
	  if (op1 != const0_rtx)
	    {
	      op0 = expand_binop (SImode, sub_optab, op0, op1,
				  0, 0, OPTAB_LIB_WIDEN);
	      op1 = const0_rtx;
	    }
	}
      else if (branch_operator (cmp, VOIDmode))
	{
	  /* Swap the operands to make const0 second.  Swapping the
	     operands of a signed order comparison flips its sense.  */
	  if (op0 == const0_rtx)
	    {
	      op0 = op1;
	      op1 = const0_rtx;

	      switch (code)
		{
		case LT: code = GE; break;
		case GE: code = LT; break;
		default: gcc_unreachable ();
		}
	    }

	  /* Signed order comparisons can only be tested against zero
	     here; anything else is unsupported.  */
	  if (op1 != const0_rtx)
	    return 0;
	}
      else
	return 0;

      return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
    }

  if (TARGET_HARD_FLOAT && mode == SFmode)
    return gen_float_relational (code, op0, op1);

  return 0;
}
952 1.1 mrg
953 1.1 mrg
954 1.1 mrg int
955 1.1 mrg xtensa_expand_conditional_move (rtx *operands, int isflt)
956 1.1 mrg {
957 1.1 mrg rtx dest = operands[0];
958 1.1 mrg rtx cmp = operands[1];
959 1.1 mrg machine_mode cmp_mode = GET_MODE (XEXP (cmp, 0));
960 1.1 mrg rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
961 1.1 mrg
962 1.1 mrg if (!(cmp = gen_conditional_move (GET_CODE (cmp), cmp_mode,
963 1.1 mrg XEXP (cmp, 0), XEXP (cmp, 1))))
964 1.1 mrg return 0;
965 1.1 mrg
966 1.1 mrg if (isflt)
967 1.1 mrg gen_fn = (cmp_mode == SImode
968 1.1 mrg ? gen_movsfcc_internal0
969 1.1 mrg : gen_movsfcc_internal1);
970 1.1 mrg else
971 1.1 mrg gen_fn = (cmp_mode == SImode
972 1.1 mrg ? gen_movsicc_internal0
973 1.1 mrg : gen_movsicc_internal1);
974 1.1 mrg
975 1.1 mrg emit_insn (gen_fn (dest, XEXP (cmp, 0), operands[2], operands[3], cmp));
976 1.1 mrg return 1;
977 1.1 mrg }
978 1.1 mrg
979 1.1 mrg
980 1.1 mrg int
981 1.1 mrg xtensa_expand_scc (rtx operands[4], machine_mode cmp_mode)
982 1.1 mrg {
983 1.1 mrg rtx dest = operands[0];
984 1.1 mrg rtx cmp;
985 1.1 mrg rtx one_tmp, zero_tmp;
986 1.1 mrg rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
987 1.1 mrg
988 1.1 mrg if (!(cmp = gen_conditional_move (GET_CODE (operands[1]), cmp_mode,
989 1.1 mrg operands[2], operands[3])))
990 1.1 mrg return 0;
991 1.1 mrg
992 1.1 mrg one_tmp = gen_reg_rtx (SImode);
993 1.1 mrg zero_tmp = gen_reg_rtx (SImode);
994 1.1 mrg emit_insn (gen_movsi (one_tmp, const_true_rtx));
995 1.1 mrg emit_insn (gen_movsi (zero_tmp, const0_rtx));
996 1.1 mrg
997 1.1 mrg gen_fn = (cmp_mode == SImode
998 1.1 mrg ? gen_movsicc_internal0
999 1.1 mrg : gen_movsicc_internal1);
1000 1.1 mrg emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1001 1.1 mrg return 1;
1002 1.1 mrg }
1003 1.1 mrg
1004 1.1 mrg
1005 1.1 mrg /* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
1006 1.1 mrg for the output, i.e., the input operands are twice as big as MODE. */
1007 1.1 mrg
1008 1.1 mrg void
1009 1.1 mrg xtensa_split_operand_pair (rtx operands[4], machine_mode mode)
1010 1.1 mrg {
1011 1.1 mrg switch (GET_CODE (operands[1]))
1012 1.1 mrg {
1013 1.1 mrg case REG:
1014 1.1 mrg operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
1015 1.1 mrg operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
1016 1.1 mrg break;
1017 1.1 mrg
1018 1.1 mrg case MEM:
1019 1.1 mrg operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
1020 1.1 mrg operands[2] = adjust_address (operands[1], mode, 0);
1021 1.1 mrg break;
1022 1.1 mrg
1023 1.1 mrg case CONST_INT:
1024 1.1 mrg case CONST_DOUBLE:
1025 1.1 mrg split_double (operands[1], &operands[2], &operands[3]);
1026 1.1 mrg break;
1027 1.1 mrg
1028 1.1 mrg default:
1029 1.1 mrg gcc_unreachable ();
1030 1.1 mrg }
1031 1.1 mrg
1032 1.1 mrg switch (GET_CODE (operands[0]))
1033 1.1 mrg {
1034 1.1 mrg case REG:
1035 1.1 mrg operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
1036 1.1 mrg operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
1037 1.1 mrg break;
1038 1.1 mrg
1039 1.1 mrg case MEM:
1040 1.1 mrg operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
1041 1.1 mrg operands[0] = adjust_address (operands[0], mode, 0);
1042 1.1 mrg break;
1043 1.1 mrg
1044 1.1 mrg default:
1045 1.1 mrg gcc_unreachable ();
1046 1.1 mrg }
1047 1.1 mrg }
1048 1.1 mrg
1049 1.1 mrg
/* Emit insns to move operands[1] into operands[0].
   Return 1 if we have written out everything that needs to be done to
   do the move.  Otherwise, return 0 and the caller will emit the move
   normally.  */

int
xtensa_emit_move_sequence (rtx *operands, machine_mode mode)
{
  rtx src = operands[1];

  /* Constants that do not fit a 12-bit signed immediate (MOVI) need
     special handling; everything else falls through to the default
     move expansion.  */
  if (CONSTANT_P (src)
      && (GET_CODE (src) != CONST_INT || ! xtensa_simm12b (INTVAL (src))))
    {
      rtx dst = operands[0];

      /* TLS symbols must be legitimized; peel off any constant addend
	 first and re-add it after legitimization.  */
      if (xtensa_tls_referenced_p (src))
	{
	  rtx addend = NULL;

	  if (GET_CODE (src) == CONST && GET_CODE (XEXP (src, 0)) == PLUS)
	    {
	      addend = XEXP (XEXP (src, 0), 1);
	      src = XEXP (XEXP (src, 0), 0);
	    }

	  src = xtensa_legitimize_tls_address (src);
	  if (addend)
	    {
	      src = gen_rtx_PLUS (mode, src, addend);
	      src = force_operand (src, dst);
	    }
	  emit_move_insn (dst, src);
	  return 1;
	}

      /* Without auto litpools or CONST16 the constant must come from
	 the literal pool (L32R).  */
      if (! TARGET_AUTO_LITPOOLS && ! TARGET_CONST16)
	{
	  /* Try to emit MOVI + SLLI sequence, that is smaller
	     than L32R + literal.  */
	  if (optimize_size && mode == SImode && CONST_INT_P (src)
	      && register_operand (dst, mode))
	    {
	      HOST_WIDE_INT srcval = INTVAL (src);
	      int shift = ctz_hwi (srcval);

	      if (xtensa_simm12b (srcval >> shift))
		{
		  emit_move_insn (dst, GEN_INT (srcval >> shift));
		  emit_insn (gen_ashlsi3_internal (dst, dst, GEN_INT (shift)));
		  return 1;
		}
	    }

	  src = force_const_mem (SImode, src);
	  operands[1] = src;
	}

      /* PC-relative loads are always SImode, and CONST16 is only
	 supported in the movsi pattern, so add a SUBREG for any other
	 (smaller) mode.  */

      if (mode != SImode)
	{
	  if (register_operand (dst, mode))
	    {
	      /* Load straight into the SImode view of the destination
		 register; the move is then complete.  */
	      emit_move_insn (simplify_gen_subreg (SImode, dst, mode, 0), src);
	      return 1;
	    }
	  else
	    {
	      /* Destination is not a register: load the constant into a
		 fresh SImode register and hand its lowpart back to the
		 normal move expansion.  */
	      src = force_reg (SImode, src);
	      src = gen_lowpart_SUBREG (mode, src);
	      operands[1] = src;
	    }
	}
    }

  /* Outside of reload, force illegitimate source operands into a
     register so the move pattern can match.  */
  if (!(reload_in_progress | reload_completed)
      && !xtensa_valid_move (mode, operands))
    operands[1] = force_reg (mode, operands[1]);

  operands[1] = xtensa_copy_incoming_a7 (operands[1]);

  /* During reload we don't want to emit (subreg:X (mem:Y)) since that
     instruction won't be recognized after reload, so we remove the
     subreg and adjust mem accordingly.  */
  if (reload_in_progress)
    {
      operands[0] = fixup_subreg_mem (operands[0]);
      operands[1] = fixup_subreg_mem (operands[1]);
    }
  return 0;
}
1143 1.1 mrg
1144 1.1 mrg
1145 1.1 mrg static rtx
1146 1.1 mrg fixup_subreg_mem (rtx x)
1147 1.1 mrg {
1148 1.1 mrg if (GET_CODE (x) == SUBREG
1149 1.1 mrg && GET_CODE (SUBREG_REG (x)) == REG
1150 1.1 mrg && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1151 1.1 mrg {
1152 1.1 mrg rtx temp =
1153 1.1 mrg gen_rtx_SUBREG (GET_MODE (x),
1154 1.1 mrg reg_equiv_mem (REGNO (SUBREG_REG (x))),
1155 1.1 mrg SUBREG_BYTE (x));
1156 1.1 mrg x = alter_subreg (&temp, true);
1157 1.1 mrg }
1158 1.1 mrg return x;
1159 1.1 mrg }
1160 1.1 mrg
1161 1.1 mrg
/* Check if an incoming argument in a7 is expected to be used soon and
   if OPND is a register or register pair that includes a7.  If so,
   create a new pseudo and copy a7 into that pseudo at the very
   beginning of the function, followed by the special "set_frame_ptr"
   unspec_volatile insn.  The return value is either the original
   operand, if it is not a7, or the new pseudo containing a copy of
   the incoming argument.  This is necessary because the register
   allocator will ignore conflicts with a7 and may either assign some
   other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
   the incoming argument in a7.  By copying the argument out of a7 as
   the very first thing, and then immediately following that with an
   unspec_volatile to keep the scheduler away, we should avoid any
   problems.  Putting the set_frame_ptr insn at the beginning, with
   only the a7 copy before it, also makes it easier for the prologue
   expander to initialize the frame pointer after the a7 copy and to
   fix up the a7 copy to use the stack pointer instead of the frame
   pointer.  */

rtx
xtensa_copy_incoming_a7 (rtx opnd)
{
  rtx entry_insns = 0;
  rtx reg, tmp;
  machine_mode mode;

  if (!cfun->machine->need_a7_copy)
    return opnd;

  /* This function should never be called again once a7 has been copied.  */
  gcc_assert (!cfun->machine->set_frame_ptr_insn);

  mode = GET_MODE (opnd);

  /* The operand using a7 may come in a later instruction, so just return
     the original operand if it doesn't use a7.  */
  reg = opnd;
  if (GET_CODE (reg) == SUBREG)
    {
      gcc_assert (SUBREG_BYTE (reg) == 0);
      reg = SUBREG_REG (reg);
    }
  /* Return unchanged unless REG is a hard register span that covers
     a7: it must start at or below A7_REG and extend up to it.  */
  if (GET_CODE (reg) != REG
      || REGNO (reg) > A7_REG
      || REGNO (reg) + hard_regno_nregs (A7_REG, mode) <= A7_REG)
    return opnd;

  /* 1-word args will always be in a7; 2-word args in a6/a7.  */
  gcc_assert (REGNO (reg) + hard_regno_nregs (A7_REG, mode) - 1 == A7_REG);

  cfun->machine->need_a7_copy = false;

  /* Copy a7 to a new pseudo at the function entry.  Use gen_raw_REG to
     create the REG for a7 so that hard_frame_pointer_rtx is not used.  */

  start_sequence ();
  tmp = gen_reg_rtx (mode);

  switch (mode)
    {
    case E_DFmode:
    case E_DImode:
      /* Copy the value out of A7 here but keep the first word in A6 until
	 after the set_frame_ptr insn.  Otherwise, the register allocator
	 may decide to put "subreg (tmp, 0)" in A7 and clobber the incoming
	 value.  */
      emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
				     gen_raw_REG (SImode, A7_REG)));
      break;
    case E_SFmode:
      emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    case E_SImode:
      emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    case E_HImode:
      emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    case E_QImode:
      emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    default:
      gcc_unreachable ();
    }

  /* Fence insn that keeps the scheduler and allocator from moving
     anything ahead of the a7 copy.  */
  cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());

  /* For DF and DI mode arguments, copy the incoming value in A6 now.  */
  if (mode == DFmode || mode == DImode)
    emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
				   gen_rtx_REG (SImode, A7_REG - 1)));
  entry_insns = get_insns ();
  end_sequence ();

  if (cfun->machine->vararg_a7)
    {
      /* This is called from within builtin_saveregs, which will insert the
	 saveregs code at the function entry, ahead of anything placed at
	 the function entry now.  Instead, save the sequence to be inserted
	 at the beginning of the saveregs code.  */
      cfun->machine->vararg_a7_copy = entry_insns;
    }
  else
    {
      /* Put entry_insns after the NOTE that starts the function.  If
	 this is inside a start_sequence, make the outer-level insn
	 chain current, so the code is placed at the start of the
	 function.  */
      push_topmost_sequence ();
      /* Do not use entry_of_function() here.  This is called from within
	 expand_function_start, when the CFG still holds GIMPLE.  */
      emit_insn_after (entry_insns, get_insns ());
      pop_topmost_sequence ();
    }

  return tmp;
}
1278 1.1 mrg
1279 1.1 mrg
/* Try to expand a block move operation to a sequence of RTL move
   instructions.  If not optimizing, or if the block size is not a
   constant, or if the block is too large, the expansion fails and GCC
   falls back to calling memcpy().

   operands[0] is the destination
   operands[1] is the source
   operands[2] is the length
   operands[3] is the alignment */

int
xtensa_expand_block_move (rtx *operands)
{
  /* Widest move mode usable for a given alignment, indexed by the
     alignment in bytes (index 3 is unused).  */
  static const machine_mode mode_from_align[] =
  {
    VOIDmode, QImode, HImode, VOIDmode, SImode,
  };

  rtx dst_mem = operands[0];
  rtx src_mem = operands[1];
  HOST_WIDE_INT bytes, align;
  int num_pieces, move_ratio;
  rtx temp[2];
  machine_mode mode[2];
  int amount[2];
  bool active[2];
  int phase = 0;
  int next;
  int offset_ld = 0;
  int offset_st = 0;
  rtx x;

  /* If this is not a fixed size move, just call memcpy.  */
  if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
    return 0;

  bytes = INTVAL (operands[2]);
  align = INTVAL (operands[3]);

  /* Anything to move?  */
  if (bytes <= 0)
    return 0;

  if (align > MOVE_MAX)
    align = MOVE_MAX;

  /* Decide whether to expand inline based on the optimization level.  */
  move_ratio = 4;
  if (optimize > 2)
    move_ratio = LARGEST_MOVE_RATIO;
  num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway.  */
  if (num_pieces > move_ratio)
    return 0;

  /* Force both addresses into registers so adjust_address can form
     simple reg+offset addresses for the pieces.  */
  x = XEXP (dst_mem, 0);
  if (!REG_P (x))
    {
      x = force_reg (Pmode, x);
      dst_mem = replace_equiv_address (dst_mem, x);
    }

  x = XEXP (src_mem, 0);
  if (!REG_P (x))
    {
      x = force_reg (Pmode, x);
      src_mem = replace_equiv_address (src_mem, x);
    }

  active[0] = active[1] = false;

  /* Software-pipelined copy: each iteration loads piece NEXT while
     storing the piece loaded on the previous iteration (PHASE), so
     that a load and a store of different pieces can overlap.  */
  do
    {
      next = phase;
      phase ^= 1;

      if (bytes > 0)
	{
	  int next_amount;

	  /* Pick the largest piece allowed by both the remaining byte
	     count and the alignment.  */
	  next_amount = (bytes >= 4 ? 4 : (bytes >= 2 ? 2 : 1));
	  next_amount = MIN (next_amount, align);

	  amount[next] = next_amount;
	  mode[next] = mode_from_align[next_amount];
	  temp[next] = gen_reg_rtx (mode[next]);

	  x = adjust_address (src_mem, mode[next], offset_ld);
	  emit_insn (gen_rtx_SET (temp[next], x));

	  offset_ld += next_amount;
	  bytes -= next_amount;
	  active[next] = true;
	}

      if (active[phase])
	{
	  active[phase] = false;

	  x = adjust_address (dst_mem, mode[phase], offset_st);
	  emit_insn (gen_rtx_SET (x, temp[phase]));

	  offset_st += amount[phase];
	}
    }
  while (active[next]);

  return 1;
}
1388 1.1 mrg
1389 1.1 mrg
1390 1.1 mrg void
1391 1.1 mrg xtensa_expand_nonlocal_goto (rtx *operands)
1392 1.1 mrg {
1393 1.1 mrg rtx goto_handler = operands[1];
1394 1.1 mrg rtx containing_fp = operands[3];
1395 1.1 mrg
1396 1.1 mrg /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1397 1.1 mrg is too big to generate in-line. */
1398 1.1 mrg
1399 1.1 mrg if (GET_CODE (containing_fp) != REG)
1400 1.1 mrg containing_fp = force_reg (Pmode, containing_fp);
1401 1.1 mrg
1402 1.1 mrg emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1403 1.1 mrg LCT_NORMAL, VOIDmode,
1404 1.1 mrg containing_fp, Pmode,
1405 1.1 mrg goto_handler, Pmode);
1406 1.1 mrg }
1407 1.1 mrg
1408 1.1 mrg
1409 1.1 mrg static struct machine_function *
1410 1.1 mrg xtensa_init_machine_status (void)
1411 1.1 mrg {
1412 1.1 mrg return ggc_cleared_alloc<machine_function> ();
1413 1.1 mrg }
1414 1.1 mrg
1415 1.1 mrg
1416 1.1 mrg /* Shift VAL of mode MODE left by COUNT bits. */
1417 1.1 mrg
1418 1.1 mrg static inline rtx
1419 1.1 mrg xtensa_expand_mask_and_shift (rtx val, machine_mode mode, rtx count)
1420 1.1 mrg {
1421 1.1 mrg val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
1422 1.1 mrg NULL_RTX, 1, OPTAB_DIRECT);
1423 1.1 mrg return expand_simple_binop (SImode, ASHIFT, val, count,
1424 1.1 mrg NULL_RTX, 1, OPTAB_DIRECT);
1425 1.1 mrg }
1426 1.1 mrg
1427 1.1 mrg
/* Structure to hold the initial parameters for a compare_and_swap operation
   in HImode and QImode, which is performed on the SImode-aligned word
   enclosing the narrow value.  */

struct alignment_context
{
  rtx memsi;	  /* SI aligned memory location.  */
  rtx shift;	  /* Bit offset with regard to lsb.  */
  rtx modemask;	  /* Mask of the HQImode shifted by SHIFT bits.  */
  rtx modemaski;  /* ~modemask */
};
1438 1.1 mrg
1439 1.1 mrg
/* Initialize structure AC for word access to HI and QI mode memory.
   MEM is the narrow memory reference; AC receives the enclosing
   SImode word, the bit shift of MEM's value within that word, and
   the corresponding masks (see struct alignment_context).  */

static void
init_alignment_context (struct alignment_context *ac, rtx mem)
{
  machine_mode mode = GET_MODE (mem);
  rtx byteoffset = NULL_RTX;
  bool aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));

  if (aligned)
    ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned.  */
  else
    {
      /* Alignment is unknown.  */
      rtx addr, align;

      /* Force the address into a register.  */
      addr = force_reg (Pmode, XEXP (mem, 0));

      /* Align it to SImode.  */
      align = expand_simple_binop (Pmode, AND, addr,
				   GEN_INT (-GET_MODE_SIZE (SImode)),
				   NULL_RTX, 1, OPTAB_DIRECT);
      /* Generate MEM.  */
      ac->memsi = gen_rtx_MEM (SImode, align);
      MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
      set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
      set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));

      /* Byte position of MEM within the aligned word.  */
      byteoffset = expand_simple_binop (Pmode, AND, addr,
					GEN_INT (GET_MODE_SIZE (SImode) - 1),
					NULL_RTX, 1, OPTAB_DIRECT);
    }

  /* Calculate shiftcount.  On big-endian targets the narrow value's
     lsb offset counts from the opposite end of the word, hence the
     subtraction.  */
  if (TARGET_BIG_ENDIAN)
    {
      ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
      if (!aligned)
	ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
					 NULL_RTX, 1, OPTAB_DIRECT);
    }
  else
    {
      if (aligned)
	ac->shift = NULL_RTX;
      else
	ac->shift = byteoffset;
    }

  if (ac->shift != NULL_RTX)
    {
      /* Shift is the byte count, but we need the bitcount.  */
      gcc_assert (exact_log2 (BITS_PER_UNIT) >= 0);
      ac->shift = expand_simple_binop (SImode, ASHIFT, ac->shift,
				       GEN_INT (exact_log2 (BITS_PER_UNIT)),
				       NULL_RTX, 1, OPTAB_DIRECT);
      ac->modemask = expand_simple_binop (SImode, ASHIFT,
					  GEN_INT (GET_MODE_MASK (mode)),
					  ac->shift,
					  NULL_RTX, 1, OPTAB_DIRECT);
    }
  else
    ac->modemask = GEN_INT (GET_MODE_MASK (mode));

  ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
}
1507 1.1 mrg
1508 1.1 mrg
/* Expand an atomic compare and swap operation for HImode and QImode.
   MEM is the memory location, CMP the old value to compare MEM with
   and NEW_RTX the value to set if CMP == MEM.  The word-sized CAS
   (S32C1I via sync_compare_and_swapsi) is applied to the aligned word
   enclosing MEM, looping until the bytes outside MEM's field are
   stable.  TARGET receives the old value of the field.  */

void
xtensa_expand_compare_and_swap (rtx target, rtx mem, rtx cmp, rtx new_rtx)
{
  machine_mode mode = GET_MODE (mem);
  struct alignment_context ac;
  rtx tmp, cmpv, newv, val;
  rtx oldval = gen_reg_rtx (SImode);
  rtx res = gen_reg_rtx (SImode);
  rtx_code_label *csloop = gen_label_rtx ();
  rtx_code_label *csend = gen_label_rtx ();

  init_alignment_context (&ac, mem);

  /* Position CMP and NEW_RTX within the enclosing aligned word.  */
  if (ac.shift != NULL_RTX)
    {
      cmp = xtensa_expand_mask_and_shift (cmp, mode, ac.shift);
      new_rtx = xtensa_expand_mask_and_shift (new_rtx, mode, ac.shift);
    }

  /* Load the surrounding word into VAL with the MEM value masked out.  */
  val = force_reg (SImode, expand_simple_binop (SImode, AND, ac.memsi,
						ac.modemaski, NULL_RTX, 1,
						OPTAB_DIRECT));
  emit_label (csloop);

  /* Patch CMP and NEW_RTX into VAL at correct position.  */
  cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
						 NULL_RTX, 1, OPTAB_DIRECT));
  newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new_rtx, val,
						 NULL_RTX, 1, OPTAB_DIRECT));

  /* Jump to end if we're done.  */
  emit_insn (gen_sync_compare_and_swapsi (res, ac.memsi, cmpv, newv));
  emit_cmp_and_jump_insns (res, cmpv, EQ, const0_rtx, SImode, true, csend);

  /* Check for changes outside mode.  */
  emit_move_insn (oldval, val);
  tmp = expand_simple_binop (SImode, AND, res, ac.modemaski,
			     val, 1, OPTAB_DIRECT);
  if (tmp != val)
    emit_move_insn (val, tmp);

  /* Loop internal if so.  */
  emit_cmp_and_jump_insns (oldval, val, NE, const0_rtx, SImode, true, csloop);

  emit_label (csend);

  /* Return the correct part of the bitfield.  */
  convert_move (target,
		(ac.shift == NULL_RTX ? res
		 : expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
					NULL_RTX, 1, OPTAB_DIRECT)),
		1);
}
1567 1.1 mrg
1568 1.1 mrg
/* Expand an atomic operation CODE of mode MODE (either HImode or QImode --
   the default expansion works fine for SImode).  MEM is the memory location
   and VAL the value to play with.  If AFTER is true then store the value
   MEM holds after the operation, if AFTER is false then store the value MEM
   holds before the operation.  If TARGET is zero then discard that value, else
   store it to TARGET.  Note: CODE == MULT encodes NAND, and CODE == SET
   encodes an atomic exchange.  */

void
xtensa_expand_atomic (enum rtx_code code, rtx target, rtx mem, rtx val,
		      bool after)
{
  machine_mode mode = GET_MODE (mem);
  struct alignment_context ac;
  rtx_code_label *csloop = gen_label_rtx ();
  rtx cmp, tmp;
  rtx old = gen_reg_rtx (SImode);
  rtx new_rtx = gen_reg_rtx (SImode);
  rtx orig = NULL_RTX;

  init_alignment_context (&ac, mem);

  /* Prepare values before the compare-and-swap loop.  */
  if (ac.shift != NULL_RTX)
    val = xtensa_expand_mask_and_shift (val, mode, ac.shift);
  switch (code)
    {
    case PLUS:
    case MINUS:
      /* Keep a copy of the shifted operand; the arithmetic must be
	 redone with the fresh OLD value on every loop iteration.  */
      orig = gen_reg_rtx (SImode);
      convert_move (orig, val, 1);
      break;

    case SET:
    case IOR:
    case XOR:
      break;

    case MULT: /* NAND */
    case AND:
      /* val = "11..1<val>11..1" */
      val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
				 NULL_RTX, 1, OPTAB_DIRECT);
      break;

    default:
      gcc_unreachable ();
    }

  /* Load full word.  Subsequent loads are performed by S32C1I.  */
  cmp = force_reg (SImode, ac.memsi);

  emit_label (csloop);
  emit_move_insn (old, cmp);

  /* Compute the new word value from OLD.  */
  switch (code)
    {
    case PLUS:
    case MINUS:
      val = expand_simple_binop (SImode, code, old, orig,
				 NULL_RTX, 1, OPTAB_DIRECT);
      val = expand_simple_binop (SImode, AND, val, ac.modemask,
				 NULL_RTX, 1, OPTAB_DIRECT);
      /* FALLTHRU */
    case SET:
      /* Merge VAL into OLD outside the field's mask.  */
      tmp = expand_simple_binop (SImode, AND, old, ac.modemaski,
				 NULL_RTX, 1, OPTAB_DIRECT);
      tmp = expand_simple_binop (SImode, IOR, tmp, val,
				 new_rtx, 1, OPTAB_DIRECT);
      break;

    case AND:
    case IOR:
    case XOR:
      /* The bits outside the field are unaffected (VAL was padded with
	 ones for AND above, zeros for IOR/XOR).  */
      tmp = expand_simple_binop (SImode, code, old, val,
				 new_rtx, 1, OPTAB_DIRECT);
      break;

    case MULT: /* NAND */
      tmp = expand_simple_binop (SImode, AND, old, val,
				 NULL_RTX, 1, OPTAB_DIRECT);
      tmp = expand_simple_binop (SImode, XOR, tmp, ac.modemask,
				 new_rtx, 1, OPTAB_DIRECT);
      break;

    default:
      gcc_unreachable ();
    }

  if (tmp != new_rtx)
    emit_move_insn (new_rtx, tmp);
  /* Retry until the word CAS succeeds (CMP receives the observed
     memory value, so it doubles as the "before" result).  */
  emit_insn (gen_sync_compare_and_swapsi (cmp, ac.memsi, old, new_rtx));
  emit_cmp_and_jump_insns (cmp, old, NE, const0_rtx, SImode, true, csloop);

  if (target)
    {
      tmp = (after ? new_rtx : cmp);
      convert_move (target,
		    (ac.shift == NULL_RTX ? tmp
		     : expand_simple_binop (SImode, LSHIFTRT, tmp, ac.shift,
					    NULL_RTX, 1, OPTAB_DIRECT)),
		    1);
    }
}
1672 1.1 mrg
1673 1.1 mrg
1674 1.1 mrg void
1675 1.1 mrg xtensa_setup_frame_addresses (void)
1676 1.1 mrg {
1677 1.1 mrg /* Set flag to cause TARGET_FRAME_POINTER_REQUIRED to return true. */
1678 1.1 mrg cfun->machine->accesses_prev_frame = 1;
1679 1.1 mrg
1680 1.1 mrg if (TARGET_WINDOWED_ABI)
1681 1.1 mrg emit_library_call
1682 1.1 mrg (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1683 1.1 mrg LCT_NORMAL, VOIDmode);
1684 1.1 mrg }
1685 1.1 mrg
1686 1.1 mrg
1687 1.1 mrg /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1688 1.1 mrg a comment showing where the end of the loop is. However, if there is a
1689 1.1 mrg label or a branch at the end of the loop then we need to place a nop
1690 1.1 mrg there. If the loop ends with a label we need the nop so that branches
1691 1.1 mrg targeting that label will target the nop (and thus remain in the loop),
1692 1.1 mrg instead of targeting the instruction after the loop (and thus exiting
1693 1.1 mrg the loop). If the loop ends with a branch, we need the nop in case the
1694 1.1 mrg branch is targeting a location inside the loop. When the branch
1695 1.1 mrg executes it will cause the loop count to be decremented even if it is
1696 1.1 mrg taken (because it is the last instruction in the loop), so we need to
1697 1.1 mrg nop after the branch to prevent the loop count from being decremented
1698 1.1 mrg when the branch is taken. */
1699 1.1 mrg
1700 1.1 mrg void
1701 1.1 mrg xtensa_emit_loop_end (rtx_insn *insn, rtx *operands)
1702 1.1 mrg {
1703 1.1 mrg char done = 0;
1704 1.1 mrg
1705 1.1 mrg for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1706 1.1 mrg {
1707 1.1 mrg switch (GET_CODE (insn))
1708 1.1 mrg {
1709 1.1 mrg case NOTE:
1710 1.1 mrg case BARRIER:
1711 1.1 mrg break;
1712 1.1 mrg
1713 1.1 mrg case CODE_LABEL:
1714 1.1 mrg output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1715 1.1 mrg done = 1;
1716 1.1 mrg break;
1717 1.1 mrg
1718 1.1 mrg default:
1719 1.1 mrg {
1720 1.1 mrg rtx body = PATTERN (insn);
1721 1.1 mrg
1722 1.1 mrg if (JUMP_P (body))
1723 1.1 mrg {
1724 1.1 mrg output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1725 1.1 mrg done = 1;
1726 1.1 mrg }
1727 1.1 mrg else if ((GET_CODE (body) != USE)
1728 1.1 mrg && (GET_CODE (body) != CLOBBER))
1729 1.1 mrg done = 1;
1730 1.1 mrg }
1731 1.1 mrg break;
1732 1.1 mrg }
1733 1.1 mrg }
1734 1.1 mrg
1735 1.1 mrg output_asm_insn ("%1_LEND:", operands);
1736 1.1 mrg }
1737 1.1 mrg
1738 1.1 mrg
1739 1.1 mrg char *
1740 1.1 mrg xtensa_emit_branch (bool inverted, bool immed, rtx *operands)
1741 1.1 mrg {
1742 1.1 mrg static char result[64];
1743 1.1 mrg enum rtx_code code;
1744 1.1 mrg const char *op;
1745 1.1 mrg
1746 1.1 mrg code = GET_CODE (operands[3]);
1747 1.1 mrg switch (code)
1748 1.1 mrg {
1749 1.1 mrg case EQ: op = inverted ? "ne" : "eq"; break;
1750 1.1 mrg case NE: op = inverted ? "eq" : "ne"; break;
1751 1.1 mrg case LT: op = inverted ? "ge" : "lt"; break;
1752 1.1 mrg case GE: op = inverted ? "lt" : "ge"; break;
1753 1.1 mrg case LTU: op = inverted ? "geu" : "ltu"; break;
1754 1.1 mrg case GEU: op = inverted ? "ltu" : "geu"; break;
1755 1.1 mrg default: gcc_unreachable ();
1756 1.1 mrg }
1757 1.1 mrg
1758 1.1 mrg if (immed)
1759 1.1 mrg {
1760 1.1 mrg if (INTVAL (operands[1]) == 0)
1761 1.1 mrg sprintf (result, "b%sz%s\t%%0, %%2", op,
1762 1.1 mrg (TARGET_DENSITY && (code == EQ || code == NE)) ? ".n" : "");
1763 1.1 mrg else
1764 1.1 mrg sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1765 1.1 mrg }
1766 1.1 mrg else
1767 1.1 mrg sprintf (result, "b%s\t%%0, %%1, %%2", op);
1768 1.1 mrg
1769 1.1 mrg return result;
1770 1.1 mrg }
1771 1.1 mrg
1772 1.1 mrg
1773 1.1 mrg char *
1774 1.1 mrg xtensa_emit_bit_branch (bool inverted, bool immed, rtx *operands)
1775 1.1 mrg {
1776 1.1 mrg static char result[64];
1777 1.1 mrg const char *op;
1778 1.1 mrg
1779 1.1 mrg switch (GET_CODE (operands[3]))
1780 1.1 mrg {
1781 1.1 mrg case EQ: op = inverted ? "bs" : "bc"; break;
1782 1.1 mrg case NE: op = inverted ? "bc" : "bs"; break;
1783 1.1 mrg default: gcc_unreachable ();
1784 1.1 mrg }
1785 1.1 mrg
1786 1.1 mrg if (immed)
1787 1.1 mrg {
1788 1.1 mrg unsigned bitnum = INTVAL (operands[1]) & 0x1f;
1789 1.1 mrg operands[1] = GEN_INT (bitnum);
1790 1.1 mrg sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1791 1.1 mrg }
1792 1.1 mrg else
1793 1.1 mrg sprintf (result, "b%s\t%%0, %%1, %%2", op);
1794 1.1 mrg
1795 1.1 mrg return result;
1796 1.1 mrg }
1797 1.1 mrg
1798 1.1 mrg
1799 1.1 mrg char *
1800 1.1 mrg xtensa_emit_movcc (bool inverted, bool isfp, bool isbool, rtx *operands)
1801 1.1 mrg {
1802 1.1 mrg static char result[64];
1803 1.1 mrg enum rtx_code code;
1804 1.1 mrg const char *op;
1805 1.1 mrg
1806 1.1 mrg code = GET_CODE (operands[4]);
1807 1.1 mrg if (isbool)
1808 1.1 mrg {
1809 1.1 mrg switch (code)
1810 1.1 mrg {
1811 1.1 mrg case EQ: op = inverted ? "t" : "f"; break;
1812 1.1 mrg case NE: op = inverted ? "f" : "t"; break;
1813 1.1 mrg default: gcc_unreachable ();
1814 1.1 mrg }
1815 1.1 mrg }
1816 1.1 mrg else
1817 1.1 mrg {
1818 1.1 mrg switch (code)
1819 1.1 mrg {
1820 1.1 mrg case EQ: op = inverted ? "nez" : "eqz"; break;
1821 1.1 mrg case NE: op = inverted ? "eqz" : "nez"; break;
1822 1.1 mrg case LT: op = inverted ? "gez" : "ltz"; break;
1823 1.1 mrg case GE: op = inverted ? "ltz" : "gez"; break;
1824 1.1 mrg default: gcc_unreachable ();
1825 1.1 mrg }
1826 1.1 mrg }
1827 1.1 mrg
1828 1.1 mrg sprintf (result, "mov%s%s\t%%0, %%%d, %%1",
1829 1.1 mrg op, isfp ? ".s" : "", inverted ? 3 : 2);
1830 1.1 mrg return result;
1831 1.1 mrg }
1832 1.1 mrg
1833 1.1 mrg
1834 1.1 mrg char *
1835 1.1 mrg xtensa_emit_call (int callop, rtx *operands)
1836 1.1 mrg {
1837 1.1 mrg static char result[64];
1838 1.1 mrg rtx tgt = operands[callop];
1839 1.1 mrg
1840 1.1 mrg if (GET_CODE (tgt) == CONST_INT)
1841 1.1 mrg sprintf (result, "call%d\t" HOST_WIDE_INT_PRINT_HEX,
1842 1.1 mrg WINDOW_SIZE, INTVAL (tgt));
1843 1.1 mrg else if (register_operand (tgt, VOIDmode))
1844 1.1 mrg sprintf (result, "callx%d\t%%%d", WINDOW_SIZE, callop);
1845 1.1 mrg else
1846 1.1 mrg sprintf (result, "call%d\t%%%d", WINDOW_SIZE, callop);
1847 1.1 mrg
1848 1.1 mrg return result;
1849 1.1 mrg }
1850 1.1 mrg
1851 1.1 mrg
1852 1.1 mrg bool
1853 1.1 mrg xtensa_legitimate_address_p (machine_mode mode, rtx addr, bool strict)
1854 1.1 mrg {
1855 1.1 mrg /* Allow constant pool addresses. */
1856 1.1 mrg if (mode != BLKmode && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
1857 1.1 mrg && ! TARGET_CONST16 && constantpool_address_p (addr)
1858 1.1 mrg && ! xtensa_tls_referenced_p (addr))
1859 1.1 mrg return true;
1860 1.1 mrg
1861 1.1 mrg while (GET_CODE (addr) == SUBREG)
1862 1.1 mrg addr = SUBREG_REG (addr);
1863 1.1 mrg
1864 1.1 mrg /* Allow base registers. */
1865 1.1 mrg if (GET_CODE (addr) == REG && BASE_REG_P (addr, strict))
1866 1.1 mrg return true;
1867 1.1 mrg
1868 1.1 mrg /* Check for "register + offset" addressing. */
1869 1.1 mrg if (GET_CODE (addr) == PLUS)
1870 1.1 mrg {
1871 1.1 mrg rtx xplus0 = XEXP (addr, 0);
1872 1.1 mrg rtx xplus1 = XEXP (addr, 1);
1873 1.1 mrg enum rtx_code code0;
1874 1.1 mrg enum rtx_code code1;
1875 1.1 mrg
1876 1.1 mrg while (GET_CODE (xplus0) == SUBREG)
1877 1.1 mrg xplus0 = SUBREG_REG (xplus0);
1878 1.1 mrg code0 = GET_CODE (xplus0);
1879 1.1 mrg
1880 1.1 mrg while (GET_CODE (xplus1) == SUBREG)
1881 1.1 mrg xplus1 = SUBREG_REG (xplus1);
1882 1.1 mrg code1 = GET_CODE (xplus1);
1883 1.1 mrg
1884 1.1 mrg /* Swap operands if necessary so the register is first. */
1885 1.1 mrg if (code0 != REG && code1 == REG)
1886 1.1 mrg {
1887 1.1 mrg xplus0 = XEXP (addr, 1);
1888 1.1 mrg xplus1 = XEXP (addr, 0);
1889 1.1 mrg code0 = GET_CODE (xplus0);
1890 1.1 mrg code1 = GET_CODE (xplus1);
1891 1.1 mrg }
1892 1.1 mrg
1893 1.1 mrg if (code0 == REG && BASE_REG_P (xplus0, strict)
1894 1.1 mrg && code1 == CONST_INT
1895 1.1 mrg && xtensa_mem_offset (INTVAL (xplus1), mode))
1896 1.1 mrg return true;
1897 1.1 mrg }
1898 1.1 mrg
1899 1.1 mrg return false;
1900 1.1 mrg }
1901 1.1 mrg
1902 1.1 mrg
1903 1.1 mrg /* Construct the SYMBOL_REF for the _TLS_MODULE_BASE_ symbol. */
1904 1.1 mrg
1905 1.1 mrg static GTY(()) rtx xtensa_tls_module_base_symbol;
1906 1.1 mrg
1907 1.1 mrg static rtx
1908 1.1 mrg xtensa_tls_module_base (void)
1909 1.1 mrg {
1910 1.1 mrg if (! xtensa_tls_module_base_symbol)
1911 1.1 mrg {
1912 1.1 mrg xtensa_tls_module_base_symbol =
1913 1.1 mrg gen_rtx_SYMBOL_REF (Pmode, "_TLS_MODULE_BASE_");
1914 1.1 mrg SYMBOL_REF_FLAGS (xtensa_tls_module_base_symbol)
1915 1.1 mrg |= TLS_MODEL_GLOBAL_DYNAMIC << SYMBOL_FLAG_TLS_SHIFT;
1916 1.1 mrg }
1917 1.1 mrg
1918 1.1 mrg return xtensa_tls_module_base_symbol;
1919 1.1 mrg }
1920 1.1 mrg
1921 1.1 mrg
1922 1.1 mrg static rtx_insn *
1923 1.1 mrg xtensa_call_tls_desc (rtx sym, rtx *retp)
1924 1.1 mrg {
1925 1.1 mrg rtx fn, arg, a_io;
1926 1.1 mrg rtx_insn *call_insn, *insns;
1927 1.1 mrg
1928 1.1 mrg start_sequence ();
1929 1.1 mrg fn = gen_reg_rtx (Pmode);
1930 1.1 mrg arg = gen_reg_rtx (Pmode);
1931 1.1 mrg a_io = gen_rtx_REG (Pmode, WINDOW_SIZE + 2);
1932 1.1 mrg
1933 1.1 mrg emit_insn (gen_tls_func (fn, sym));
1934 1.1 mrg emit_insn (gen_tls_arg (arg, sym));
1935 1.1 mrg emit_move_insn (a_io, arg);
1936 1.1 mrg call_insn = emit_call_insn (gen_tls_call (a_io, fn, sym, const1_rtx));
1937 1.1 mrg use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), a_io);
1938 1.1 mrg insns = get_insns ();
1939 1.1 mrg end_sequence ();
1940 1.1 mrg
1941 1.1 mrg *retp = a_io;
1942 1.1 mrg return insns;
1943 1.1 mrg }
1944 1.1 mrg
1945 1.1 mrg
1946 1.1 mrg static rtx
1947 1.1 mrg xtensa_legitimize_tls_address (rtx x)
1948 1.1 mrg {
1949 1.1 mrg unsigned int model = SYMBOL_REF_TLS_MODEL (x);
1950 1.1 mrg rtx dest, tp, ret, modbase, base, addend;
1951 1.1 mrg rtx_insn *insns;
1952 1.1 mrg
1953 1.1 mrg dest = gen_reg_rtx (Pmode);
1954 1.1 mrg switch (model)
1955 1.1 mrg {
1956 1.1 mrg case TLS_MODEL_GLOBAL_DYNAMIC:
1957 1.1 mrg insns = xtensa_call_tls_desc (x, &ret);
1958 1.1 mrg emit_libcall_block (insns, dest, ret, x);
1959 1.1 mrg break;
1960 1.1 mrg
1961 1.1 mrg case TLS_MODEL_LOCAL_DYNAMIC:
1962 1.1 mrg base = gen_reg_rtx (Pmode);
1963 1.1 mrg modbase = xtensa_tls_module_base ();
1964 1.1 mrg insns = xtensa_call_tls_desc (modbase, &ret);
1965 1.1 mrg emit_libcall_block (insns, base, ret, modbase);
1966 1.1 mrg addend = force_reg (SImode, gen_sym_DTPOFF (x));
1967 1.1 mrg emit_insn (gen_addsi3 (dest, base, addend));
1968 1.1 mrg break;
1969 1.1 mrg
1970 1.1 mrg case TLS_MODEL_INITIAL_EXEC:
1971 1.1 mrg case TLS_MODEL_LOCAL_EXEC:
1972 1.1 mrg tp = gen_reg_rtx (SImode);
1973 1.1 mrg emit_insn (gen_get_thread_pointersi (tp));
1974 1.1 mrg addend = force_reg (SImode, gen_sym_TPOFF (x));
1975 1.1 mrg emit_insn (gen_addsi3 (dest, tp, addend));
1976 1.1 mrg break;
1977 1.1 mrg
1978 1.1 mrg default:
1979 1.1 mrg gcc_unreachable ();
1980 1.1 mrg }
1981 1.1 mrg
1982 1.1 mrg return dest;
1983 1.1 mrg }
1984 1.1 mrg
1985 1.1 mrg
1986 1.1 mrg rtx
1987 1.1 mrg xtensa_legitimize_address (rtx x,
1988 1.1 mrg rtx oldx ATTRIBUTE_UNUSED,
1989 1.1 mrg machine_mode mode)
1990 1.1 mrg {
1991 1.1 mrg if (xtensa_tls_symbol_p (x))
1992 1.1 mrg return xtensa_legitimize_tls_address (x);
1993 1.1 mrg
1994 1.1 mrg if (GET_CODE (x) == PLUS)
1995 1.1 mrg {
1996 1.1 mrg rtx plus0 = XEXP (x, 0);
1997 1.1 mrg rtx plus1 = XEXP (x, 1);
1998 1.1 mrg
1999 1.1 mrg if (GET_CODE (plus0) != REG && GET_CODE (plus1) == REG)
2000 1.1 mrg {
2001 1.1 mrg plus0 = XEXP (x, 1);
2002 1.1 mrg plus1 = XEXP (x, 0);
2003 1.1 mrg }
2004 1.1 mrg
2005 1.1 mrg /* Try to split up the offset to use an ADDMI instruction. */
2006 1.1 mrg if (GET_CODE (plus0) == REG
2007 1.1 mrg && GET_CODE (plus1) == CONST_INT
2008 1.1 mrg && !xtensa_mem_offset (INTVAL (plus1), mode)
2009 1.1 mrg && !xtensa_simm8 (INTVAL (plus1))
2010 1.1 mrg && xtensa_mem_offset (INTVAL (plus1) & 0xff, mode)
2011 1.1 mrg && xtensa_simm8x256 (INTVAL (plus1) & ~0xff))
2012 1.1 mrg {
2013 1.1 mrg rtx temp = gen_reg_rtx (Pmode);
2014 1.1 mrg rtx addmi_offset = GEN_INT (INTVAL (plus1) & ~0xff);
2015 1.1 mrg emit_insn (gen_rtx_SET (temp, gen_rtx_PLUS (Pmode, plus0,
2016 1.1 mrg addmi_offset)));
2017 1.1 mrg return gen_rtx_PLUS (Pmode, temp, GEN_INT (INTVAL (plus1) & 0xff));
2018 1.1 mrg }
2019 1.1 mrg }
2020 1.1 mrg
2021 1.1 mrg return x;
2022 1.1 mrg }
2023 1.1 mrg
2024 1.1 mrg /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
2025 1.1 mrg
2026 1.1 mrg Treat constant-pool references as "mode dependent" since they can
2027 1.1 mrg only be accessed with SImode loads. This works around a bug in the
2028 1.1 mrg combiner where a constant pool reference is temporarily converted
2029 1.1 mrg to an HImode load, which is then assumed to zero-extend based on
2030 1.1 mrg our definition of LOAD_EXTEND_OP. This is wrong because the high
2031 1.1 mrg bits of a 16-bit value in the constant pool are now sign-extended
2032 1.1 mrg by default. */
2033 1.1 mrg
2034 1.1 mrg static bool
2035 1.1 mrg xtensa_mode_dependent_address_p (const_rtx addr,
2036 1.1 mrg addr_space_t as ATTRIBUTE_UNUSED)
2037 1.1 mrg {
2038 1.1 mrg return constantpool_address_p (addr);
2039 1.1 mrg }
2040 1.1 mrg
2041 1.1 mrg /* Return TRUE if X contains any TLS symbol references. */
2042 1.1 mrg
2043 1.1 mrg bool
2044 1.1 mrg xtensa_tls_referenced_p (rtx x)
2045 1.1 mrg {
2046 1.1 mrg if (! targetm.have_tls)
2047 1.1 mrg return false;
2048 1.1 mrg
2049 1.1 mrg subrtx_iterator::array_type array;
2050 1.1 mrg FOR_EACH_SUBRTX (iter, array, x, ALL)
2051 1.1 mrg {
2052 1.1 mrg const_rtx x = *iter;
2053 1.1 mrg if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0)
2054 1.1 mrg return true;
2055 1.1 mrg
2056 1.1 mrg /* Ignore TLS references that have already been legitimized. */
2057 1.1 mrg if (GET_CODE (x) == UNSPEC)
2058 1.1 mrg switch (XINT (x, 1))
2059 1.1 mrg {
2060 1.1 mrg case UNSPEC_TPOFF:
2061 1.1 mrg case UNSPEC_DTPOFF:
2062 1.1 mrg case UNSPEC_TLS_FUNC:
2063 1.1 mrg case UNSPEC_TLS_ARG:
2064 1.1 mrg case UNSPEC_TLS_CALL:
2065 1.1 mrg iter.skip_subrtxes ();
2066 1.1 mrg break;
2067 1.1 mrg default:
2068 1.1 mrg break;
2069 1.1 mrg }
2070 1.1 mrg }
2071 1.1 mrg return false;
2072 1.1 mrg }
2073 1.1 mrg
2074 1.1 mrg
2075 1.1 mrg /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
2076 1.1 mrg
2077 1.1 mrg static bool
2078 1.1 mrg xtensa_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2079 1.1 mrg {
2080 1.1 mrg return xtensa_tls_referenced_p (x);
2081 1.1 mrg }
2082 1.1 mrg
2083 1.1 mrg
2084 1.1 mrg /* Return the debugger register number to use for 'regno'. */
2085 1.1 mrg
2086 1.1 mrg int
2087 1.1 mrg xtensa_dbx_register_number (int regno)
2088 1.1 mrg {
2089 1.1 mrg int first = -1;
2090 1.1 mrg
2091 1.1 mrg if (GP_REG_P (regno))
2092 1.1 mrg {
2093 1.1 mrg regno -= GP_REG_FIRST;
2094 1.1 mrg first = 0;
2095 1.1 mrg }
2096 1.1 mrg else if (BR_REG_P (regno))
2097 1.1 mrg {
2098 1.1 mrg regno -= BR_REG_FIRST;
2099 1.1 mrg first = 16;
2100 1.1 mrg }
2101 1.1 mrg else if (FP_REG_P (regno))
2102 1.1 mrg {
2103 1.1 mrg regno -= FP_REG_FIRST;
2104 1.1 mrg first = 48;
2105 1.1 mrg }
2106 1.1 mrg else if (ACC_REG_P (regno))
2107 1.1 mrg {
2108 1.1 mrg first = 0x200; /* Start of Xtensa special registers. */
2109 1.1 mrg regno = 16; /* ACCLO is special register 16. */
2110 1.1 mrg }
2111 1.1 mrg
2112 1.1 mrg /* When optimizing, we sometimes get asked about pseudo-registers
2113 1.1 mrg that don't represent hard registers. Return 0 for these. */
2114 1.1 mrg if (first == -1)
2115 1.1 mrg return 0;
2116 1.1 mrg
2117 1.1 mrg return first + regno;
2118 1.1 mrg }
2119 1.1 mrg
2120 1.1 mrg
2121 1.1 mrg /* Argument support functions. */
2122 1.1 mrg
2123 1.1 mrg /* Initialize CUMULATIVE_ARGS for a function. */
2124 1.1 mrg
2125 1.1 mrg void
2126 1.1 mrg init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
2127 1.1 mrg {
2128 1.1 mrg cum->arg_words = 0;
2129 1.1 mrg cum->incoming = incoming;
2130 1.1 mrg }
2131 1.1 mrg
2132 1.1 mrg
2133 1.1 mrg /* Advance the argument to the next argument position. */
2134 1.1 mrg
2135 1.1 mrg static void
2136 1.1 mrg xtensa_function_arg_advance (cumulative_args_t cum,
2137 1.1 mrg const function_arg_info &arg)
2138 1.1 mrg {
2139 1.1 mrg int words, max;
2140 1.1 mrg int *arg_words;
2141 1.1 mrg
2142 1.1 mrg arg_words = &get_cumulative_args (cum)->arg_words;
2143 1.1 mrg max = MAX_ARGS_IN_REGISTERS;
2144 1.1 mrg
2145 1.1 mrg words = ((arg.promoted_size_in_bytes () + UNITS_PER_WORD - 1)
2146 1.1 mrg / UNITS_PER_WORD);
2147 1.1 mrg
2148 1.1 mrg if (*arg_words < max
2149 1.1 mrg && (targetm.calls.must_pass_in_stack (arg)
2150 1.1 mrg || *arg_words + words > max))
2151 1.1 mrg *arg_words = max;
2152 1.1 mrg
2153 1.1 mrg *arg_words += words;
2154 1.1 mrg }
2155 1.1 mrg
2156 1.1 mrg
2157 1.1 mrg /* Return an RTL expression containing the register for the given argument,
2158 1.1 mrg or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
2159 1.1 mrg if this is an incoming argument to the current function. */
2160 1.1 mrg
2161 1.1 mrg static rtx
2162 1.1 mrg xtensa_function_arg_1 (cumulative_args_t cum_v, const function_arg_info &arg,
2163 1.1 mrg bool incoming_p)
2164 1.1 mrg {
2165 1.1 mrg CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2166 1.1 mrg int regbase, words, max;
2167 1.1 mrg int *arg_words;
2168 1.1 mrg int regno;
2169 1.1 mrg
2170 1.1 mrg arg_words = &cum->arg_words;
2171 1.1 mrg regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
2172 1.1 mrg max = MAX_ARGS_IN_REGISTERS;
2173 1.1 mrg
2174 1.1 mrg words = ((arg.promoted_size_in_bytes () + UNITS_PER_WORD - 1)
2175 1.1 mrg / UNITS_PER_WORD);
2176 1.1 mrg
2177 1.1 mrg if (arg.type && (TYPE_ALIGN (arg.type) > BITS_PER_WORD))
2178 1.1 mrg {
2179 1.1 mrg int align = MIN (TYPE_ALIGN (arg.type), STACK_BOUNDARY) / BITS_PER_WORD;
2180 1.1 mrg *arg_words = (*arg_words + align - 1) & -align;
2181 1.1 mrg }
2182 1.1 mrg
2183 1.1 mrg if (*arg_words + words > max)
2184 1.1 mrg return (rtx)0;
2185 1.1 mrg
2186 1.1 mrg regno = regbase + *arg_words;
2187 1.1 mrg
2188 1.1 mrg if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
2189 1.1 mrg cfun->machine->need_a7_copy = TARGET_WINDOWED_ABI;
2190 1.1 mrg
2191 1.1 mrg return gen_rtx_REG (arg.mode, regno);
2192 1.1 mrg }
2193 1.1 mrg
2194 1.1 mrg /* Implement TARGET_FUNCTION_ARG. */
2195 1.1 mrg
2196 1.1 mrg static rtx
2197 1.1 mrg xtensa_function_arg (cumulative_args_t cum, const function_arg_info &arg)
2198 1.1 mrg {
2199 1.1 mrg return xtensa_function_arg_1 (cum, arg, false);
2200 1.1 mrg }
2201 1.1 mrg
2202 1.1 mrg /* Implement TARGET_FUNCTION_INCOMING_ARG. */
2203 1.1 mrg
2204 1.1 mrg static rtx
2205 1.1 mrg xtensa_function_incoming_arg (cumulative_args_t cum,
2206 1.1 mrg const function_arg_info &arg)
2207 1.1 mrg {
2208 1.1 mrg return xtensa_function_arg_1 (cum, arg, true);
2209 1.1 mrg }
2210 1.1 mrg
2211 1.1 mrg static unsigned int
2212 1.1 mrg xtensa_function_arg_boundary (machine_mode mode, const_tree type)
2213 1.1 mrg {
2214 1.1 mrg unsigned int alignment;
2215 1.1 mrg
2216 1.1 mrg alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
2217 1.1 mrg if (alignment < PARM_BOUNDARY)
2218 1.1 mrg alignment = PARM_BOUNDARY;
2219 1.1 mrg if (alignment > STACK_BOUNDARY)
2220 1.1 mrg alignment = STACK_BOUNDARY;
2221 1.1 mrg return alignment;
2222 1.1 mrg }
2223 1.1 mrg
2224 1.1 mrg
2225 1.1 mrg static bool
2226 1.1 mrg xtensa_return_in_msb (const_tree valtype)
2227 1.1 mrg {
2228 1.1 mrg return (TARGET_BIG_ENDIAN
2229 1.1 mrg && AGGREGATE_TYPE_P (valtype)
2230 1.1 mrg && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
2231 1.1 mrg }
2232 1.1 mrg
2233 1.1 mrg
2234 1.1 mrg static void
2235 1.1 mrg xtensa_option_override (void)
2236 1.1 mrg {
2237 1.1 mrg int regno;
2238 1.1 mrg machine_mode mode;
2239 1.1 mrg
2240 1.1 mrg if (xtensa_windowed_abi == -1)
2241 1.1 mrg xtensa_windowed_abi = TARGET_WINDOWED_ABI_DEFAULT;
2242 1.1 mrg
2243 1.1 mrg if (! TARGET_THREADPTR)
2244 1.1 mrg targetm.have_tls = false;
2245 1.1 mrg
2246 1.1 mrg /* Use CONST16 in the absence of L32R.
2247 1.1 mrg Set it in the TARGET_OPTION_OVERRIDE to avoid dependency on xtensa
2248 1.1 mrg configuration in the xtensa-common.cc */
2249 1.1 mrg
2250 1.1 mrg if (!TARGET_L32R)
2251 1.1 mrg target_flags |= MASK_CONST16;
2252 1.1 mrg
2253 1.1 mrg if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
2254 1.1 mrg error ("boolean registers required for the floating-point option");
2255 1.1 mrg
2256 1.1 mrg /* Set up array giving whether a given register can hold a given mode. */
2257 1.1 mrg for (mode = VOIDmode;
2258 1.1 mrg mode != MAX_MACHINE_MODE;
2259 1.1 mrg mode = (machine_mode) ((int) mode + 1))
2260 1.1 mrg {
2261 1.1 mrg int size = GET_MODE_SIZE (mode);
2262 1.1 mrg enum mode_class mclass = GET_MODE_CLASS (mode);
2263 1.1 mrg
2264 1.1 mrg for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2265 1.1 mrg {
2266 1.1 mrg int temp;
2267 1.1 mrg
2268 1.1 mrg if (ACC_REG_P (regno))
2269 1.1 mrg temp = (TARGET_MAC16
2270 1.1 mrg && (mclass == MODE_INT) && (size <= UNITS_PER_WORD));
2271 1.1 mrg else if (GP_REG_P (regno))
2272 1.1 mrg temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
2273 1.1 mrg else if (FP_REG_P (regno))
2274 1.1 mrg temp = (TARGET_HARD_FLOAT && (mode == SFmode));
2275 1.1 mrg else if (BR_REG_P (regno))
2276 1.1 mrg temp = (TARGET_BOOLEANS && (mode == CCmode));
2277 1.1 mrg else
2278 1.1 mrg temp = FALSE;
2279 1.1 mrg
2280 1.1 mrg xtensa_hard_regno_mode_ok_p[(int) mode][regno] = temp;
2281 1.1 mrg }
2282 1.1 mrg }
2283 1.1 mrg
2284 1.1 mrg init_machine_status = xtensa_init_machine_status;
2285 1.1 mrg
2286 1.1 mrg /* Check PIC settings. PIC is only supported when using L32R
2287 1.1 mrg instructions, and some targets need to always use PIC. */
2288 1.1 mrg if (flag_pic && TARGET_CONST16)
2289 1.1 mrg error ("%<-f%s%> is not supported with CONST16 instructions",
2290 1.1 mrg (flag_pic > 1 ? "PIC" : "pic"));
2291 1.1 mrg else if (TARGET_FORCE_NO_PIC)
2292 1.1 mrg flag_pic = 0;
2293 1.1 mrg else if (XTENSA_ALWAYS_PIC)
2294 1.1 mrg {
2295 1.1 mrg if (TARGET_CONST16)
2296 1.1 mrg error ("PIC is required but not supported with CONST16 instructions");
2297 1.1 mrg flag_pic = 1;
2298 1.1 mrg }
2299 1.1 mrg /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
2300 1.1 mrg if (flag_pic > 1)
2301 1.1 mrg flag_pic = 1;
2302 1.1 mrg if (flag_pic && !flag_pie)
2303 1.1 mrg flag_shlib = 1;
2304 1.1 mrg
2305 1.1 mrg /* Hot/cold partitioning does not work on this architecture, because of
2306 1.1 mrg constant pools (the load instruction cannot necessarily reach that far).
2307 1.1 mrg Therefore disable it on this architecture. */
2308 1.1 mrg if (flag_reorder_blocks_and_partition)
2309 1.1 mrg {
2310 1.1 mrg flag_reorder_blocks_and_partition = 0;
2311 1.1 mrg flag_reorder_blocks = 1;
2312 1.1 mrg }
2313 1.1 mrg }
2314 1.1 mrg
2315 1.1 mrg /* Implement TARGET_HARD_REGNO_NREGS. */
2316 1.1 mrg
2317 1.1 mrg static unsigned int
2318 1.1 mrg xtensa_hard_regno_nregs (unsigned int regno, machine_mode mode)
2319 1.1 mrg {
2320 1.1 mrg if (FP_REG_P (regno))
2321 1.1 mrg return CEIL (GET_MODE_SIZE (mode), UNITS_PER_FPREG);
2322 1.1 mrg return CEIL (GET_MODE_SIZE (mode), UNITS_PER_WORD);
2323 1.1 mrg }
2324 1.1 mrg
2325 1.1 mrg /* Implement TARGET_HARD_REGNO_MODE_OK. */
2326 1.1 mrg
2327 1.1 mrg static bool
2328 1.1 mrg xtensa_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
2329 1.1 mrg {
2330 1.1 mrg return xtensa_hard_regno_mode_ok_p[mode][regno];
2331 1.1 mrg }
2332 1.1 mrg
2333 1.1 mrg /* Implement TARGET_MODES_TIEABLE_P. */
2334 1.1 mrg
2335 1.1 mrg static bool
2336 1.1 mrg xtensa_modes_tieable_p (machine_mode mode1, machine_mode mode2)
2337 1.1 mrg {
2338 1.1 mrg return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
2339 1.1 mrg || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
2340 1.1 mrg == (GET_MODE_CLASS (mode2) == MODE_FLOAT
2341 1.1 mrg || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
2342 1.1 mrg }
2343 1.1 mrg
2344 1.1 mrg /* A C compound statement to output to stdio stream STREAM the
2345 1.1 mrg assembler syntax for an instruction operand X. X is an RTL
2346 1.1 mrg expression.
2347 1.1 mrg
2348 1.1 mrg CODE is a value that can be used to specify one of several ways
2349 1.1 mrg of printing the operand. It is used when identical operands
2350 1.1 mrg must be printed differently depending on the context. CODE
2351 1.1 mrg comes from the '%' specification that was used to request
2352 1.1 mrg printing of the operand. If the specification was just '%DIGIT'
2353 1.1 mrg then CODE is 0; if the specification was '%LTR DIGIT' then CODE
2354 1.1 mrg is the ASCII code for LTR.
2355 1.1 mrg
2356 1.1 mrg If X is a register, this macro should print the register's name.
2357 1.1 mrg The names can be found in an array 'reg_names' whose type is
2358 1.1 mrg 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
2359 1.1 mrg
2360 1.1 mrg When the machine description has a specification '%PUNCT' (a '%'
2361 1.1 mrg followed by a punctuation character), this macro is called with
2362 1.1 mrg a null pointer for X and the punctuation character for CODE.
2363 1.1 mrg
2364 1.1 mrg 'a', 'c', 'l', and 'n' are reserved.
2365 1.1 mrg
2366 1.1 mrg The Xtensa specific codes are:
2367 1.1 mrg
2368 1.1 mrg 'd' CONST_INT, print as signed decimal
2369 1.1 mrg 'x' CONST_INT, print as signed hexadecimal
2370 1.1 mrg 'K' CONST_INT, print number of bits in mask for EXTUI
2371 1.1 mrg 'R' CONST_INT, print (X & 0x1f)
2372 1.1 mrg 'L' CONST_INT, print ((32 - X) & 0x1f)
2373 1.1 mrg 'D' REG, print second register of double-word register operand
2374 1.1 mrg 'N' MEM, print address of next word following a memory operand
2375 1.1 mrg 'v' MEM, if memory reference is volatile, output a MEMW before it
2376 1.1 mrg 't' any constant, add "@h" suffix for top 16 bits
2377 1.1 mrg 'b' any constant, add "@l" suffix for bottom 16 bits
2378 1.1 mrg */
2379 1.1 mrg
2380 1.1 mrg static void
2381 1.1 mrg printx (FILE *file, signed int val)
2382 1.1 mrg {
2383 1.1 mrg /* Print a hexadecimal value in a nice way. */
2384 1.1 mrg if ((val > -0xa) && (val < 0xa))
2385 1.1 mrg fprintf (file, "%d", val);
2386 1.1 mrg else if (val < 0)
2387 1.1 mrg fprintf (file, "-0x%x", -val);
2388 1.1 mrg else
2389 1.1 mrg fprintf (file, "0x%x", val);
2390 1.1 mrg }
2391 1.1 mrg
2392 1.1 mrg
2393 1.1 mrg void
2394 1.1 mrg print_operand (FILE *file, rtx x, int letter)
2395 1.1 mrg {
2396 1.1 mrg if (!x)
2397 1.1 mrg error ("%<PRINT_OPERAND%> null pointer");
2398 1.1 mrg
2399 1.1 mrg switch (letter)
2400 1.1 mrg {
2401 1.1 mrg case 'D':
2402 1.1 mrg if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2403 1.1 mrg fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
2404 1.1 mrg else
2405 1.1 mrg output_operand_lossage ("invalid %%D value");
2406 1.1 mrg break;
2407 1.1 mrg
2408 1.1 mrg case 'v':
2409 1.1 mrg if (GET_CODE (x) == MEM)
2410 1.1 mrg {
2411 1.1 mrg /* For a volatile memory reference, emit a MEMW before the
2412 1.1 mrg load or store. */
2413 1.1 mrg if (MEM_VOLATILE_P (x) && TARGET_SERIALIZE_VOLATILE)
2414 1.1 mrg fprintf (file, "memw\n\t");
2415 1.1 mrg }
2416 1.1 mrg else
2417 1.1 mrg output_operand_lossage ("invalid %%v value");
2418 1.1 mrg break;
2419 1.1 mrg
2420 1.1 mrg case 'N':
2421 1.1 mrg if (GET_CODE (x) == MEM
2422 1.1 mrg && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
2423 1.1 mrg {
2424 1.1 mrg x = adjust_address (x, GET_MODE (x) == DFmode ? E_SFmode : E_SImode,
2425 1.1 mrg 4);
2426 1.1 mrg output_address (GET_MODE (x), XEXP (x, 0));
2427 1.1 mrg }
2428 1.1 mrg else
2429 1.1 mrg output_operand_lossage ("invalid %%N value");
2430 1.1 mrg break;
2431 1.1 mrg
2432 1.1 mrg case 'K':
2433 1.1 mrg if (GET_CODE (x) == CONST_INT)
2434 1.1 mrg {
2435 1.1 mrg int num_bits = 0;
2436 1.1 mrg unsigned val = INTVAL (x);
2437 1.1 mrg while (val & 1)
2438 1.1 mrg {
2439 1.1 mrg num_bits += 1;
2440 1.1 mrg val = val >> 1;
2441 1.1 mrg }
2442 1.1 mrg if ((val != 0) || (num_bits == 0) || (num_bits > 16))
2443 1.1 mrg fatal_insn ("invalid mask", x);
2444 1.1 mrg
2445 1.1 mrg fprintf (file, "%d", num_bits);
2446 1.1 mrg }
2447 1.1 mrg else
2448 1.1 mrg output_operand_lossage ("invalid %%K value");
2449 1.1 mrg break;
2450 1.1 mrg
2451 1.1 mrg case 'L':
2452 1.1 mrg if (GET_CODE (x) == CONST_INT)
2453 1.1 mrg fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INTVAL (x)) & 0x1f);
2454 1.1 mrg else
2455 1.1 mrg output_operand_lossage ("invalid %%L value");
2456 1.1 mrg break;
2457 1.1 mrg
2458 1.1 mrg case 'R':
2459 1.1 mrg if (GET_CODE (x) == CONST_INT)
2460 1.1 mrg fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0x1f);
2461 1.1 mrg else
2462 1.1 mrg output_operand_lossage ("invalid %%R value");
2463 1.1 mrg break;
2464 1.1 mrg
2465 1.1 mrg case 'x':
2466 1.1 mrg if (GET_CODE (x) == CONST_INT)
2467 1.1 mrg printx (file, INTVAL (x));
2468 1.1 mrg else
2469 1.1 mrg output_operand_lossage ("invalid %%x value");
2470 1.1 mrg break;
2471 1.1 mrg
2472 1.1 mrg case 'd':
2473 1.1 mrg if (GET_CODE (x) == CONST_INT)
2474 1.1 mrg fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
2475 1.1 mrg else
2476 1.1 mrg output_operand_lossage ("invalid %%d value");
2477 1.1 mrg break;
2478 1.1 mrg
2479 1.1 mrg case 't':
2480 1.1 mrg case 'b':
2481 1.1 mrg if (GET_CODE (x) == CONST_INT)
2482 1.1 mrg {
2483 1.1 mrg printx (file, INTVAL (x));
2484 1.1 mrg fputs (letter == 't' ? "@h" : "@l", file);
2485 1.1 mrg }
2486 1.1 mrg else if (GET_CODE (x) == CONST_DOUBLE)
2487 1.1 mrg {
2488 1.1 mrg if (GET_MODE (x) == SFmode)
2489 1.1 mrg {
2490 1.1 mrg long l;
2491 1.1 mrg REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2492 1.1 mrg fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2493 1.1 mrg }
2494 1.1 mrg else
2495 1.1 mrg output_operand_lossage ("invalid %%t/%%b value");
2496 1.1 mrg }
2497 1.1 mrg else if (GET_CODE (x) == CONST)
2498 1.1 mrg {
2499 1.1 mrg /* X must be a symbolic constant on ELF. Write an expression
2500 1.1 mrg suitable for 'const16' that sets the high or low 16 bits. */
2501 1.1 mrg if (GET_CODE (XEXP (x, 0)) != PLUS
2502 1.1 mrg || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2503 1.1 mrg && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2504 1.1 mrg || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2505 1.1 mrg output_operand_lossage ("invalid %%t/%%b value");
2506 1.1 mrg print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2507 1.1 mrg fputs (letter == 't' ? "@h" : "@l", file);
2508 1.1 mrg /* There must be a non-alphanumeric character between 'h' or 'l'
2509 1.1 mrg and the number. The '-' is added by print_operand() already. */
2510 1.1 mrg if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2511 1.1 mrg fputs ("+", file);
2512 1.1 mrg print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2513 1.1 mrg }
2514 1.1 mrg else
2515 1.1 mrg {
2516 1.1 mrg output_addr_const (file, x);
2517 1.1 mrg fputs (letter == 't' ? "@h" : "@l", file);
2518 1.1 mrg }
2519 1.1 mrg break;
2520 1.1 mrg
2521 1.1 mrg case 'y':
2522 1.1 mrg if (GET_CODE (x) == CONST_DOUBLE &&
2523 1.1 mrg GET_MODE (x) == SFmode)
2524 1.1 mrg {
2525 1.1 mrg long l;
2526 1.1 mrg REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2527 1.1 mrg fprintf (file, "0x%08lx", l);
2528 1.1 mrg break;
2529 1.1 mrg }
2530 1.1 mrg
2531 1.1 mrg /* fall through */
2532 1.1 mrg
2533 1.1 mrg default:
2534 1.1 mrg if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2535 1.1 mrg fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2536 1.1 mrg else if (GET_CODE (x) == MEM)
2537 1.1 mrg output_address (GET_MODE (x), XEXP (x, 0));
2538 1.1 mrg else if (GET_CODE (x) == CONST_INT)
2539 1.1 mrg fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
2540 1.1 mrg else
2541 1.1 mrg output_addr_const (file, x);
2542 1.1 mrg }
2543 1.1 mrg }
2544 1.1 mrg
2545 1.1 mrg
2546 1.1 mrg /* A C compound statement to output to stdio stream STREAM the
2547 1.1 mrg assembler syntax for an instruction operand that is a memory
2548 1.1 mrg reference whose address is ADDR. ADDR is an RTL expression. */
2549 1.1 mrg
2550 1.1 mrg void
2551 1.1 mrg print_operand_address (FILE *file, rtx addr)
2552 1.1 mrg {
2553 1.1 mrg if (!addr)
2554 1.1 mrg error ("%<PRINT_OPERAND_ADDRESS%>, null pointer");
2555 1.1 mrg
2556 1.1 mrg switch (GET_CODE (addr))
2557 1.1 mrg {
2558 1.1 mrg default:
2559 1.1 mrg fatal_insn ("invalid address", addr);
2560 1.1 mrg break;
2561 1.1 mrg
2562 1.1 mrg case REG:
2563 1.1 mrg fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2564 1.1 mrg break;
2565 1.1 mrg
2566 1.1 mrg case PLUS:
2567 1.1 mrg {
2568 1.1 mrg rtx reg = (rtx)0;
2569 1.1 mrg rtx offset = (rtx)0;
2570 1.1 mrg rtx arg0 = XEXP (addr, 0);
2571 1.1 mrg rtx arg1 = XEXP (addr, 1);
2572 1.1 mrg
2573 1.1 mrg if (GET_CODE (arg0) == REG)
2574 1.1 mrg {
2575 1.1 mrg reg = arg0;
2576 1.1 mrg offset = arg1;
2577 1.1 mrg }
2578 1.1 mrg else if (GET_CODE (arg1) == REG)
2579 1.1 mrg {
2580 1.1 mrg reg = arg1;
2581 1.1 mrg offset = arg0;
2582 1.1 mrg }
2583 1.1 mrg else
2584 1.1 mrg fatal_insn ("no register in address", addr);
2585 1.1 mrg
2586 1.1 mrg if (CONSTANT_P (offset))
2587 1.1 mrg {
2588 1.1 mrg fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2589 1.1 mrg output_addr_const (file, offset);
2590 1.1 mrg }
2591 1.1 mrg else
2592 1.1 mrg fatal_insn ("address offset not a constant", addr);
2593 1.1 mrg }
2594 1.1 mrg break;
2595 1.1 mrg
2596 1.1 mrg case LABEL_REF:
2597 1.1 mrg case SYMBOL_REF:
2598 1.1 mrg case CONST_INT:
2599 1.1 mrg case CONST:
2600 1.1 mrg output_addr_const (file, addr);
2601 1.1 mrg break;
2602 1.1 mrg }
2603 1.1 mrg }
2604 1.1 mrg
2605 1.1 mrg /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
2606 1.1 mrg
2607 1.1 mrg static bool
2608 1.1 mrg xtensa_output_addr_const_extra (FILE *fp, rtx x)
2609 1.1 mrg {
2610 1.1 mrg if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
2611 1.1 mrg {
2612 1.1 mrg switch (XINT (x, 1))
2613 1.1 mrg {
2614 1.1 mrg case UNSPEC_TPOFF:
2615 1.1 mrg output_addr_const (fp, XVECEXP (x, 0, 0));
2616 1.1 mrg fputs ("@TPOFF", fp);
2617 1.1 mrg return true;
2618 1.1 mrg case UNSPEC_DTPOFF:
2619 1.1 mrg output_addr_const (fp, XVECEXP (x, 0, 0));
2620 1.1 mrg fputs ("@DTPOFF", fp);
2621 1.1 mrg return true;
2622 1.1 mrg case UNSPEC_PLT:
2623 1.1 mrg if (flag_pic)
2624 1.1 mrg {
2625 1.1 mrg output_addr_const (fp, XVECEXP (x, 0, 0));
2626 1.1 mrg fputs ("@PLT", fp);
2627 1.1 mrg return true;
2628 1.1 mrg }
2629 1.1 mrg break;
2630 1.1 mrg default:
2631 1.1 mrg break;
2632 1.1 mrg }
2633 1.1 mrg }
2634 1.1 mrg return false;
2635 1.1 mrg }
2636 1.1 mrg
2637 1.1 mrg static void
2638 1.1 mrg xtensa_output_integer_literal_parts (FILE *file, rtx x, int size)
2639 1.1 mrg {
2640 1.1 mrg if (size > 4 && !(size & (size - 1)))
2641 1.1 mrg {
2642 1.1 mrg rtx first, second;
2643 1.1 mrg
2644 1.1 mrg split_double (x, &first, &second);
2645 1.1 mrg xtensa_output_integer_literal_parts (file, first, size / 2);
2646 1.1 mrg fputs (", ", file);
2647 1.1 mrg xtensa_output_integer_literal_parts (file, second, size / 2);
2648 1.1 mrg }
2649 1.1 mrg else if (size == 4)
2650 1.1 mrg {
2651 1.1 mrg output_addr_const (file, x);
2652 1.1 mrg }
2653 1.1 mrg else
2654 1.1 mrg {
2655 1.1 mrg gcc_unreachable();
2656 1.1 mrg }
2657 1.1 mrg }
2658 1.1 mrg
/* Emit a literal-pool entry for constant X of mode MODE to FILE as a
   ".literal .LC<LABELNO>, <value>" directive.  Floating constants are
   converted to their target bit patterns; integer constants are
   printed as one or more 32-bit words.  */

void
xtensa_output_literal (FILE *file, rtx x, machine_mode mode, int labelno)
{
  long value_long[2];

  fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
      gcc_assert (GET_CODE (x) == CONST_DOUBLE);

      switch (mode)
	{
	case E_SFmode:
	  REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x),
				       value_long[0]);
	  /* On hosts with 64-bit long, mask off any sign-extension so
	     the %08lx format prints exactly 32 bits.  */
	  if (HOST_BITS_PER_LONG > 32)
	    value_long[0] &= 0xffffffff;
	  fprintf (file, "0x%08lx\n", value_long[0]);
	  break;

	case E_DFmode:
	  REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (x),
				       value_long);
	  if (HOST_BITS_PER_LONG > 32)
	    {
	      value_long[0] &= 0xffffffff;
	      value_long[1] &= 0xffffffff;
	    }
	  fprintf (file, "0x%08lx, 0x%08lx\n",
		   value_long[0], value_long[1]);
	  break;

	default:
	  gcc_unreachable ();
	}

      break;

    case MODE_INT:
    case MODE_PARTIAL_INT:
      xtensa_output_integer_literal_parts (file, x, GET_MODE_SIZE (mode));
      fputs ("\n", file);
      break;

    default:
      gcc_unreachable ();
    }
}
2709 1.1 mrg
2710 1.1 mrg static bool
2711 1.1 mrg xtensa_call_save_reg(int regno)
2712 1.1 mrg {
2713 1.1 mrg if (TARGET_WINDOWED_ABI)
2714 1.1 mrg return false;
2715 1.1 mrg
2716 1.1 mrg if (regno == A0_REG)
2717 1.1 mrg return crtl->profile || !crtl->is_leaf || crtl->calls_eh_return ||
2718 1.1 mrg df_regs_ever_live_p (regno);
2719 1.1 mrg
2720 1.1 mrg if (crtl->calls_eh_return && regno >= 2 && regno < 4)
2721 1.1 mrg return true;
2722 1.1 mrg
2723 1.1 mrg return !call_used_or_fixed_reg_p (regno) && df_regs_ever_live_p (regno);
2724 1.1 mrg }
2725 1.1 mrg
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  */

/* Size in bytes of a maximally-aligned stack unit.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Round LOC up to the next multiple of STACK_BYTES (a power of two).  */
#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2731 1.1 mrg
/* Compute and cache the total stack frame size for the current
   function, given SIZE bytes of local variables.  Also records the
   (aligned) callee-save area size in cfun->machine.  After reload the
   cached layout is returned unchanged so that prologue, epilogue and
   elimination offsets all agree.  */

long
compute_frame_size (poly_int64 size)
{
  int regno;

  /* Once the frame is laid out and reload is done, the answer must
     stay stable.  */
  if (reload_completed && cfun->machine->frame_laid_out)
    return cfun->machine->current_frame_size;

  /* Add space for the incoming static chain value.  */
  if (cfun->static_chain_decl != NULL)
    size += (1 * UNITS_PER_WORD);

  cfun->machine->callee_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
    {
      if (xtensa_call_save_reg(regno))
	cfun->machine->callee_save_size += UNITS_PER_WORD;
    }

  /* NOTE: the total uses the raw callee-save size; callee_save_size
     itself is aligned only afterwards, so the two roundings are
     intentionally independent.  */
  cfun->machine->current_frame_size =
    XTENSA_STACK_ALIGN (size
			+ cfun->machine->callee_save_size
			+ crtl->outgoing_args_size
			+ (WINDOW_SIZE * UNITS_PER_WORD));
  cfun->machine->callee_save_size =
    XTENSA_STACK_ALIGN (cfun->machine->callee_save_size);
  cfun->machine->frame_laid_out = true;
  return cfun->machine->current_frame_size;
}
2761 1.1 mrg
2762 1.1 mrg
2763 1.1 mrg bool
2764 1.1 mrg xtensa_frame_pointer_required (void)
2765 1.1 mrg {
2766 1.1 mrg /* The code to expand builtin_frame_addr and builtin_return_addr
2767 1.1 mrg currently uses the hard_frame_pointer instead of frame_pointer.
2768 1.1 mrg This seems wrong but maybe it's necessary for other architectures.
2769 1.1 mrg This function is derived from the i386 code. */
2770 1.1 mrg
2771 1.1 mrg if (cfun->machine->accesses_prev_frame || cfun->has_nonlocal_label)
2772 1.1 mrg return true;
2773 1.1 mrg
2774 1.1 mrg return false;
2775 1.1 mrg }
2776 1.1 mrg
2777 1.1 mrg HOST_WIDE_INT
2778 1.1 mrg xtensa_initial_elimination_offset (int from, int to ATTRIBUTE_UNUSED)
2779 1.1 mrg {
2780 1.1 mrg long frame_size = compute_frame_size (get_frame_size ());
2781 1.1 mrg HOST_WIDE_INT offset;
2782 1.1 mrg
2783 1.1 mrg switch (from)
2784 1.1 mrg {
2785 1.1 mrg case FRAME_POINTER_REGNUM:
2786 1.1 mrg if (FRAME_GROWS_DOWNWARD)
2787 1.1 mrg offset = frame_size - (WINDOW_SIZE * UNITS_PER_WORD)
2788 1.1 mrg - cfun->machine->callee_save_size;
2789 1.1 mrg else
2790 1.1 mrg offset = 0;
2791 1.1 mrg break;
2792 1.1 mrg case ARG_POINTER_REGNUM:
2793 1.1 mrg offset = frame_size;
2794 1.1 mrg break;
2795 1.1 mrg default:
2796 1.1 mrg gcc_unreachable ();
2797 1.1 mrg }
2798 1.1 mrg
2799 1.1 mrg return offset;
2800 1.1 mrg }
2801 1.1 mrg
/* minimum frame = reg save area (4 words) plus static chain (1 word)
   and the total number of words must be a multiple of 128 bits.  */
/* Smallest frame the windowed-ABI "entry" instruction allocates.  */
#define MIN_FRAME_SIZE (8 * UNITS_PER_WORD)
2805 1.1 mrg
/* Emit RTL for the function prologue: allocate the stack frame, save
   call-saved registers (call0 ABI), set up the frame pointer when
   needed, and attach REG_FRAME_RELATED_EXPR notes so the CFI/unwind
   info describes each stack adjustment.  */

void
xtensa_expand_prologue (void)
{
  HOST_WIDE_INT total_size;
  rtx_insn *insn = NULL;
  rtx note_rtx;


  total_size = compute_frame_size (get_frame_size ());

  if (flag_stack_usage_info)
    current_function_static_stack_size = total_size;

  if (TARGET_WINDOWED_ABI)
    {
      /* "entry" takes an immediate frame size of at most 2^15 bytes
	 (12-bit field scaled by 8); larger frames need an explicit
	 sp adjustment after a minimal entry.  */
      if (total_size < (1 << (12+3)))
	insn = emit_insn (gen_entry (GEN_INT (total_size)));
      else
	{
	  /* Use a8 as a temporary since a0-a7 may be live.  */
	  rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
	  emit_insn (gen_entry (GEN_INT (MIN_FRAME_SIZE)));
	  emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
	  emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
	  insn = emit_insn (gen_movsi (stack_pointer_rtx, tmp_reg));
	}
    }
  else
    {
      int regno;
      HOST_WIDE_INT offset = 0;
      int callee_save_size = cfun->machine->callee_save_size;

      /* -128 is a limit of single addi instruction. */
      if (total_size > 0 && total_size <= 128)
	{
	  /* Small frame: one addi allocates everything; saves are
	     addressed from the final sp.  */
	  insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
					GEN_INT (-total_size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  note_rtx = gen_rtx_SET (stack_pointer_rtx,
				  plus_constant (Pmode, stack_pointer_rtx,
						 -total_size));
	  add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
	  offset = total_size - UNITS_PER_WORD;
	}
      else if (callee_save_size)
	{
	  /* 1020 is maximal s32i offset, if the frame is bigger than that
	   * we move sp to the end of callee-saved save area, save and then
	   * move it to its final location. */
	  if (total_size > 1024)
	    {
	      insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
					    GEN_INT (-callee_save_size)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      note_rtx = gen_rtx_SET (stack_pointer_rtx,
				      plus_constant (Pmode, stack_pointer_rtx,
						     -callee_save_size));
	      add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
	      offset = callee_save_size - UNITS_PER_WORD;
	    }
	  else
	    {
	      /* Frame between 129 and 1024 bytes: allocate it all at
		 once via a9 since addi's immediate cannot reach.  */
	      rtx tmp_reg = gen_rtx_REG (Pmode, A9_REG);
	      emit_move_insn (tmp_reg, GEN_INT (total_size));
	      insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
					    stack_pointer_rtx, tmp_reg));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      note_rtx = gen_rtx_SET (stack_pointer_rtx,
				      plus_constant (Pmode, stack_pointer_rtx,
						     -total_size));
	      add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
	      offset = total_size - UNITS_PER_WORD;
	    }
	}

      /* Store each register to be saved at descending offsets from sp.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
	{
	  if (xtensa_call_save_reg(regno))
	    {
	      rtx x = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
	      rtx mem = gen_frame_mem (SImode, x);
	      rtx reg = gen_rtx_REG (SImode, regno);

	      offset -= UNITS_PER_WORD;
	      insn = emit_move_insn (mem, reg);
	      RTX_FRAME_RELATED_P (insn) = 1;
	      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			    gen_rtx_SET (mem, reg));
	    }
	}
      /* Allocate the remainder of a large frame (or a save-free frame
	 over 128 bytes) after the registers have been stored.  */
      if (total_size > 1024
	  || (!callee_save_size && total_size > 128))
	{
	  rtx tmp_reg = gen_rtx_REG (Pmode, A9_REG);
	  emit_move_insn (tmp_reg, GEN_INT (total_size -
					    callee_save_size));
	  insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
					stack_pointer_rtx, tmp_reg));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  note_rtx = gen_rtx_SET (stack_pointer_rtx,
				  plus_constant (Pmode, stack_pointer_rtx,
						 callee_save_size -
						 total_size));
	  add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
	}
    }

  if (frame_pointer_needed)
    {
      if (cfun->machine->set_frame_ptr_insn)
	{
	  rtx_insn *first;

	  push_topmost_sequence ();
	  first = get_insns ();
	  pop_topmost_sequence ();

	  /* For all instructions prior to set_frame_ptr_insn, replace
	     hard_frame_pointer references with stack_pointer.  */
	  for (insn = first;
	       insn != cfun->machine->set_frame_ptr_insn;
	       insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		{
		  PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
						hard_frame_pointer_rtx,
						stack_pointer_rtx);
		  df_insn_rescan (insn);
		}
	    }
	}
      else
	{
	  insn = emit_insn (gen_movsi (hard_frame_pointer_rtx,
				       stack_pointer_rtx));
	  if (!TARGET_WINDOWED_ABI)
	    {
	      note_rtx = gen_rtx_SET (hard_frame_pointer_rtx,
				      stack_pointer_rtx);
	      RTX_FRAME_RELATED_P (insn) = 1;
	      add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
	    }
	}
    }

  if (TARGET_WINDOWED_ABI)
    {
      /* Create a note to describe the CFA.  Because this is only used to set
	 DW_AT_frame_base for debug info, don't bother tracking changes through
	 each instruction in the prologue.  It just takes up space.  */
      note_rtx = gen_rtx_SET ((frame_pointer_needed
			       ? hard_frame_pointer_rtx
			       : stack_pointer_rtx),
			      plus_constant (Pmode, stack_pointer_rtx,
					     -total_size));
      RTX_FRAME_RELATED_P (insn) = 1;
      add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
    }
}
2967 1.1 mrg
/* Emit RTL for the function epilogue: restore call-saved registers,
   deallocate the frame, apply any eh_return stack adjustment, and emit
   the return.  Under the windowed ABI all of this is handled by the
   return instruction itself.  */

void
xtensa_expand_epilogue (void)
{
  if (!TARGET_WINDOWED_ABI)
    {
      int regno;
      HOST_WIDE_INT offset;

      /* Mirror of the prologue: for large frames first move sp back to
	 the callee-save area so the restores are within s32i/l32i
	 range (127 when restoring through a possibly-offset frame
	 pointer, 1024 otherwise).  */
      if (cfun->machine->current_frame_size > (frame_pointer_needed ? 127 : 1024))
	{
	  rtx tmp_reg = gen_rtx_REG (Pmode, A9_REG);
	  emit_move_insn (tmp_reg, GEN_INT (cfun->machine->current_frame_size -
					    cfun->machine->callee_save_size));
	  emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_needed ?
				 hard_frame_pointer_rtx : stack_pointer_rtx,
				 tmp_reg));
	  offset = cfun->machine->callee_save_size - UNITS_PER_WORD;
	}
      else
	{
	  if (frame_pointer_needed)
	    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
	  offset = cfun->machine->current_frame_size - UNITS_PER_WORD;
	}

      /* Prevent reordering of saved a0 update and loading it back from
	 the save area. */
      if (crtl->calls_eh_return)
	emit_insn (gen_blockage ());

      /* Reload the saved registers at descending offsets, matching the
	 order the prologue stored them.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
	{
	  if (xtensa_call_save_reg(regno))
	    {
	      rtx x = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));

	      offset -= UNITS_PER_WORD;
	      emit_move_insn (gen_rtx_REG (SImode, regno),
			      gen_frame_mem (SImode, x));
	    }
	}

      if (cfun->machine->current_frame_size > 0)
	{
	  if (frame_pointer_needed || /* always reachable with addi */
	      cfun->machine->current_frame_size > 1024 ||
	      cfun->machine->current_frame_size <= 127)
	    {
	      /* Either the whole frame fits in an addi immediate, or
		 only the callee-save area remains to be popped.  */
	      if (cfun->machine->current_frame_size <= 127)
		offset = cfun->machine->current_frame_size;
	      else
		offset = cfun->machine->callee_save_size;

	      emit_insn (gen_addsi3 (stack_pointer_rtx,
				     stack_pointer_rtx,
				     GEN_INT (offset)));
	    }
	  else
	    {
	      rtx tmp_reg = gen_rtx_REG (Pmode, A9_REG);
	      emit_move_insn (tmp_reg,
			      GEN_INT (cfun->machine->current_frame_size));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     tmp_reg));
	    }
	}

      if (crtl->calls_eh_return)
	emit_insn (gen_add3_insn (stack_pointer_rtx,
				  stack_pointer_rtx,
				  EH_RETURN_STACKADJ_RTX));
    }
  cfun->machine->epilogue_done = true;
  emit_jump_insn (gen_return ());
}
3043 1.1 mrg
3044 1.1 mrg bool
3045 1.1 mrg xtensa_use_return_instruction_p (void)
3046 1.1 mrg {
3047 1.1 mrg if (!reload_completed)
3048 1.1 mrg return false;
3049 1.1 mrg if (TARGET_WINDOWED_ABI)
3050 1.1 mrg return true;
3051 1.1 mrg if (compute_frame_size (get_frame_size ()) == 0)
3052 1.1 mrg return true;
3053 1.1 mrg return cfun->machine->epilogue_done;
3054 1.1 mrg }
3055 1.1 mrg
3056 1.1 mrg void
3057 1.1 mrg xtensa_set_return_address (rtx address, rtx scratch)
3058 1.1 mrg {
3059 1.1 mrg HOST_WIDE_INT total_size = compute_frame_size (get_frame_size ());
3060 1.1 mrg rtx frame = frame_pointer_needed ?
3061 1.1 mrg hard_frame_pointer_rtx : stack_pointer_rtx;
3062 1.1 mrg rtx a0_addr = plus_constant (Pmode, frame,
3063 1.1 mrg total_size - UNITS_PER_WORD);
3064 1.1 mrg rtx note = gen_rtx_SET (gen_frame_mem (SImode, a0_addr),
3065 1.1 mrg gen_rtx_REG (SImode, A0_REG));
3066 1.1 mrg rtx insn;
3067 1.1 mrg
3068 1.1 mrg if (total_size > 1024) {
3069 1.1 mrg emit_move_insn (scratch, GEN_INT (total_size - UNITS_PER_WORD));
3070 1.1 mrg emit_insn (gen_addsi3 (scratch, frame, scratch));
3071 1.1 mrg a0_addr = scratch;
3072 1.1 mrg }
3073 1.1 mrg
3074 1.1 mrg insn = emit_move_insn (gen_frame_mem (SImode, a0_addr), address);
3075 1.1 mrg RTX_FRAME_RELATED_P (insn) = 1;
3076 1.1 mrg add_reg_note (insn, REG_FRAME_RELATED_EXPR, note);
3077 1.1 mrg }
3078 1.1 mrg
/* Expand __builtin_return_address (COUNT == 0) or the internal
   COUNT == -1 form.  FRAME is the frame whose return address is
   wanted.  For the windowed ABI the two top bits of the saved address
   encode the window size and must be replaced with the high bits of
   the current PC.  */

rtx
xtensa_return_addr (int count, rtx frame)
{
  rtx result, retaddr, curaddr, label;

  if (!TARGET_WINDOWED_ABI)
    {
      /* call0 ABI: only the current frame's return address (live in
	 a0 on entry) can be recovered.  */
      if (count != 0)
	return const0_rtx;

      return get_hard_reg_initial_val (Pmode, A0_REG);
    }

  if (count == -1)
    retaddr = gen_rtx_REG (Pmode, A0_REG);
  else
    {
      /* The save slot sits 4 words below the frame pointer of the
	 requested frame.  */
      rtx addr = plus_constant (Pmode, frame, -4 * UNITS_PER_WORD);
      addr = memory_address (Pmode, addr);
      retaddr = gen_reg_rtx (Pmode);
      emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
    }

  /* The 2 most-significant bits of the return address on Xtensa hold
     the register window size.  To get the real return address, these
     bits must be replaced with the high bits from some address in the
     code.  */

  /* Get the 2 high bits of a local label in the code.  */
  curaddr = gen_reg_rtx (Pmode);
  label = gen_label_rtx ();
  emit_label (label);
  LABEL_PRESERVE_P (label) = 1;
  emit_move_insn (curaddr, gen_rtx_LABEL_REF (Pmode, label));
  emit_insn (gen_lshrsi3 (curaddr, curaddr, GEN_INT (30)));
  emit_insn (gen_ashlsi3 (curaddr, curaddr, GEN_INT (30)));

  /* Clear the 2 high bits of the return address.  */
  result = gen_reg_rtx (Pmode);
  emit_insn (gen_ashlsi3 (result, retaddr, GEN_INT (2)));
  emit_insn (gen_lshrsi3 (result, result, GEN_INT (2)));

  /* Combine them to get the result.  */
  emit_insn (gen_iorsi3 (result, result, curaddr));
  return result;
}
3125 1.1 mrg
3126 1.1 mrg /* Disable the use of word-sized or smaller complex modes for structures,
3127 1.1 mrg and for function arguments in particular, where they cause problems with
3128 1.1 mrg register a7. The xtensa_copy_incoming_a7 function assumes that there is
3129 1.1 mrg a single reference to an argument in a7, but with small complex modes the
3130 1.1 mrg real and imaginary components may be extracted separately, leading to two
3131 1.1 mrg uses of the register, only one of which would be replaced. */
3132 1.1 mrg
3133 1.1 mrg static bool
3134 1.1 mrg xtensa_member_type_forces_blk (const_tree, machine_mode mode)
3135 1.1 mrg {
3136 1.1 mrg return mode == CQImode || mode == CHImode;
3137 1.1 mrg }
3138 1.1 mrg
3139 1.1 mrg /* Create the va_list data type.
3140 1.1 mrg
3141 1.1 mrg This structure is set up by __builtin_saveregs. The __va_reg field
3142 1.1 mrg points to a stack-allocated region holding the contents of the
3143 1.1 mrg incoming argument registers. The __va_ndx field is an index
3144 1.1 mrg initialized to the position of the first unnamed (variable)
3145 1.1 mrg argument. This same index is also used to address the arguments
3146 1.1 mrg passed in memory. Thus, the __va_stk field is initialized to point
3147 1.1 mrg to the position of the first argument in memory offset to account
3148 1.1 mrg for the arguments passed in registers and to account for the size
3149 1.1 mrg of the argument registers not being 16-byte aligned. E.G., there
3150 1.1 mrg are 6 argument registers of 4 bytes each, but we want the __va_ndx
3151 1.1 mrg for the first stack argument to have the maximal alignment of 16
3152 1.1 mrg bytes, so we offset the __va_stk address by 32 bytes so that
3153 1.1 mrg __va_stk[32] references the first argument on the stack. */
3154 1.1 mrg
3155 1.1 mrg static tree
3156 1.1 mrg xtensa_build_builtin_va_list (void)
3157 1.1 mrg {
3158 1.1 mrg tree f_stk, f_reg, f_ndx, record, type_decl;
3159 1.1 mrg
3160 1.1 mrg record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3161 1.1 mrg type_decl = build_decl (BUILTINS_LOCATION,
3162 1.1 mrg TYPE_DECL, get_identifier ("__va_list_tag"), record);
3163 1.1 mrg
3164 1.1 mrg f_stk = build_decl (BUILTINS_LOCATION,
3165 1.1 mrg FIELD_DECL, get_identifier ("__va_stk"),
3166 1.1 mrg ptr_type_node);
3167 1.1 mrg f_reg = build_decl (BUILTINS_LOCATION,
3168 1.1 mrg FIELD_DECL, get_identifier ("__va_reg"),
3169 1.1 mrg ptr_type_node);
3170 1.1 mrg f_ndx = build_decl (BUILTINS_LOCATION,
3171 1.1 mrg FIELD_DECL, get_identifier ("__va_ndx"),
3172 1.1 mrg integer_type_node);
3173 1.1 mrg
3174 1.1 mrg DECL_FIELD_CONTEXT (f_stk) = record;
3175 1.1 mrg DECL_FIELD_CONTEXT (f_reg) = record;
3176 1.1 mrg DECL_FIELD_CONTEXT (f_ndx) = record;
3177 1.1 mrg
3178 1.1 mrg TYPE_STUB_DECL (record) = type_decl;
3179 1.1 mrg TYPE_NAME (record) = type_decl;
3180 1.1 mrg TYPE_FIELDS (record) = f_stk;
3181 1.1 mrg DECL_CHAIN (f_stk) = f_reg;
3182 1.1 mrg DECL_CHAIN (f_reg) = f_ndx;
3183 1.1 mrg
3184 1.1 mrg layout_type (record);
3185 1.1 mrg return record;
3186 1.1 mrg }
3187 1.1 mrg
3188 1.1 mrg
/* Save the incoming argument registers on the stack.  Returns the
   address of the saved registers.  */

static rtx
xtensa_builtin_saveregs (void)
{
  rtx gp_regs;
  int arg_words = crtl->args.info.arg_words;
  /* Number of argument registers not consumed by named parameters.  */
  int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;

  if (gp_left <= 0)
    return const0_rtx;

  /* Allocate the general-purpose register space.  */
  gp_regs = assign_stack_local
    (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
  set_mem_alias_set (gp_regs, get_varargs_alias_set ());

  /* Now store the incoming registers.  */
  /* Under the windowed ABI, a reference to a7 must be routed through a
     copy (see xtensa_copy_incoming_a7); the flags below arm that
     mechanism before move_block_from_reg touches the registers.  */
  cfun->machine->need_a7_copy = TARGET_WINDOWED_ABI;
  cfun->machine->vararg_a7 = true;
  move_block_from_reg (GP_ARG_FIRST + arg_words,
		       adjust_address (gp_regs, BLKmode,
				       arg_words * UNITS_PER_WORD),
		       gp_left);
  /* The a7-copy insn, if one was generated, must precede everything
     else in the function.  */
  if (cfun->machine->vararg_a7_copy != 0)
    emit_insn_before (cfun->machine->vararg_a7_copy, get_insns ());

  return XEXP (gp_regs, 0);
}
3219 1.1 mrg
3220 1.1 mrg
/* Implement `va_start' for varargs and stdarg.  We look at the
   current function to fill in an initial va_list.  VALIST is the
   va_list expression; NEXTARG is unused.  Each assignment below is
   expanded to RTL immediately, in order.  */

static void
xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree t, u;
  int arg_words;

  arg_words = crtl->args.info.arg_words;

  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = DECL_CHAIN (f_stk);
  f_ndx = DECL_CHAIN (f_reg);

  stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
  reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), unshare_expr (valist),
		f_reg, NULL_TREE);
  ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), unshare_expr (valist),
		f_ndx, NULL_TREE);

  /* Call __builtin_saveregs; save the result in __va_reg */
  u = make_tree (sizetype, expand_builtin_saveregs ());
  u = fold_convert (ptr_type_node, u);
  t = build2 (MODIFY_EXPR, ptr_type_node, reg, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_stk member to ($arg_ptr - 32).  */
  u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
  u = fold_build_pointer_plus_hwi (u, -32);
  t = build2 (MODIFY_EXPR, ptr_type_node, stk, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_ndx member.  If the first variable argument is on
     the stack, adjust __va_ndx by 2 words to account for the extra
     alignment offset for __va_stk.  */
  if (arg_words >= MAX_ARGS_IN_REGISTERS)
    arg_words += 2;
  t = build2 (MODIFY_EXPR, integer_type_node, ndx,
	      build_int_cst (integer_type_node, arg_words * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3269 1.1 mrg
3270 1.1 mrg
/* Implement `va_arg' by emitting GIMPLE into PRE_P.  VALIST is the
   va_list expression and TYPE the argument type being fetched.
   Returns an expression for the fetched value.  The generated code
   mirrors the layout set up by xtensa_va_start: one byte index
   (__va_ndx) addresses both the saved-register block (__va_reg) and
   the stack overflow area (__va_stk).  */

static tree
xtensa_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			     gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree type_size, array, orig_ndx, addr, size, va_size, t;
  tree lab_false, lab_over, lab_false2;
  bool indirect;

  indirect = pass_va_arg_by_reference (type);
  if (indirect)
    type = build_pointer_type (type);

  /* Handle complex values as separate real and imaginary parts.  */
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree real_part, imag_part;

      real_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
					       pre_p, NULL);
      real_part = get_initialized_tmp_var (real_part, pre_p, NULL);

      imag_part = xtensa_gimplify_va_arg_expr (unshare_expr (valist),
					       TREE_TYPE (type),
					       pre_p, NULL);
      imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);

      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
    }

  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = DECL_CHAIN (f_stk);
  f_ndx = DECL_CHAIN (f_reg);

  stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist,
		f_stk, NULL_TREE);
  reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), unshare_expr (valist),
		f_reg, NULL_TREE);
  ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), unshare_expr (valist),
		f_ndx, NULL_TREE);

  /* Arguments always occupy a whole number of words.  */
  type_size = size_in_bytes (type);
  va_size = round_up (type_size, UNITS_PER_WORD);
  gimplify_expr (&va_size, pre_p, NULL, is_gimple_val, fb_rvalue);


  /* First align __va_ndx if necessary for this arg:

     orig_ndx = (AP).__va_ndx;
     if (__alignof__ (TYPE) > 4 )
       orig_ndx = ((orig_ndx + __alignof__ (TYPE) - 1)
			& -__alignof__ (TYPE)); */

  orig_ndx = get_initialized_tmp_var (ndx, pre_p, NULL);

  if (TYPE_ALIGN (type) > BITS_PER_WORD)
    {
      int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_UNIT;

      t = build2 (PLUS_EXPR, integer_type_node, unshare_expr (orig_ndx),
		  build_int_cst (integer_type_node, align - 1));
      t = build2 (BIT_AND_EXPR, integer_type_node, t,
		  build_int_cst (integer_type_node, -align));
      gimplify_assign (unshare_expr (orig_ndx), t, pre_p);
    }


  /* Increment __va_ndx to point past the argument:

     (AP).__va_ndx = orig_ndx + __va_size (TYPE); */

  t = fold_convert (integer_type_node, va_size);
  t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, t);
  gimplify_assign (unshare_expr (ndx), t, pre_p);


  /* Check if the argument is in registers:

     if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
         && !must_pass_in_stack (type))
        __array = (AP).__va_reg; */

  array = create_tmp_var (ptr_type_node);

  lab_over = NULL;
  if (!must_pass_va_arg_in_stack (type))
    {
      lab_false = create_artificial_label (UNKNOWN_LOCATION);
      lab_over = create_artificial_label (UNKNOWN_LOCATION);

      t = build2 (GT_EXPR, boolean_type_node, unshare_expr (ndx),
		  build_int_cst (integer_type_node,
				 MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
      t = build3 (COND_EXPR, void_type_node, t,
		  build1 (GOTO_EXPR, void_type_node, lab_false),
		  NULL_TREE);
      gimplify_and_add (t, pre_p);

      gimplify_assign (unshare_expr (array), reg, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_over);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_false);
      gimplify_and_add (t, pre_p);
    }


  /* ...otherwise, the argument is on the stack (never split between
     registers and the stack -- change __va_ndx if necessary):

     else
       {
	 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
	     (AP).__va_ndx = 32 + __va_size (TYPE);
	 __array = (AP).__va_stk;
       } */

  lab_false2 = create_artificial_label (UNKNOWN_LOCATION);

  t = build2 (GT_EXPR, boolean_type_node, unshare_expr (orig_ndx),
	      build_int_cst (integer_type_node,
			     MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
  t = build3 (COND_EXPR, void_type_node, t,
	      build1 (GOTO_EXPR, void_type_node, lab_false2),
	      NULL_TREE);
  gimplify_and_add (t, pre_p);

  t = size_binop (PLUS_EXPR, unshare_expr (va_size), size_int (32));
  t = fold_convert (integer_type_node, t);
  gimplify_assign (unshare_expr (ndx), t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_false2);
  gimplify_and_add (t, pre_p);

  gimplify_assign (array, stk, pre_p);

  if (lab_over)
    {
      t = build1 (LABEL_EXPR, void_type_node, lab_over);
      gimplify_and_add (t, pre_p);
    }


  /* Given the base array pointer (__array) and index to the subsequent
     argument (__va_ndx), find the address:

     __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
				? sizeof (TYPE)
				: __va_size (TYPE))

     The results are endian-dependent because values smaller than one word
     are aligned differently.  */


  if (BYTES_BIG_ENDIAN && TREE_CODE (type_size) == INTEGER_CST)
    {
      t = fold_build2 (GE_EXPR, boolean_type_node, unshare_expr (type_size),
		       size_int (PARM_BOUNDARY / BITS_PER_UNIT));
      t = fold_build3 (COND_EXPR, sizetype, t, unshare_expr (va_size),
		       unshare_expr (type_size));
      size = t;
    }
  else
    size = unshare_expr (va_size);

  t = fold_convert (sizetype, unshare_expr (ndx));
  t = build2 (MINUS_EXPR, sizetype, t, size);
  addr = fold_build_pointer_plus (unshare_expr (array), t);

  addr = fold_convert (build_pointer_type (type), addr);
  if (indirect)
    /* By-reference argument: the slot holds a pointer to the value.  */
    addr = build_va_arg_indirect_ref (addr);
  return build_va_arg_indirect_ref (addr);
}
3450 1.1 mrg
3451 1.1 mrg
3452 1.1 mrg /* Builtins. */
3453 1.1 mrg
/* Machine-specific builtin function codes for this target.
   XTENSA_BUILTIN_max is not a builtin itself; it is the sentinel /
   count value.  */
enum xtensa_builtin
{
  XTENSA_BUILTIN_UMULSIDI3,
  XTENSA_BUILTIN_max
};
3459 1.1 mrg
3460 1.1 mrg
/* Worker for TARGET_INIT_BUILTINS: register __builtin_umulsidi3,
   an unsigned 32x32 -> 64 bit multiply that maps onto the library
   function __umulsidi3 when the configuration lacks the needed
   multiply hardware (see xtensa_expand_builtin).  */
static void
xtensa_init_builtins (void)
{
  tree ftype, decl;

  /* unsigned DImode result from two unsigned SImode arguments.  */
  ftype = build_function_type_list (unsigned_intDI_type_node,
				    unsigned_intSI_type_node,
				    unsigned_intSI_type_node, NULL_TREE);

  decl = add_builtin_function ("__builtin_umulsidi3", ftype,
			       XTENSA_BUILTIN_UMULSIDI3, BUILT_IN_MD,
			       "__umulsidi3", NULL_TREE);
  /* A pure multiply: it cannot throw and has no side effects.  */
  TREE_NOTHROW (decl) = 1;
  TREE_READONLY (decl) = 1;
}
3476 1.1 mrg
3477 1.1 mrg
/* Worker for TARGET_FOLD_BUILTIN.  Fold __builtin_umulsidi3 into a
   plain widening MULT_EXPR when both operands are constants (folds at
   compile time) or when TARGET_MUL32_HIGH makes the open-coded multiply
   cheap; otherwise return NULL so the call is kept and later expanded
   as a real call to __umulsidi3.  */
static tree
xtensa_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
		     bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_MD_FUNCTION_CODE (fndecl);
  tree arg0, arg1;

  switch (fcode)
    {
    case XTENSA_BUILTIN_UMULSIDI3:
      arg0 = args[0];
      arg1 = args[1];
      if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	  || TARGET_MUL32_HIGH)
	/* Widen both operands to 64 bits and multiply there.  */
	return fold_build2 (MULT_EXPR, unsigned_intDI_type_node,
			    fold_convert (unsigned_intDI_type_node, arg0),
			    fold_convert (unsigned_intDI_type_node, arg1));
      break;

    default:
      /* Only one MD builtin is registered; anything else is a bug.  */
      internal_error ("bad builtin code");
      break;
    }

  return NULL;
}
3504 1.1 mrg
3505 1.1 mrg
/* Worker for TARGET_EXPAND_BUILTIN.  By the time we get here,
   xtensa_fold_builtin has already turned the builtin into a direct
   multiply whenever the hardware supports it, so expanding the builtin
   just means emitting the library call.  */
static rtx
xtensa_expand_builtin (tree exp, rtx target,
		       rtx subtarget ATTRIBUTE_UNUSED,
		       machine_mode mode ATTRIBUTE_UNUSED,
		       int ignore)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_MD_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case XTENSA_BUILTIN_UMULSIDI3:
      /* The umulsidi3 builtin is just a mechanism to avoid calling the real
	 __umulsidi3 function when the Xtensa configuration can directly
	 implement it.  If not, just call the function.  */
      return expand_call (exp, target, ignore);

    default:
      internal_error ("bad builtin code");
    }
  return NULL_RTX;
}
3528 1.1 mrg
3529 1.1 mrg /* Worker function for TARGET_PREFERRED_RELOAD_CLASS. */
3530 1.1 mrg
3531 1.1 mrg static reg_class_t
3532 1.1 mrg xtensa_preferred_reload_class (rtx x, reg_class_t rclass)
3533 1.1 mrg {
3534 1.1 mrg if (CONSTANT_P (x) && CONST_DOUBLE_P (x))
3535 1.1 mrg return NO_REGS;
3536 1.1 mrg
3537 1.1 mrg /* Don't use the stack pointer or hard frame pointer for reloads!
3538 1.1 mrg The hard frame pointer would normally be OK except that it may
3539 1.1 mrg briefly hold an incoming argument in the prologue, and reload
3540 1.1 mrg won't know that it is live because the hard frame pointer is
3541 1.1 mrg treated specially. */
3542 1.1 mrg
3543 1.1 mrg if (rclass == AR_REGS || rclass == GR_REGS)
3544 1.1 mrg return RL_REGS;
3545 1.1 mrg
3546 1.1 mrg return rclass;
3547 1.1 mrg }
3548 1.1 mrg
3549 1.1 mrg /* Worker function for TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
3550 1.1 mrg
3551 1.1 mrg static reg_class_t
3552 1.1 mrg xtensa_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
3553 1.1 mrg reg_class_t rclass)
3554 1.1 mrg {
3555 1.1 mrg /* Don't use the stack pointer or hard frame pointer for reloads!
3556 1.1 mrg The hard frame pointer would normally be OK except that it may
3557 1.1 mrg briefly hold an incoming argument in the prologue, and reload
3558 1.1 mrg won't know that it is live because the hard frame pointer is
3559 1.1 mrg treated specially. */
3560 1.1 mrg
3561 1.1 mrg if (rclass == AR_REGS || rclass == GR_REGS)
3562 1.1 mrg return RL_REGS;
3563 1.1 mrg
3564 1.1 mrg return rclass;
3565 1.1 mrg }
3566 1.1 mrg
3567 1.1 mrg /* Worker function for TARGET_SECONDARY_RELOAD. */
3568 1.1 mrg
/* Worker function for TARGET_SECONDARY_RELOAD.  Decide whether a reload
   of X into/out of class RCLASS needs an intermediate register class or
   a special reload pattern.  */

static reg_class_t
xtensa_secondary_reload (bool in_p, rtx x, reg_class_t rclass,
			 machine_mode mode, secondary_reload_info *sri)
{
  int regno;

  /* Loads from the constant pool: FP registers cannot be loaded from a
     literal directly, so go through RL_REGS; narrow (QI/HI) literal
     loads need the dedicated reload patterns since L32R only loads
     full words.  */
  if (in_p && constantpool_mem_p (x))
    {
      if (rclass == FP_REGS)
	return RL_REGS;

      if (mode == QImode)
	sri->icode = CODE_FOR_reloadqi_literal;
      else if (mode == HImode)
	sri->icode = CODE_FOR_reloadhi_literal;
    }

  /* The MAC16 accumulator can only be moved to/from the general
     registers, so any other class needs RL_REGS as an intermediary.  */
  regno = xt_true_regnum (x);
  if (ACC_REG_P (regno))
    return ((rclass == GR_REGS || rclass == RL_REGS) ? NO_REGS : RL_REGS);
  if (rclass == ACC_REG)
    return (GP_REG_P (regno) ? NO_REGS : RL_REGS);

  return NO_REGS;
}
3594 1.1 mrg
3595 1.1 mrg
/* Set up reg_alloc_order for the current function.  Non-leaf functions
   use a fixed order (windowed ABI vs. call0 ABI); leaf functions prefer
   the non-argument AR registers so incoming argument registers stay
   untouched as long as possible.  */
void
order_regs_for_local_alloc (void)
{
  if (!leaf_function_p ())
    {
      static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
	REG_ALLOC_ORDER;
      /* call0 ABI: prefer the caller-saved a2-a11, then a12-a15, the
	 accumulator and the FP/BR registers, and only as a last resort
	 a0/a1 and the pseudo frame/arg pointers.  */
      static const int reg_nonleaf_alloc_order_call0[FIRST_PSEUDO_REGISTER] =
	{
	  11, 10,  9,  8,  7,  6,  5,  4,  3,  2, 12, 13, 14, 15,
	  18,
	  19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
	  0,  1, 16, 17,
	  35,
	};

      memcpy (reg_alloc_order, TARGET_WINDOWED_ABI ?
	      reg_nonleaf_alloc_order : reg_nonleaf_alloc_order_call0,
	      FIRST_PSEUDO_REGISTER * sizeof (int));
    }
  else
    {
      int i, num_arg_regs;
      int nxt = 0;

      /* Use the AR registers in increasing order (skipping a0 and a1)
	 but save the incoming argument registers for a last resort.  */
      num_arg_regs = crtl->args.info.arg_words;
      if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
	num_arg_regs = MAX_ARGS_IN_REGISTERS;
      for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
	reg_alloc_order[nxt++] = i + num_arg_regs;
      for (i = 0; i < num_arg_regs; i++)
	reg_alloc_order[nxt++] = GP_ARG_FIRST + i;

      /* List the coprocessor registers in order.  */
      for (i = 0; i < BR_REG_NUM; i++)
	reg_alloc_order[nxt++] = BR_REG_FIRST + i;

      /* List the FP registers in order for now.  */
      for (i = 0; i < 16; i++)
	reg_alloc_order[nxt++] = FP_REG_FIRST + i;

      /* GCC requires that we list *all* the registers....  */
      reg_alloc_order[nxt++] = 0;	/* a0 = return address */
      reg_alloc_order[nxt++] = 1;	/* a1 = stack pointer */
      reg_alloc_order[nxt++] = 16;	/* pseudo frame pointer */
      reg_alloc_order[nxt++] = 17;	/* pseudo arg pointer */

      reg_alloc_order[nxt++] = ACC_REG_FIRST;	/* MAC16 accumulator */
    }
}
3648 1.1 mrg
3649 1.1 mrg
3650 1.1 mrg /* Some Xtensa targets support multiple bss sections. If the section
3651 1.1 mrg name ends with ".bss", add SECTION_BSS to the flags. */
3652 1.1 mrg
3653 1.1 mrg static unsigned int
3654 1.1 mrg xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
3655 1.1 mrg {
3656 1.1 mrg unsigned int flags = default_section_type_flags (decl, name, reloc);
3657 1.1 mrg const char *suffix;
3658 1.1 mrg
3659 1.1 mrg suffix = strrchr (name, '.');
3660 1.1 mrg if (suffix && strcmp (suffix, ".bss") == 0)
3661 1.1 mrg {
3662 1.1 mrg if (!decl || (TREE_CODE (decl) == VAR_DECL
3663 1.1 mrg && DECL_INITIAL (decl) == NULL_TREE))
3664 1.1 mrg flags |= SECTION_BSS; /* @nobits */
3665 1.1 mrg else
3666 1.1 mrg warning (0, "only uninitialized variables can be placed in a "
3667 1.1 mrg "%<.bss%> section");
3668 1.1 mrg }
3669 1.1 mrg
3670 1.1 mrg return flags;
3671 1.1 mrg }
3672 1.1 mrg
3673 1.1 mrg
3674 1.1 mrg /* The literal pool stays with the function. */
3675 1.1 mrg
/* The literal pool stays with the function: constants are always
   placed in the section of the function being compiled, regardless of
   mode, value, or alignment.  */

static section *
xtensa_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
			   rtx x ATTRIBUTE_UNUSED,
			   unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  return function_section (current_function_decl);
}
3683 1.1 mrg
3684 1.1 mrg /* Worker function for TARGET_REGISTER_MOVE_COST. */
3685 1.1 mrg
3686 1.1 mrg static int
3687 1.1 mrg xtensa_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3688 1.1 mrg reg_class_t from, reg_class_t to)
3689 1.1 mrg {
3690 1.1 mrg if (from == to && from != BR_REGS && to != BR_REGS)
3691 1.1 mrg return 2;
3692 1.1 mrg else if (reg_class_subset_p (from, AR_REGS)
3693 1.1 mrg && reg_class_subset_p (to, AR_REGS))
3694 1.1 mrg return 2;
3695 1.1 mrg else if (reg_class_subset_p (from, AR_REGS) && to == ACC_REG)
3696 1.1 mrg return 3;
3697 1.1 mrg else if (from == ACC_REG && reg_class_subset_p (to, AR_REGS))
3698 1.1 mrg return 3;
3699 1.1 mrg else
3700 1.1 mrg return 10;
3701 1.1 mrg }
3702 1.1 mrg
3703 1.1 mrg /* Worker function for TARGET_MEMORY_MOVE_COST. */
3704 1.1 mrg
/* Worker function for TARGET_MEMORY_MOVE_COST.  A flat cost of 4 for
   all modes, classes, and directions (load or store).  */

static int
xtensa_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			 reg_class_t rclass ATTRIBUTE_UNUSED,
			 bool in ATTRIBUTE_UNUSED)
{
  return 4;
}
3712 1.1 mrg
3713 1.1 mrg /* Compute a (partial) cost for rtx X. Return true if the complete
3714 1.1 mrg cost has been computed, and false if subexpressions should be
3715 1.1 mrg scanned. In either case, *TOTAL contains the cost result. */
3716 1.1 mrg
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
xtensa_rtx_costs (rtx x, machine_mode mode, int outer_code,
		  int opno ATTRIBUTE_UNUSED,
		  int *total, bool speed ATTRIBUTE_UNUSED)
{
  int code = GET_CODE (x);

  switch (code)
    {
    case CONST_INT:
      /* Immediates that fit an instruction's immediate field are free
	 (or nearly so) in that context; otherwise the constant must be
	 materialized (MOVI/L32R or CONST16 pair).  */
      switch (outer_code)
	{
	case SET:
	  if (xtensa_simm12b (INTVAL (x)))
	    {
	      /* Fits MOVI's signed 12-bit immediate.  */
	      *total = 4;
	      return true;
	    }
	  break;
	case PLUS:
	  if (xtensa_simm8 (INTVAL (x))
	      || xtensa_simm8x256 (INTVAL (x)))
	    {
	      /* Fits ADDI or ADDMI.  */
	      *total = 0;
	      return true;
	    }
	  break;
	case AND:
	  if (xtensa_mask_immediate (INTVAL (x)))
	    {
	      /* Can be done with EXTUI.  */
	      *total = 0;
	      return true;
	    }
	  break;
	case COMPARE:
	  if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
	    {
	      /* Usable directly by the branch-immediate instructions.  */
	      *total = 0;
	      return true;
	    }
	  break;
	case ASHIFT:
	case ASHIFTRT:
	case LSHIFTRT:
	case ROTATE:
	case ROTATERT:
	  /* No way to tell if X is the 2nd operand so be conservative.  */
	default: break;
	}
      if (xtensa_simm12b (INTVAL (x)))
	*total = 5;
      else if (TARGET_CONST16)
	*total = COSTS_N_INSNS (2);
      else
	*total = 6;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      /* Symbolic constants: CONST16 pair vs. single L32R.  */
      if (TARGET_CONST16)
	*total = COSTS_N_INSNS (2);
      else
	*total = 5;
      return true;

    case CONST_DOUBLE:
      /* 64-bit constant: two CONST16 pairs or two L32R-ish loads.  */
      if (TARGET_CONST16)
	*total = COSTS_N_INSNS (4);
      else
	*total = 7;
      return true;

    case MEM:
      {
	/* One load/store per word, doubled if the address itself
	   needs to be legitimized first.  */
	int num_words =
	  (GET_MODE_SIZE (mode) > UNITS_PER_WORD) ?  2 : 1;

	if (memory_address_p (mode, XEXP ((x), 0)))
	  *total = COSTS_N_INSNS (num_words);
	else
	  *total = COSTS_N_INSNS (2*num_words);
	return true;
      }

    case FFS:
    case CTZ:
      /* Cheap only with the NSA option; otherwise a libcall/loop.  */
      *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
      return true;

    case CLZ:
      *total = COSTS_N_INSNS (TARGET_NSA ? 1 : 50);
      return true;

    case NOT:
      *total = COSTS_N_INSNS (mode == DImode ? 3 : 2);
      return true;

    case AND:
    case IOR:
    case XOR:
      if (mode == DImode)
	*total = COSTS_N_INSNS (2);
      else
	*total = COSTS_N_INSNS (1);
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      /* No 64-bit shift instructions; those go through a libcall.  */
      if (mode == DImode)
	*total = COSTS_N_INSNS (50);
      else
	*total = COSTS_N_INSNS (1);
      return true;

    case ABS:
      {
	if (mode == SFmode)
	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
	else if (mode == DFmode)
	  *total = COSTS_N_INSNS (50);
	else
	  *total = COSTS_N_INSNS (4);
	return true;
      }

    case PLUS:
    case MINUS:
      {
	if (mode == SFmode)
	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
	else if (mode == DFmode || mode == DImode)
	  *total = COSTS_N_INSNS (50);
	else
	  *total = COSTS_N_INSNS (1);
	return true;
      }

    case NEG:
      *total = COSTS_N_INSNS (mode == DImode ? 4 : 2);
      return true;

    case MULT:
      {
	/* Pick the cheapest available multiply option.  */
	if (mode == SFmode)
	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
	else if (mode == DFmode)
	  *total = COSTS_N_INSNS (50);
	else if (mode == DImode)
	  *total = COSTS_N_INSNS (TARGET_MUL32_HIGH ? 10 : 50);
	else if (TARGET_MUL32)
	  *total = COSTS_N_INSNS (4);
	else if (TARGET_MAC16)
	  *total = COSTS_N_INSNS (16);
	else if (TARGET_MUL16)
	  *total = COSTS_N_INSNS (12);
	else
	  *total = COSTS_N_INSNS (50);
	return true;
      }

    case DIV:
    case MOD:
      {
	if (mode == SFmode)
	  {
	    *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
	    return true;
	  }
	else if (mode == DFmode)
	  {
	    *total = COSTS_N_INSNS (50);
	    return true;
	  }
      }
      /* Fall through.  */

    case UDIV:
    case UMOD:
      {
	if (mode == DImode)
	  *total = COSTS_N_INSNS (50);
	else if (TARGET_DIV32)
	  *total = COSTS_N_INSNS (32);
	else
	  *total = COSTS_N_INSNS (50);
	return true;
      }

    case SQRT:
      if (mode == SFmode)
	*total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
      else
	*total = COSTS_N_INSNS (50);
      return true;

    case SMIN:
    case UMIN:
    case SMAX:
    case UMAX:
      *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
      return true;

    case SIGN_EXTRACT:
    case SIGN_EXTEND:
      *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
      return true;

    case ZERO_EXTRACT:
    case ZERO_EXTEND:
      *total = COSTS_N_INSNS (1);
      return true;

    default:
      /* Let the caller recurse into subexpressions.  */
      return false;
    }
}
3935 1.1 mrg
3936 1.1 mrg /* Worker function for TARGET_RETURN_IN_MEMORY. */
3937 1.1 mrg
3938 1.1 mrg static bool
3939 1.1 mrg xtensa_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3940 1.1 mrg {
3941 1.1 mrg return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
3942 1.1 mrg > 4 * UNITS_PER_WORD);
3943 1.1 mrg }
3944 1.1 mrg
3945 1.1 mrg /* Worker function for TARGET_FUNCTION_VALUE. */
3946 1.1 mrg
3947 1.1 mrg rtx
3948 1.1 mrg xtensa_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED,
3949 1.1 mrg bool outgoing)
3950 1.1 mrg {
3951 1.1 mrg return gen_rtx_REG ((INTEGRAL_TYPE_P (valtype)
3952 1.1 mrg && TYPE_PRECISION (valtype) < BITS_PER_WORD)
3953 1.1 mrg ? SImode : TYPE_MODE (valtype),
3954 1.1 mrg outgoing ? GP_OUTGOING_RETURN : GP_RETURN);
3955 1.1 mrg }
3956 1.1 mrg
3957 1.1 mrg /* Worker function for TARGET_LIBCALL_VALUE. */
3958 1.1 mrg
3959 1.1 mrg static rtx
3960 1.1 mrg xtensa_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
3961 1.1 mrg {
3962 1.1 mrg return gen_rtx_REG ((GET_MODE_CLASS (mode) == MODE_INT
3963 1.1 mrg && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3964 1.1 mrg ? SImode : mode, GP_RETURN);
3965 1.1 mrg }
3966 1.1 mrg
3967 1.1 mrg /* Worker function TARGET_FUNCTION_VALUE_REGNO_P. */
3968 1.1 mrg
/* Worker function TARGET_FUNCTION_VALUE_REGNO_P.  Only GP_RETURN is
   used for function return values.  */

static bool
xtensa_function_value_regno_p (const unsigned int regno)
{
  return (regno == GP_RETURN);
}
3974 1.1 mrg
3975 1.1 mrg /* The static chain is passed in memory. Provide rtx giving 'mem'
3976 1.1 mrg expressions that denote where they are stored. */
3977 1.1 mrg
/* The static chain is passed in memory.  Provide rtx giving 'mem'
   expressions that denote where they are stored.
   Windowed ABI: a fixed stack slot below the (incoming) argument /
   stack pointer; call0 ABI: register a8.  The -5 * UNITS_PER_WORD slot
   must agree with the "s32i a9, sp, MIN_FRAME_SIZE - 20" store emitted
   by xtensa_asm_trampoline_template.  */

static rtx
xtensa_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (TARGET_WINDOWED_ABI)
    {
      rtx base = incoming_p ? arg_pointer_rtx : stack_pointer_rtx;
      return gen_frame_mem (Pmode, plus_constant (Pmode, base,
						  -5 * UNITS_PER_WORD));
    }
  else
    return gen_rtx_REG (Pmode, A8_REG);
}
3990 1.1 mrg
3991 1.1 mrg
3992 1.1 mrg /* TRAMPOLINE_TEMPLATE: For Xtensa, the trampoline must perform an ENTRY
3993 1.1 mrg instruction with a minimal stack frame in order to get some free
3994 1.1 mrg registers. Once the actual call target is known, the proper stack frame
3995 1.1 mrg size is extracted from the ENTRY instruction at the target and the
3996 1.1 mrg current frame is adjusted to match. The trampoline then transfers
3997 1.1 mrg control to the instruction following the ENTRY at the target. Note:
3998 1.1 mrg this assumes that the target begins with an ENTRY instruction. */
3999 1.1 mrg
/* Emit the trampoline template assembly (see the TRAMPOLINE_TEMPLATE
   comment above).  The .Lchainval/.Lfnaddr slot offsets produced here
   must stay in sync with the chain_off/func_off values computed in
   xtensa_trampoline_init.  The CALL0 variant is used when L32R cannot
   be relied on (CONST16 or absolute-literals configurations).  */

static void
xtensa_asm_trampoline_template (FILE *stream)
{
  bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS);

  /* The assembler must not relax or transform these instructions,
     otherwise the hard-coded offsets would be wrong.  */
  fprintf (stream, "\t.begin no-transform\n");

  if (TARGET_WINDOWED_ABI)
    {
      fprintf (stream, "\tentry\tsp, %d\n", MIN_FRAME_SIZE);

      if (use_call0)
	{
	  /* Save the return address.  */
	  fprintf (stream, "\tmov\ta10, a0\n");

	  /* Use a CALL0 instruction to skip past the constants and in the
	     process get the PC into A0.  This allows PC-relative access to
	     the constants without relying on L32R.  */
	  fprintf (stream, "\tcall0\t.Lskipconsts\n");
	}
      else
	fprintf (stream, "\tj\t.Lskipconsts\n");

      /* Two inline literal words, patched by xtensa_trampoline_init.  */
      fprintf (stream, "\t.align\t4\n");
      fprintf (stream, ".Lchainval:%s0\n", integer_asm_op (4, TRUE));
      fprintf (stream, ".Lfnaddr:%s0\n", integer_asm_op (4, TRUE));
      fprintf (stream, ".Lskipconsts:\n");

      /* Load the static chain and function address from the trampoline.  */
      if (use_call0)
	{
	  /* a0 holds the return address of call0; +3 skips past its
	     3-byte encoding to reach the aligned literals.  */
	  fprintf (stream, "\taddi\ta0, a0, 3\n");
	  fprintf (stream, "\tl32i\ta9, a0, 0\n");
	  fprintf (stream, "\tl32i\ta8, a0, 4\n");
	}
      else
	{
	  fprintf (stream, "\tl32r\ta9, .Lchainval\n");
	  fprintf (stream, "\tl32r\ta8, .Lfnaddr\n");
	}

      /* Store the static chain.  */
      fprintf (stream, "\ts32i\ta9, sp, %d\n", MIN_FRAME_SIZE - 20);

      /* Set the proper stack pointer value: extract the 12-bit frame
	 size field from the target's ENTRY instruction, scale it by 8,
	 and adjust the current frame to match.  */
      fprintf (stream, "\tl32i\ta9, a8, 0\n");
      fprintf (stream, "\textui\ta9, a9, %d, 12\n",
	       TARGET_BIG_ENDIAN ? 8 : 12);
      fprintf (stream, "\tslli\ta9, a9, 3\n");
      fprintf (stream, "\taddi\ta9, a9, %d\n", -MIN_FRAME_SIZE);
      fprintf (stream, "\tsub\ta9, sp, a9\n");
      fprintf (stream, "\tmovsp\tsp, a9\n");

      if (use_call0)
	/* Restore the return address.  */
	fprintf (stream, "\tmov\ta0, a10\n");

      /* Jump to the instruction following the ENTRY.  */
      fprintf (stream, "\taddi\ta8, a8, 3\n");
      fprintf (stream, "\tjx\ta8\n");

      /* Pad size to a multiple of TRAMPOLINE_ALIGNMENT.  */
      if (use_call0)
	fprintf (stream, "\t.byte\t0\n");
      else
	fprintf (stream, "\tnop\n");
    }
  else
    {
      /* call0 ABI: no ENTRY / frame adjustment needed; just load the
	 static chain into a8 and jump to the target.  */
      if (use_call0)
	{
	  /* Save the return address.  */
	  fprintf (stream, "\tmov\ta10, a0\n");

	  /* Use a CALL0 instruction to skip past the constants and in the
	     process get the PC into A0.  This allows PC-relative access to
	     the constants without relying on L32R.  */
	  fprintf (stream, "\tcall0\t.Lskipconsts\n");
	}
      else
	fprintf (stream, "\tj\t.Lskipconsts\n");

      fprintf (stream, "\t.align\t4\n");
      fprintf (stream, ".Lchainval:%s0\n", integer_asm_op (4, TRUE));
      fprintf (stream, ".Lfnaddr:%s0\n", integer_asm_op (4, TRUE));
      fprintf (stream, ".Lskipconsts:\n");

      /* Load the static chain and function address from the trampoline.  */
      if (use_call0)
	{
	  fprintf (stream, "\taddi\ta0, a0, 3\n");
	  fprintf (stream, "\tl32i\ta8, a0, 0\n");
	  fprintf (stream, "\tl32i\ta9, a0, 4\n");
	  fprintf (stream, "\tmov\ta0, a10\n");
	}
      else
	{
	  fprintf (stream, "\tl32r\ta8, .Lchainval\n");
	  fprintf (stream, "\tl32r\ta9, .Lfnaddr\n");
	}
      fprintf (stream, "\tjx\ta9\n");

      /* Pad size to a multiple of TRAMPOLINE_ALIGNMENT.  */
      if (use_call0)
	fprintf (stream, "\t.byte\t0\n");
      else
	fprintf (stream, "\tnop\n");
    }
  fprintf (stream, "\t.end no-transform\n");
}
4111 1.1 mrg
/* Worker for TARGET_TRAMPOLINE_INIT: copy the trampoline template into
   M_TRAMP and patch in the static CHAIN value and target function
   address.  The chain_off/func_off byte offsets locate the
   .Lchainval/.Lfnaddr literal words emitted by
   xtensa_asm_trampoline_template and depend on the same
   ABI/use_call0 choices made there.  */

static void
xtensa_trampoline_init (rtx m_tramp, tree fndecl, rtx chain)
{
  rtx func = XEXP (DECL_RTL (fndecl), 0);
  bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS);
  int chain_off;
  int func_off;

  if (TARGET_WINDOWED_ABI)
    {
      chain_off = use_call0 ? 12 : 8;
      func_off = use_call0 ? 16 : 12;
    }
  else
    {
      chain_off = use_call0 ? 8 : 4;
      func_off = use_call0 ? 12 : 8;
    }

  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  emit_move_insn (adjust_address (m_tramp, SImode, chain_off), chain);
  emit_move_insn (adjust_address (m_tramp, SImode, func_off), func);
  /* The trampoline was written as data; flush the caches so it can be
     executed.  */
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_sync_caches"),
		     LCT_NORMAL, VOIDmode, XEXP (m_tramp, 0), Pmode);
}
4139 1.1 mrg
4140 1.1 mrg /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
4141 1.1 mrg
4142 1.1 mrg static bool
4143 1.1 mrg xtensa_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
4144 1.1 mrg {
4145 1.1 mrg return !xtensa_tls_referenced_p (x);
4146 1.1 mrg }
4147 1.1 mrg
4148 1.1 mrg /* Implement TARGET_CAN_USE_DOLOOP_P. */
4149 1.1 mrg
4150 1.1 mrg static bool
4151 1.1 mrg xtensa_can_use_doloop_p (const widest_int &, const widest_int &,
4152 1.1 mrg unsigned int loop_depth, bool entered_at_top)
4153 1.1 mrg {
4154 1.1 mrg /* Considering limitations in the hardware, only use doloop
4155 1.1 mrg for innermost loops which must be entered from the top. */
4156 1.1 mrg if (loop_depth > 1 || !entered_at_top)
4157 1.1 mrg return false;
4158 1.1 mrg
4159 1.1 mrg return true;
4160 1.1 mrg }
4161 1.1 mrg
4162 1.1 mrg /* NULL if INSN insn is valid within a low-overhead loop.
4163 1.1 mrg Otherwise return why doloop cannot be applied. */
4164 1.1 mrg
4165 1.1 mrg static const char *
4166 1.1 mrg xtensa_invalid_within_doloop (const rtx_insn *insn)
4167 1.1 mrg {
4168 1.1 mrg if (CALL_P (insn))
4169 1.1 mrg return "Function call in the loop.";
4170 1.1 mrg
4171 1.1 mrg if (JUMP_P (insn) && INSN_CODE (insn) == CODE_FOR_return)
4172 1.1 mrg return "Return from a call instruction in the loop.";
4173 1.1 mrg
4174 1.1 mrg return NULL;
4175 1.1 mrg }
4176 1.1 mrg
4177 1.1 mrg /* Optimize LOOP. */
4178 1.1 mrg
/* Optimize LOOP: try to convert it into an Xtensa zero-overhead
   hardware loop.  Returns true on success.  The loop must be
   innermost, single-entry through its head, free of calls/asm, and
   must not otherwise use the iteration register; the
   zero_cost_loop_start insn is then placed on the fall-through path
   into the loop (creating a new pre-header block when needed).  */

static bool
hwloop_optimize (hwloop_info loop)
{
  int i;
  edge entry_edge;
  basic_block entry_bb;
  rtx iter_reg;
  rtx_insn *insn, *seq, *entry_after;

  /* The LOOP instruction only supports innermost loops.  */
  if (loop->depth > 1)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d is not innermost\n",
		 loop->loop_no);
      return false;
    }

  if (!loop->incoming_dest)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d has more than one entry\n",
		 loop->loop_no);
      return false;
    }

  if (loop->incoming_dest != loop->head)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d is not entered from head\n",
		 loop->loop_no);
      return false;
    }

  /* Calls or asm could clobber the loop registers (LCOUNT etc.).  */
  if (loop->has_call || loop->has_asm)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d has invalid insn\n",
		 loop->loop_no);
      return false;
    }

  /* Scan all the blocks to make sure they don't use iter_reg.  */
  if (loop->iter_reg_used || loop->iter_reg_used_outside)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d uses iterator\n",
		 loop->loop_no);
      return false;
    }

  /* Check if start_label appears before doloop_end.  */
  insn = loop->start_label;
  while (insn && insn != loop->loop_end)
    insn = NEXT_INSN (insn);

  if (!insn)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
		 loop->loop_no);
      return false;
    }

  /* Get the loop iteration register.  */
  iter_reg = loop->iter_reg;

  gcc_assert (REG_P (iter_reg));

  entry_edge = NULL;

  /* Find the fall-through entry edge; the LOOP insn must be reached by
     falling into the loop.  */
  FOR_EACH_VEC_SAFE_ELT (loop->incoming, i, entry_edge)
    if (entry_edge->flags & EDGE_FALLTHRU)
      break;

  if (entry_edge == NULL)
    return false;

  /* Place the zero_cost_loop_start instruction before the loop.  */
  entry_bb = entry_edge->src;

  start_sequence ();

  insn = emit_insn (gen_zero_cost_loop_start (loop->iter_reg,
					      loop->start_label,
					      loop->iter_reg));

  seq = get_insns ();

  entry_after = BB_END (entry_bb);
  if (!single_succ_p (entry_bb) || vec_safe_length (loop->incoming) > 1
      || !entry_after)
    {
      /* The entry block has other successors or the loop has several
	 incoming edges: build a fresh pre-header block holding the
	 LOOP insn and reroute every incoming edge through it.  */
      basic_block new_bb;
      edge e;
      edge_iterator ei;

      emit_insn_before (seq, BB_HEAD (loop->head));
      seq = emit_label_before (gen_label_rtx (), seq);
      new_bb = create_basic_block (seq, insn, entry_bb);
      FOR_EACH_EDGE (e, ei, loop->incoming)
	{
	  if (!(e->flags & EDGE_FALLTHRU))
	    redirect_edge_and_branch_force (e, new_bb);
	  else
	    redirect_edge_succ (e, new_bb);
	}

      make_edge (new_bb, loop->head, 0);
    }
  else
    {
      /* Append the LOOP insn to the entry block, after skipping any
	 trailing debug insns / non-BB notes.  */
      while (DEBUG_INSN_P (entry_after)
	     || (NOTE_P (entry_after)
		 && NOTE_KIND (entry_after) != NOTE_INSN_BASIC_BLOCK))
	entry_after = PREV_INSN (entry_after);

      emit_insn_after (seq, entry_after);
    }

  end_sequence ();

  return true;
}
4302 1.1 mrg
4303 1.1 mrg /* A callback for the hw-doloop pass. Called when a loop we have discovered
4304 1.1 mrg turns out not to be optimizable; we have to split the loop_end pattern into
4305 1.1 mrg a subtract and a test. */
4306 1.1 mrg
4307 1.1 mrg static void
4308 1.1 mrg hwloop_fail (hwloop_info loop)
4309 1.1 mrg {
4310 1.1 mrg rtx test;
4311 1.1 mrg rtx_insn *insn = loop->loop_end;
4312 1.1 mrg
4313 1.1 mrg emit_insn_before (gen_addsi3 (loop->iter_reg,
4314 1.1 mrg loop->iter_reg,
4315 1.1 mrg constm1_rtx),
4316 1.1 mrg loop->loop_end);
4317 1.1 mrg
4318 1.1 mrg test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
4319 1.1 mrg insn = emit_jump_insn_before (gen_cbranchsi4 (test,
4320 1.1 mrg loop->iter_reg, const0_rtx,
4321 1.1 mrg loop->start_label),
4322 1.1 mrg loop->loop_end);
4323 1.1 mrg
4324 1.1 mrg JUMP_LABEL (insn) = loop->start_label;
4325 1.1 mrg LABEL_NUSES (loop->start_label)++;
4326 1.1 mrg delete_insn (loop->loop_end);
4327 1.1 mrg }
4328 1.1 mrg
4329 1.1 mrg /* A callback for the hw-doloop pass. This function examines INSN; if
4330 1.1 mrg it is a doloop_end pattern we recognize, return the reg rtx for the
4331 1.1 mrg loop counter. Otherwise, return NULL_RTX. */
4332 1.1 mrg
4333 1.1 mrg static rtx
4334 1.1 mrg hwloop_pattern_reg (rtx_insn *insn)
4335 1.1 mrg {
4336 1.1 mrg rtx reg;
4337 1.1 mrg
4338 1.1 mrg if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
4339 1.1 mrg return NULL_RTX;
4340 1.1 mrg
4341 1.1 mrg reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
4342 1.1 mrg if (!REG_P (reg))
4343 1.1 mrg return NULL_RTX;
4344 1.1 mrg
4345 1.1 mrg return reg;
4346 1.1 mrg }
4347 1.1 mrg
4348 1.1 mrg
/* Callback vector handed to the generic hw-doloop pass; entries are the
   pattern-recognition, optimize and fail hooks defined above, in the
   positional order required by struct hw_doloop_hooks.  */
static struct hw_doloop_hooks xtensa_doloop_hooks =
{
  hwloop_pattern_reg,
  hwloop_optimize,
  hwloop_fail
};
4355 1.1 mrg
4356 1.1 mrg /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
4357 1.1 mrg and tries to rewrite the RTL of these loops so that proper Xtensa
4358 1.1 mrg hardware loops are generated. */
4359 1.1 mrg
4360 1.1 mrg static void
4361 1.1 mrg xtensa_reorg_loops (void)
4362 1.1 mrg {
4363 1.1 mrg if (TARGET_LOOPS)
4364 1.1 mrg reorg_loops (false, &xtensa_doloop_hooks);
4365 1.1 mrg }
4366 1.1 mrg
/* Implement the TARGET_MACHINE_DEPENDENT_REORG pass.  */

static void
xtensa_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it now.  */
  compute_bb_for_insn ();

  /* Refresh dataflow information before rewriting loops below.  */
  df_analyze ();

  /* Doloop optimization.  */
  xtensa_reorg_loops ();
}
4381 1.1 mrg
4382 1.1 mrg /* Update register usage after having seen the compiler flags. */
4383 1.1 mrg
4384 1.1 mrg static void
4385 1.1 mrg xtensa_conditional_register_usage (void)
4386 1.1 mrg {
4387 1.1 mrg unsigned i, c_mask;
4388 1.1 mrg
4389 1.1 mrg c_mask = TARGET_WINDOWED_ABI ? (1 << 1) : (1 << 2);
4390 1.1 mrg
4391 1.1 mrg for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4392 1.1 mrg {
4393 1.1 mrg /* Set/reset conditionally defined registers from
4394 1.1 mrg CALL_USED_REGISTERS initializer. */
4395 1.1 mrg if (call_used_regs[i] > 1)
4396 1.1 mrg call_used_regs[i] = !!(call_used_regs[i] & c_mask);
4397 1.1 mrg }
4398 1.1 mrg
4399 1.1 mrg /* Remove hard FP register from the preferred reload registers set. */
4400 1.1 mrg CLEAR_HARD_REG_BIT (reg_class_contents[(int)RL_REGS],
4401 1.1 mrg HARD_FRAME_POINTER_REGNUM);
4402 1.1 mrg }
4403 1.1 mrg
4404 1.1 mrg /* Map hard register number to register class */
4405 1.1 mrg
4406 1.1 mrg enum reg_class xtensa_regno_to_class (int regno)
4407 1.1 mrg {
4408 1.1 mrg static const enum reg_class regno_to_class[FIRST_PSEUDO_REGISTER] =
4409 1.1 mrg {
4410 1.1 mrg RL_REGS, SP_REG, RL_REGS, RL_REGS,
4411 1.1 mrg RL_REGS, RL_REGS, RL_REGS, RL_REGS,
4412 1.1 mrg RL_REGS, RL_REGS, RL_REGS, RL_REGS,
4413 1.1 mrg RL_REGS, RL_REGS, RL_REGS, RL_REGS,
4414 1.1 mrg AR_REGS, AR_REGS, BR_REGS,
4415 1.1 mrg FP_REGS, FP_REGS, FP_REGS, FP_REGS,
4416 1.1 mrg FP_REGS, FP_REGS, FP_REGS, FP_REGS,
4417 1.1 mrg FP_REGS, FP_REGS, FP_REGS, FP_REGS,
4418 1.1 mrg FP_REGS, FP_REGS, FP_REGS, FP_REGS,
4419 1.1 mrg ACC_REG,
4420 1.1 mrg };
4421 1.1 mrg
4422 1.1 mrg if (regno == HARD_FRAME_POINTER_REGNUM)
4423 1.1 mrg return GR_REGS;
4424 1.1 mrg else
4425 1.1 mrg return regno_to_class[regno];
4426 1.1 mrg }
4427 1.1 mrg
4428 1.1 mrg /* Implement TARGET_CONSTANT_ALIGNMENT. Align string constants and
4429 1.1 mrg constructors to at least a word boundary. The typical use of this
4430 1.1 mrg macro is to increase alignment for string constants to be word
4431 1.1 mrg aligned so that 'strcpy' calls that copy constants can be done
4432 1.1 mrg inline. */
4433 1.1 mrg
4434 1.1 mrg static HOST_WIDE_INT
4435 1.1 mrg xtensa_constant_alignment (const_tree exp, HOST_WIDE_INT align)
4436 1.1 mrg {
4437 1.1 mrg if ((TREE_CODE (exp) == STRING_CST || TREE_CODE (exp) == CONSTRUCTOR)
4438 1.1 mrg && !optimize_size)
4439 1.1 mrg return MAX (align, BITS_PER_WORD);
4440 1.1 mrg return align;
4441 1.1 mrg }
4442 1.1 mrg
4443 1.1 mrg static bool
4444 1.1 mrg xtensa_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
4445 1.1 mrg {
4446 1.1 mrg gcc_assert (from == ARG_POINTER_REGNUM || from == FRAME_POINTER_REGNUM);
4447 1.1 mrg
4448 1.1 mrg /* If we need a frame pointer, ARG_POINTER_REGNUM and FRAME_POINTER_REGNUM
4449 1.1 mrg can only eliminate to HARD_FRAME_POINTER_REGNUM. */
4450 1.1 mrg return to == HARD_FRAME_POINTER_REGNUM
4451 1.1 mrg || (!frame_pointer_needed && to == STACK_POINTER_REGNUM);
4452 1.1 mrg }
4453 1.1 mrg
4454 1.1 mrg /* Implement TARGET_STARTING_FRAME_OFFSET. */
4455 1.1 mrg
4456 1.1 mrg static HOST_WIDE_INT
4457 1.1 mrg xtensa_starting_frame_offset (void)
4458 1.1 mrg {
4459 1.1 mrg if (FRAME_GROWS_DOWNWARD)
4460 1.1 mrg return 0;
4461 1.1 mrg return crtl->outgoing_args_size;
4462 1.1 mrg }
4463 1.1 mrg
4464 1.1 mrg /* Implement TARGET_ASAN_SHADOW_OFFSET. */
4465 1.1 mrg
4466 1.1 mrg static unsigned HOST_WIDE_INT
4467 1.1 mrg xtensa_asan_shadow_offset (void)
4468 1.1 mrg {
4469 1.1 mrg return HOST_WIDE_INT_UC (0x10000000);
4470 1.1 mrg }
4471 1.1 mrg
4472 1.1 mrg static rtx
4473 1.1 mrg xtensa_delegitimize_address (rtx op)
4474 1.1 mrg {
4475 1.1 mrg switch (GET_CODE (op))
4476 1.1 mrg {
4477 1.1 mrg case CONST:
4478 1.1 mrg return xtensa_delegitimize_address (XEXP (op, 0));
4479 1.1 mrg
4480 1.1 mrg case UNSPEC:
4481 1.1 mrg if (XINT (op, 1) == UNSPEC_PLT)
4482 1.1 mrg return XVECEXP(op, 0, 0);
4483 1.1 mrg break;
4484 1.1 mrg
4485 1.1 mrg default:
4486 1.1 mrg break;
4487 1.1 mrg }
4488 1.1 mrg return op;
4489 1.1 mrg }
4490
4491 #include "gt-xtensa.h"
4492