/* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2022 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #define IN_TARGET_CODE 1
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "stringpool.h"
33 #include "attribs.h"
34 #include "insn-config.h"
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "diagnostic-core.h"
38 #include "alias.h"
39 #include "stor-layout.h"
40 #include "varasm.h"
41 #include "calls.h"
42 #include "output.h"
43 #include "insn-attr.h"
44 #include "explow.h"
45 #include "expr.h"
46 #include "tm-constrs.h"
47 #include "builtins.h"
48 #include "opts.h"
49
50 /* This file should be included last. */
51 #include "target-def.h"
52
/* Array of valid operand punctuation characters.  Indexed by character
   code; a nonzero entry means TARGET_PRINT_OPERAND_PUNCT_VALID_P accepts
   the character.  Filled in by m32r_init.  */
static char m32r_punct_chars[256];

/* Machine-specific symbol_ref flags.  */
/* The addressing model chosen for a symbol is stored in two
   target-dependent SYMBOL_REF flag bits, starting at this shift.  */
#define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
/* Extract the enum m32r_model that m32r_encode_section_info stored in
   symbol X's flags.  */
#define SYMBOL_REF_MODEL(X) \
  ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))

/* For string literals, etc.  Matches assembler names of the form "*.LCn"
   generated for constant-pool/literal data.  */
#define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
63
/* Forward declarations for the static functions defined below and
   installed in the target hook table.  */
static void m32r_option_override (void);
static void init_reg_tables (void);
static void block_move_call (rtx, rtx, rtx);
static int m32r_is_insn (rtx);
static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
static void m32r_print_operand (FILE *, rtx, int);
static void m32r_print_operand_address (FILE *, machine_mode, rtx);
static bool m32r_print_operand_punct_valid_p (unsigned char code);
static void m32r_output_function_prologue (FILE *);
static void m32r_output_function_epilogue (FILE *);

static void m32r_file_start (void);

static int m32r_adjust_priority (rtx_insn *, int);
static int m32r_issue_rate (void);

static void m32r_encode_section_info (tree, rtx, int);
static bool m32r_in_small_data_p (const_tree);
static bool m32r_return_in_memory (const_tree, const_tree);
static rtx m32r_function_value (const_tree, const_tree, bool);
static rtx m32r_libcall_value (machine_mode, const_rtx);
static bool m32r_function_value_regno_p (const unsigned int);
static void m32r_setup_incoming_varargs (cumulative_args_t,
					 const function_arg_info &,
					 int *, int);
static void init_idents (void);
static bool m32r_rtx_costs (rtx, machine_mode, int, int, int *, bool speed);
static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
static bool m32r_pass_by_reference (cumulative_args_t,
				    const function_arg_info &arg);
static int m32r_arg_partial_bytes (cumulative_args_t,
				   const function_arg_info &);
static rtx m32r_function_arg (cumulative_args_t, const function_arg_info &);
static void m32r_function_arg_advance (cumulative_args_t,
				       const function_arg_info &);
static bool m32r_can_eliminate (const int, const int);
static void m32r_conditional_register_usage (void);
static void m32r_trampoline_init (rtx, tree, rtx);
static bool m32r_legitimate_constant_p (machine_mode, rtx);
static bool m32r_attribute_identifier (const_tree);
static bool m32r_hard_regno_mode_ok (unsigned int, machine_mode);
static bool m32r_modes_tieable_p (machine_mode, machine_mode);
static HOST_WIDE_INT m32r_starting_frame_offset (void);
111
/* M32R specific attributes.  The entry layout must match
   struct attribute_spec exactly.  "interrupt" takes no arguments and
   needs no handler; "model" takes exactly one argument, validated by
   m32r_handle_model_attribute.  The all-NULL entry terminates the
   table.  */

static const struct attribute_spec m32r_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  { "interrupt", 0, 0, true, false, false, false, NULL, NULL },
  { "model", 1, 1, true, false, false, false, m32r_handle_model_attribute,
    NULL },
  { NULL, 0, 0, false, false, false, false, NULL, NULL }
};
124
/* Initialize the GCC target structure.  Each #undef/#define pair below
   overrides one default target hook with this port's implementation;
   TARGET_INITIALIZER then collects them into the targetm vector.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
#undef TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P
#define TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P m32r_attribute_identifier

/* This port still uses reload rather than LRA.  */
#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

/* Addressing.  */
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p

/* Assembler output directives.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND m32r_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START m32r_file_start

/* Scheduling.  */
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE m32r_issue_rate

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE m32r_option_override

/* Sections and small data.  */
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p


/* Costs.  */
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS m32r_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

/* Calling conventions.  */
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY m32r_return_in_memory

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE m32r_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE m32r_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG m32r_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE m32r_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT m32r_trampoline_init

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p

#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK m32r_hard_regno_mode_ok

#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P m32r_modes_tieable_p

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings

#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET m32r_starting_frame_offset

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

struct gcc_target targetm = TARGET_INITIALIZER;
236
237 /* Called by m32r_option_override to initialize various things. */
239
240 void
241 m32r_init (void)
242 {
243 init_reg_tables ();
244
245 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
246 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
247 m32r_punct_chars['#'] = 1;
248 m32r_punct_chars['@'] = 1; /* ??? no longer used */
249
250 /* Provide default value if not specified. */
251 if (!OPTION_SET_P (g_switch_value))
252 g_switch_value = SDATA_DEFAULT_SIZE;
253 }
254
/* Implement TARGET_OPTION_OVERRIDE.  Runs after option processing.  */

static void
m32r_option_override (void)
{
  /* These need to be done at start up.
     It's convenient to do them here.  */
  m32r_init ();
  /* Subtarget hook for any additional option tweaks (defined in the
     subtarget headers; may be empty).  */
  SUBTARGET_OVERRIDE_OPTIONS;
}
263
264 /* Vectors to keep interesting information about registers where it can easily
265 be got. We use to use the actual mode value as the bit number, but there
266 is (or may be) more than 32 modes now. Instead we use two tables: one
267 indexed by hard register number, and one indexed by mode. */
268
269 /* The purpose of m32r_mode_class is to shrink the range of modes so that
270 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
271 mapped into one m32r_mode_class mode. */
272
273 enum m32r_mode_class
274 {
275 C_MODE,
276 S_MODE, D_MODE, T_MODE, O_MODE,
277 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
278 };
279
280 /* Modes for condition codes. */
281 #define C_MODES (1 << (int) C_MODE)
282
283 /* Modes for single-word and smaller quantities. */
284 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
285
286 /* Modes for double-word and smaller quantities. */
287 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
288
289 /* Modes for quad-word and smaller quantities. */
290 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
291
292 /* Modes for accumulators. */
293 #define A_MODES (1 << (int) A_MODE)
294
295 /* Value is 1 if register/mode pair is acceptable on arc. */
296
297 static const unsigned int m32r_hard_regno_modes[FIRST_PSEUDO_REGISTER] =
298 {
299 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
300 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
301 S_MODES, C_MODES, A_MODES, A_MODES
302 };
303
304 static unsigned int m32r_mode_class [NUM_MACHINE_MODES];
305
306 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
307
308 static void
309 init_reg_tables (void)
310 {
311 int i;
312
313 for (i = 0; i < NUM_MACHINE_MODES; i++)
314 {
315 machine_mode m = (machine_mode) i;
316
317 switch (GET_MODE_CLASS (m))
318 {
319 case MODE_INT:
320 case MODE_PARTIAL_INT:
321 case MODE_COMPLEX_INT:
322 if (GET_MODE_SIZE (m) <= 4)
323 m32r_mode_class[i] = 1 << (int) S_MODE;
324 else if (GET_MODE_SIZE (m) == 8)
325 m32r_mode_class[i] = 1 << (int) D_MODE;
326 else if (GET_MODE_SIZE (m) == 16)
327 m32r_mode_class[i] = 1 << (int) T_MODE;
328 else if (GET_MODE_SIZE (m) == 32)
329 m32r_mode_class[i] = 1 << (int) O_MODE;
330 else
331 m32r_mode_class[i] = 0;
332 break;
333 case MODE_FLOAT:
334 case MODE_COMPLEX_FLOAT:
335 if (GET_MODE_SIZE (m) <= 4)
336 m32r_mode_class[i] = 1 << (int) SF_MODE;
337 else if (GET_MODE_SIZE (m) == 8)
338 m32r_mode_class[i] = 1 << (int) DF_MODE;
339 else if (GET_MODE_SIZE (m) == 16)
340 m32r_mode_class[i] = 1 << (int) TF_MODE;
341 else if (GET_MODE_SIZE (m) == 32)
342 m32r_mode_class[i] = 1 << (int) OF_MODE;
343 else
344 m32r_mode_class[i] = 0;
345 break;
346 case MODE_CC:
347 m32r_mode_class[i] = 1 << (int) C_MODE;
348 break;
349 default:
350 m32r_mode_class[i] = 0;
351 break;
352 }
353 }
354
355 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
356 {
357 if (GPR_P (i))
358 m32r_regno_reg_class[i] = GENERAL_REGS;
359 else if (i == ARG_POINTER_REGNUM)
360 m32r_regno_reg_class[i] = GENERAL_REGS;
361 else
362 m32r_regno_reg_class[i] = NO_REGS;
363 }
364 }
365
/* M32R specific attribute support.

   interrupt - for interrupt functions

   model - select code model used to access object

   small: addresses use 24 bits, use bl to make calls
   medium: addresses use 32 bits, use bl to make calls
   large: addresses use 32 bits, use seth/add3/jl to make calls

   Grep for MODEL in m32r.h for more info.  */

/* Cached identifier nodes for the accepted "model" attribute arguments
   (each spelling with and without the __...__ decoration).  Created
   lazily by init_idents; identifiers are interned, so attribute
   arguments can be checked by pointer comparison.  */
static tree small_ident1;
static tree small_ident2;
static tree medium_ident1;
static tree medium_ident2;
static tree large_ident1;
static tree large_ident2;
385
386 static void
387 init_idents (void)
388 {
389 if (small_ident1 == 0)
390 {
391 small_ident1 = get_identifier ("small");
392 small_ident2 = get_identifier ("__small__");
393 medium_ident1 = get_identifier ("medium");
394 medium_ident2 = get_identifier ("__medium__");
395 large_ident1 = get_identifier ("large");
396 large_ident2 = get_identifier ("__large__");
397 }
398 }
399
400 /* Handle an "model" attribute; arguments as in
401 struct attribute_spec.handler. */
402 static tree
403 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
404 tree args, int flags ATTRIBUTE_UNUSED,
405 bool *no_add_attrs)
406 {
407 tree arg;
408
409 init_idents ();
410 arg = TREE_VALUE (args);
411
412 if (arg != small_ident1
413 && arg != small_ident2
414 && arg != medium_ident1
415 && arg != medium_ident2
416 && arg != large_ident1
417 && arg != large_ident2)
418 {
419 warning (OPT_Wattributes, "invalid argument of %qs attribute",
420 IDENTIFIER_POINTER (name));
421 *no_add_attrs = true;
422 }
423
424 return NULL_TREE;
425 }
426
427 static bool
428 m32r_attribute_identifier (const_tree name)
429 {
430 return strcmp (IDENTIFIER_POINTER (name), "model") == 0
431 || strcmp (IDENTIFIER_POINTER (name), "__model__") == 0;
432 }
433
434 /* Encode section information of DECL, which is either a VAR_DECL,
436 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
437
438 For the M32R we want to record:
439
440 - whether the object lives in .sdata/.sbss.
441 - what code model should be used to access the object
442 */
443
444 static void
445 m32r_encode_section_info (tree decl, rtx rtl, int first)
446 {
447 int extra_flags = 0;
448 tree model_attr;
449 enum m32r_model model;
450
451 default_encode_section_info (decl, rtl, first);
452
453 if (!DECL_P (decl))
454 return;
455
456 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
457 if (model_attr)
458 {
459 tree id;
460
461 init_idents ();
462
463 id = TREE_VALUE (TREE_VALUE (model_attr));
464
465 if (id == small_ident1 || id == small_ident2)
466 model = M32R_MODEL_SMALL;
467 else if (id == medium_ident1 || id == medium_ident2)
468 model = M32R_MODEL_MEDIUM;
469 else if (id == large_ident1 || id == large_ident2)
470 model = M32R_MODEL_LARGE;
471 else
472 gcc_unreachable (); /* shouldn't happen */
473 }
474 else
475 {
476 if (TARGET_MODEL_SMALL)
477 model = M32R_MODEL_SMALL;
478 else if (TARGET_MODEL_MEDIUM)
479 model = M32R_MODEL_MEDIUM;
480 else if (TARGET_MODEL_LARGE)
481 model = M32R_MODEL_LARGE;
482 else
483 gcc_unreachable (); /* shouldn't happen */
484 }
485 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
486
487 if (extra_flags)
488 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
489 }
490
491 /* Only mark the object as being small data area addressable if
492 it hasn't been explicitly marked with a code model.
493
494 The user can explicitly put an object in the small data area with the
495 section attribute. If the object is in sdata/sbss and marked with a
496 code model do both [put the object in .sdata and mark it as being
497 addressed with a specific code model - don't mark it as being addressed
498 with an SDA reloc though]. This is ok and might be useful at times. If
499 the object doesn't fit the linker will give an error. */
500
501 static bool
502 m32r_in_small_data_p (const_tree decl)
503 {
504 const char *section;
505
506 if (TREE_CODE (decl) != VAR_DECL)
507 return false;
508
509 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
510 return false;
511
512 section = DECL_SECTION_NAME (decl);
513 if (section)
514 {
515 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
516 return true;
517 }
518 else
519 {
520 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
521 {
522 int size = int_size_in_bytes (TREE_TYPE (decl));
523
524 if (size > 0 && size <= g_switch_value)
525 return true;
526 }
527 }
528
529 return false;
530 }
531
/* Do anything needed before RTL is emitted for each function.
   Currently a no-op for the M32R.  */

void
m32r_init_expanders (void)
{
  /* ??? At one point there was code here.  The function is left in
     to make it easy to experiment.  */
}
540
541 bool
543 call_operand (rtx op, machine_mode mode)
544 {
545 if (!MEM_P (op))
546 return 0;
547 op = XEXP (op, 0);
548 return call_address_operand (op, mode);
549 }
550
551 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
552
553 bool
554 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
555 {
556 if (! TARGET_SDATA_USE)
557 return 0;
558
559 if (GET_CODE (op) == SYMBOL_REF)
560 return SYMBOL_REF_SMALL_P (op);
561
562 if (GET_CODE (op) == CONST
563 && GET_CODE (XEXP (op, 0)) == PLUS
564 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
565 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
566 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
567
568 return 0;
569 }
570
571 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
572
573 int
574 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
575 {
576 rtx sym;
577
578 if (flag_pic)
579 return 0;
580
581 if (GET_CODE (op) == LABEL_REF)
582 return TARGET_ADDR24;
583
584 if (GET_CODE (op) == SYMBOL_REF)
585 sym = op;
586 else if (GET_CODE (op) == CONST
587 && GET_CODE (XEXP (op, 0)) == PLUS
588 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
589 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
590 sym = XEXP (XEXP (op, 0), 0);
591 else
592 return 0;
593
594 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
595 return 1;
596
597 if (TARGET_ADDR24
598 && (CONSTANT_POOL_ADDRESS_P (sym)
599 || LIT_NAME_P (XSTR (sym, 0))))
600 return 1;
601
602 return 0;
603 }
604
605 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
606
607 int
608 addr32_operand (rtx op, machine_mode mode)
609 {
610 rtx sym;
611
612 if (GET_CODE (op) == LABEL_REF)
613 return TARGET_ADDR32;
614
615 if (GET_CODE (op) == SYMBOL_REF)
616 sym = op;
617 else if (GET_CODE (op) == CONST
618 && GET_CODE (XEXP (op, 0)) == PLUS
619 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
620 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
621 && ! flag_pic)
622 sym = XEXP (XEXP (op, 0), 0);
623 else
624 return 0;
625
626 return (! addr24_operand (sym, mode)
627 && ! small_data_operand (sym, mode));
628 }
629
630 /* Return 1 if OP is a function that can be called with the `bl' insn. */
631
632 int
633 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
634 {
635 if (flag_pic)
636 return 1;
637
638 if (GET_CODE (op) == SYMBOL_REF)
639 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
640
641 return TARGET_CALL26;
642 }
643
644 /* Return 1 if OP is a DImode const we want to handle inline.
645 This must match the code in the movdi pattern.
646 It is used by the 'G' constraint. */
647
648 int
649 easy_di_const (rtx op)
650 {
651 rtx high_rtx, low_rtx;
652 HOST_WIDE_INT high, low;
653
654 split_double (op, &high_rtx, &low_rtx);
655 high = INTVAL (high_rtx);
656 low = INTVAL (low_rtx);
657 /* Pick constants loadable with 2 16-bit `ldi' insns. */
658 if (high >= -128 && high <= 127
659 && low >= -128 && low <= 127)
660 return 1;
661 return 0;
662 }
663
664 /* Return 1 if OP is a DFmode const we want to handle inline.
665 This must match the code in the movdf pattern.
666 It is used by the 'H' constraint. */
667
668 int
669 easy_df_const (rtx op)
670 {
671 long l[2];
672
673 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (op), l);
674 if (l[0] == 0 && l[1] == 0)
675 return 1;
676 if ((l[0] & 0xffff) == 0 && l[1] == 0)
677 return 1;
678 return 0;
679 }
680
681 /* Return 1 if OP is (mem (reg ...)).
682 This is used in insn length calcs. */
683
684 bool
685 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
686 {
687 return MEM_P (op) && REG_P (XEXP (op, 0));
688 }
689
690 /* Return nonzero if ARG must be passed by indirect reference. */
691
692 static bool
693 m32r_pass_by_reference (cumulative_args_t, const function_arg_info &arg)
694 {
695 int size = arg.type_size_in_bytes ();
696 return (size < 0 || size > 8);
697 }
698
/* Comparisons.  */

/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for compare [arg0 of the if_then_else].
   If need_compare is true then the comparison insn must be generated, rather
   than being subsumed into the following branch instruction.

   The M32R only has EQ, LT and LTU compare insns (setting the carry/
   condition bit), so the other codes are synthesized by swapping the
   operands (must_swap), adjusting a constant operand by one, and/or
   inverting the branch sense (branch_code).  */

rtx
gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
{
  enum rtx_code compare_code;	/* Hardware compare actually emitted.  */
  enum rtx_code branch_code;	/* Sense of the branch on the C bit.  */
  rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
  int must_swap = 0;		/* Swap x/y to get <= / > from <.  */

  switch (code)
    {
    case EQ:  compare_code = EQ;  branch_code = NE; break;
    case NE:  compare_code = EQ;  branch_code = EQ; break;
    case LT:  compare_code = LT;  branch_code = NE; break;
    case LE:  compare_code = LT;  branch_code = EQ; must_swap = 1; break;
    case GT:  compare_code = LT;  branch_code = NE; must_swap = 1; break;
    case GE:  compare_code = LT;  branch_code = EQ; break;
    case LTU: compare_code = LTU; branch_code = NE; break;
    case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
    case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
    case GEU: compare_code = LTU; branch_code = EQ; break;

    default:
      gcc_unreachable ();
    }

  if (need_compare)
    {
      /* Emit an explicit compare insn and return a condition on the C
	 bit; CODE is adjusted in place so the returned rtx is correct.  */
      switch (compare_code)
	{
	case EQ:
	  if (satisfies_constraint_P (y)		/* Reg equal to small const.  */
	      && y != const0_rtx)
	    {
	      rtx tmp = gen_reg_rtx (SImode);

	      /* x == y  <=>  (x - y) == 0; saves loading y into a reg.  */
	      emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
	      x = tmp;
	      y = const0_rtx;
	    }
	  else if (CONSTANT_P (y))			/* Reg equal to const.  */
	    {
	      rtx tmp = force_reg (GET_MODE (x), y);
	      y = tmp;
	    }

	  if (register_operand (y, SImode)		/* Reg equal to reg.  */
	      || y == const0_rtx)			/* Reg equal to zero.  */
	    {
	      emit_insn (gen_cmp_eqsi_insn (x, y));

	      return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
	    }
	  break;

	case LT:
	  if (register_operand (y, SImode)
	      || satisfies_constraint_P (y))
	    {
	      rtx tmp = gen_reg_rtx (SImode);		/* Reg compared to reg.  */

	      switch (code)
		{
		case LT:
		  emit_insn (gen_cmp_ltsi_insn (x, y));
		  code = EQ;
		  break;
		case LE:
		  /* x <= y  <=>  x < y + 1; for y == 0 use literal 1.  */
		  if (y == const0_rtx)
		    tmp = const1_rtx;
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltsi_insn (x, tmp));
		  code = EQ;
		  break;
		case GT:
		  /* x > y  <=>  !(x < y + 1).  */
		  if (CONST_INT_P (y))
		    tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltsi_insn (x, tmp));
		  code = NE;
		  break;
		case GE:
		  /* x >= y  <=>  !(x < y).  */
		  emit_insn (gen_cmp_ltsi_insn (x, y));
		  code = NE;
		  break;
		default:
		  gcc_unreachable ();
		}

	      return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
	    }
	  break;

	case LTU:
	  if (register_operand (y, SImode)
	      || satisfies_constraint_P (y))
	    {
	      rtx tmp = gen_reg_rtx (SImode);		/* Reg (unsigned) compared to reg.  */

	      /* Same transformations as the signed LT case above, using
		 the unsigned compare insn.  */
	      switch (code)
		{
		case LTU:
		  emit_insn (gen_cmp_ltusi_insn (x, y));
		  code = EQ;
		  break;
		case LEU:
		  if (y == const0_rtx)
		    tmp = const1_rtx;
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltusi_insn (x, tmp));
		  code = EQ;
		  break;
		case GTU:
		  if (CONST_INT_P (y))
		    tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltusi_insn (x, tmp));
		  code = NE;
		  break;
		case GEU:
		  emit_insn (gen_cmp_ltusi_insn (x, y));
		  code = NE;
		  break;
		default:
		  gcc_unreachable ();
		}

	      return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* The comparison may be folded into the branch: return a
	 comparison of the operands themselves when possible.  */

      /* Reg/reg equal comparison.  */
      if (compare_code == EQ
	  && register_operand (y, SImode))
	return gen_rtx_fmt_ee (code, CCmode, x, y);

      /* Reg/zero signed comparison.  */
      if ((compare_code == EQ || compare_code == LT)
	  && y == const0_rtx)
	return gen_rtx_fmt_ee (code, CCmode, x, y);

      /* Reg/smallconst equal comparison.  */
      if (compare_code == EQ
	  && satisfies_constraint_P (y))
	{
	  /* x == y  <=>  (x - y) == 0.  */
	  rtx tmp = gen_reg_rtx (SImode);

	  emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
	  return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
	}

      /* Reg/const equal comparison.  */
      if (compare_code == EQ
	  && CONSTANT_P (y))
	{
	  rtx tmp = force_reg (GET_MODE (x), y);

	  return gen_rtx_fmt_ee (code, CCmode, x, tmp);
	}
    }

  /* Fall back: emit the hardware compare (operands swapped for the
     synthesized <= / > codes) and return a branch on the C bit.  */
  if (CONSTANT_P (y))
    {
      if (must_swap)
	/* y becomes the first operand, so it must be a register.  */
	y = force_reg (GET_MODE (x), y);
      else
	{
	  int ok_const = reg_or_int16_operand (y, GET_MODE (y));

	  if (! ok_const)
	    y = force_reg (GET_MODE (x), y);
	}
    }

  switch (compare_code)
    {
    case EQ :
      emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
      break;
    case LT :
      emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
      break;
    case LTU :
      emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
}
908
/* Try to expand "OP0 = OP1 CODE OP2" (a store-flag / cstoresi4
   operation) using the M32R set-condition insns.  OP0 and OP1 are
   SImode.  Returns true if insns were emitted, false to let the
   caller fall back to another expansion.  */

bool
gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
{
  machine_mode mode = GET_MODE (op0);

  gcc_assert (mode == SImode);
  switch (code)
    {
    case EQ:
      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      /* M32RX/M32R2 have a general seq insn.  */
      if (TARGET_M32RX || TARGET_M32R2)
	{
	  if (!reg_or_zero_operand (op2, mode))
	    op2 = force_reg (mode, op2);

	  emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
	  return true;
	}
      /* Base M32R: special-case compare against zero ...  */
      if (CONST_INT_P (op2) && INTVAL (op2) == 0)
	{
	  emit_insn (gen_seq_zero_insn (op0, op1));
	  return true;
	}

      /* ... otherwise use the generic seq pattern.  */
      if (!reg_or_eq_int16_operand (op2, mode))
	op2 = force_reg (mode, op2);

      emit_insn (gen_seq_insn (op0, op1, op2));
      return true;

    case NE:
      if (!CONST_INT_P (op2)
	  || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
	{
	  rtx reg;

	  /* The xor below needs a fresh pseudo, which we cannot create
	     once register allocation has started.  */
	  if (reload_completed || reload_in_progress)
	    return false;

	  /* op1 != op2  <=>  (op1 ^ op2) != 0.  */
	  reg = gen_reg_rtx (SImode);
	  emit_insn (gen_xorsi3 (reg, op1, op2));
	  op1 = reg;

	  if (!register_operand (op1, mode))
	    op1 = force_reg (mode, op1);

	  emit_insn (gen_sne_zero_insn (op0, op1));
	  return true;
	}
      return false;

    case LT:
    case GT:
      /* op1 > op2  <=>  op2 < op1: swap and handle as LT.  */
      if (code == GT)
	{
	  rtx tmp = op2;
	  op2 = op1;
	  op1 = tmp;
	  code = LT;
	}

      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      if (!reg_or_int16_operand (op2, mode))
	op2 = force_reg (mode, op2);

      emit_insn (gen_slt_insn (op0, op1, op2));
      return true;

    case LTU:
    case GTU:
      /* Unsigned analogue of the LT/GT case above.  */
      if (code == GTU)
	{
	  rtx tmp = op2;
	  op2 = op1;
	  op1 = tmp;
	  code = LTU;
	}

      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      if (!reg_or_int16_operand (op2, mode))
	op2 = force_reg (mode, op2);

      emit_insn (gen_sltu_insn (op0, op1, op2));
      return true;

    case GE:
    case GEU:
      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      if (!reg_or_int16_operand (op2, mode))
	op2 = force_reg (mode, op2);

      if (code == GE)
	emit_insn (gen_sge_insn (op0, op1, op2));
      else
	emit_insn (gen_sgeu_insn (op0, op1, op2));
      return true;

    case LE:
    case LEU:
      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      if (CONST_INT_P (op2))
	{
	  /* op1 <= C  <=>  op1 < C + 1, guarding against overflow of
	     C + 1 past INT_MAX (then the comparison is always true).  */
	  HOST_WIDE_INT value = INTVAL (op2);
	  if (value >= 2147483647)
	    {
	      emit_move_insn (op0, const1_rtx);
	      return true;
	    }

	  op2 = GEN_INT (value + 1);
	  /* C + 1 must itself fit in a 16-bit immediate.  */
	  if (value < -32768 || value >= 32767)
	    op2 = force_reg (mode, op2);

	  if (code == LEU)
	    emit_insn (gen_sltu_insn (op0, op1, op2));
	  else
	    emit_insn (gen_slt_insn (op0, op1, op2));
	  return true;
	}

      if (!register_operand (op2, mode))
	op2 = force_reg (mode, op2);

      if (code == LEU)
	emit_insn (gen_sleu_insn (op0, op1, op2));
      else
	emit_insn (gen_sle_insn (op0, op1, op2));
      return true;

    default:
      gcc_unreachable ();
    }
}
1052
1053
/* Split a 2 word move (DI or DF) into component parts.
   OPERANDS[0] is the destination, OPERANDS[1] the source; returns the
   emitted insn sequence.  Word copy order is chosen so that a register
   that is both part of the value and part of an address is not
   clobbered before it is used.  */

rtx
gen_split_move_double (rtx operands[])
{
  machine_mode mode = GET_MODE (operands[0]);
  rtx dest = operands[0];
  rtx src  = operands[1];
  rtx val;

  /* We might have (SUBREG (MEM)) here, so just get rid of the
     subregs to make this code simpler.  It is safe to call
     alter_subreg any time after reload.  */
  if (GET_CODE (dest) == SUBREG)
    alter_subreg (&dest, true);
  if (GET_CODE (src) == SUBREG)
    alter_subreg (&src, true);

  start_sequence ();
  if (REG_P (dest))
    {
      int dregno = REGNO (dest);

      /* Reg = reg.  */
      if (REG_P (src))
	{
	  int sregno = REGNO (src);

	  int reverse = (dregno == sregno + 1);

	  /* We normally copy the low-numbered register first.  However, if
	     the first register operand 0 is the same as the second register of
	     operand 1, we must copy in the opposite order.  */
	  emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
				  operand_subword (src,  reverse, TRUE, mode)));

	  emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
				  operand_subword (src,  !reverse, TRUE, mode)));
	}

      /* Reg = constant.  */
      else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
	{
	  rtx words[2];
	  split_double (src, &words[0], &words[1]);
	  emit_insn (gen_rtx_SET (operand_subword (dest, 0, TRUE, mode),
				  words[0]));

	  emit_insn (gen_rtx_SET (operand_subword (dest, 1, TRUE, mode),
				  words[1]));
	}

      /* Reg = mem.  */
      else if (MEM_P (src))
	{
	  /* If the high-address word is used in the address, we must load it
	     last.  Otherwise, load it first.  */
	  int reverse = refers_to_regno_p (dregno, XEXP (src, 0));

	  /* We used to optimize loads from single registers as

		ld r1,r3+; ld r2,r3

	     if r3 were not used subsequently.  However, the REG_NOTES aren't
	     propagated correctly by the reload phase, and it can cause bad
	     code to be generated.  We could still try:

		ld r1,r3+; ld r2,r3; addi r3,-4

	     which saves 2 bytes and doesn't force longword alignment.  */
	  emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
				  adjust_address (src, SImode,
						  reverse * UNITS_PER_WORD)));

	  emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
				  adjust_address (src, SImode,
						  !reverse * UNITS_PER_WORD)));
	}
      else
	gcc_unreachable ();
    }

  /* Mem = reg.  */
  /* We used to optimize loads from single registers as

	st r1,r3; st r2,+r3

     if r3 were not used subsequently.  However, the REG_NOTES aren't
     propagated correctly by the reload phase, and it can cause bad
     code to be generated.  We could still try:

	st r1,r3; st r2,+r3; addi r3,-4

     which saves 2 bytes and doesn't force longword alignment.  */
  else if (MEM_P (dest) && REG_P (src))
    {
      emit_insn (gen_rtx_SET (adjust_address (dest, SImode, 0),
			      operand_subword (src, 0, TRUE, mode)));

      emit_insn (gen_rtx_SET (adjust_address (dest, SImode, UNITS_PER_WORD),
			      operand_subword (src, 1, TRUE, mode)));
    }

  else
    gcc_unreachable ();

  val = get_insns ();
  end_sequence ();
  return val;
}
1165
1166
1167 static int
1169 m32r_arg_partial_bytes (cumulative_args_t cum_v, const function_arg_info &arg)
1170 {
1171 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1172
1173 int words;
1174 unsigned int size =
1175 (arg.promoted_size_in_bytes () + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1176
1177 if (*cum >= M32R_MAX_PARM_REGS)
1178 words = 0;
1179 else if (*cum + size > M32R_MAX_PARM_REGS)
1180 words = (*cum + size) - M32R_MAX_PARM_REGS;
1181 else
1182 words = 0;
1183
1184 return words * UNITS_PER_WORD;
1185 }
1186
/* The ROUND_ADVANCE* macros are local to this file.  */

/* Round SIZE (in bytes) up to a word boundary, returning a word count.  */
#define ROUND_ADVANCE(SIZE) \
  (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)

/* Round arg MODE/TYPE up to the next word boundary.  BLKmode values
   have no meaningful mode size, so their size comes from the type.  */
#define ROUND_ADVANCE_ARG(MODE, TYPE) \
  ((MODE) == BLKmode \
   ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
   : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))

/* Round CUM up to the necessary point for argument MODE/TYPE.
   On this port this is the identity: no extra alignment is applied.  */
#define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)

/* Return boolean indicating arg of type TYPE and mode MODE will be passed in
   a reg.  This includes arguments that have to be passed by reference as the
   pointer to them is passed in a reg if one is available (and that is what
   we're given).
   This macro is only used in this file.  */
#define PASS_IN_REG_P(CUM, MODE, TYPE) \
  (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1208
1209 /* Determine where to put an argument to a function.
1210 Value is zero to push the argument on the stack,
1211 or a hard register in which to store the argument.
1212
1213 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1214 the preceding args and about the function being called.
1215 ARG is a description of the argument. */
1216 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1217 and the rest are pushed. */
1218
1219 static rtx
1220 m32r_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
1221 {
1222 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1223
1224 return (PASS_IN_REG_P (*cum, arg.mode, arg.type)
1225 ? gen_rtx_REG (arg.mode,
1226 ROUND_ADVANCE_CUM (*cum, arg.mode, arg.type))
1227 : NULL_RTX);
1228 }
1229
1230 /* Update the data in CUM to advance over argument ARG. */
1231
1232 static void
1233 m32r_function_arg_advance (cumulative_args_t cum_v,
1234 const function_arg_info &arg)
1235 {
1236 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1237
1238 *cum = (ROUND_ADVANCE_CUM (*cum, arg.mode, arg.type)
1239 + ROUND_ADVANCE_ARG (arg.mode, arg.type));
1240 }
1241
1242 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1243
1244 static bool
1245 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1246 {
1247 cumulative_args_t dummy = pack_cumulative_args (NULL);
1248 function_arg_info arg (const_cast<tree> (type), /*named=*/false);
1249 return m32r_pass_by_reference (dummy, arg);
1250 }
1251
1252 /* Worker function for TARGET_FUNCTION_VALUE. */
1253
1254 static rtx
1255 m32r_function_value (const_tree valtype,
1256 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1257 bool outgoing ATTRIBUTE_UNUSED)
1258 {
1259 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1260 }
1261
1262 /* Worker function for TARGET_LIBCALL_VALUE. */
1263
1264 static rtx
1265 m32r_libcall_value (machine_mode mode,
1266 const_rtx fun ATTRIBUTE_UNUSED)
1267 {
1268 return gen_rtx_REG (mode, 0);
1269 }
1270
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
   Only register 0 ever holds a return value.

   ??? What about r1 in DI/DF values.  */

static bool
m32r_function_value_regno_p (const unsigned int regno)
{
  return regno == 0;
}
1280
/* Do any needed setup for a variadic function.  For the M32R, we must
   create a register parameter block, and then copy any anonymous arguments
   in registers to memory.

   CUM has not been updated for the last named argument (which is given
   by ARG), and we rely on this fact.  */

static void
m32r_setup_incoming_varargs (cumulative_args_t cum,
			     const function_arg_info &arg,
			     int *pretend_size, int no_rtl)
{
  int first_anon_arg;

  if (no_rtl)
    return;

  /* All BLKmode values are passed by reference.  */
  gcc_assert (arg.mode != BLKmode);

  /* Index of the first parameter register holding an anonymous argument:
     registers consumed so far plus those taken by the last named ARG.  */
  first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum),
				       arg.mode, arg.type)
		    + ROUND_ADVANCE_ARG (arg.mode, arg.type));

  if (first_anon_arg < M32R_MAX_PARM_REGS)
    {
      /* Note that first_reg_offset < M32R_MAX_PARM_REGS.  */
      int first_reg_offset = first_anon_arg;
      /* Size in words to "pretend" allocate.  */
      int size = M32R_MAX_PARM_REGS - first_reg_offset;
      rtx regblock;

      /* Spill the remaining parameter registers to the save area at
	 FIRST_PARM_OFFSET above the arg pointer.  */
      regblock = gen_frame_mem (BLKmode,
				plus_constant (Pmode, arg_pointer_rtx,
					       FIRST_PARM_OFFSET (0)));
      set_mem_alias_set (regblock, get_varargs_alias_set ());
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = (size * UNITS_PER_WORD);
    }
}
1322
1323
1324 /* Return true if INSN is real instruction bearing insn. */
1326
1327 static int
1328 m32r_is_insn (rtx insn)
1329 {
1330 return (NONDEBUG_INSN_P (insn)
1331 && GET_CODE (PATTERN (insn)) != USE
1332 && GET_CODE (PATTERN (insn)) != CLOBBER);
1333 }
1334
1335 /* Increase the priority of long instructions so that the
1336 short instructions are scheduled ahead of the long ones. */
1337
1338 static int
1339 m32r_adjust_priority (rtx_insn *insn, int priority)
1340 {
1341 if (m32r_is_insn (insn)
1342 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1343 priority <<= 3;
1344
1345 return priority;
1346 }
1347
1348
1349 /* Indicate how many instructions can be issued at the same time.
1351 This is sort of a lie. The m32r can issue only 1 long insn at
1352 once, but it can issue 2 short insns. The default therefore is
1353 set at 2, but this can be overridden by the command line option
1354 -missue-rate=1. */
1355
1356 static int
1357 m32r_issue_rate (void)
1358 {
1359 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1360 }
1361
1362 /* Cost functions. */
1364 /* Memory is 3 times as expensive as registers.
1365 ??? Is that the right way to look at it? */
1366
1367 static int
1368 m32r_memory_move_cost (machine_mode mode,
1369 reg_class_t rclass ATTRIBUTE_UNUSED,
1370 bool in ATTRIBUTE_UNUSED)
1371 {
1372 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1373 return 6;
1374 else
1375 return 12;
1376 }
1377
1378 static bool
1379 m32r_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
1380 int outer_code ATTRIBUTE_UNUSED,
1381 int opno ATTRIBUTE_UNUSED, int *total,
1382 bool speed ATTRIBUTE_UNUSED)
1383 {
1384 int code = GET_CODE (x);
1385
1386 switch (code)
1387 {
1388 /* Small integers are as cheap as registers. 4 byte values can be
1389 fetched as immediate constants - let's give that the cost of an
1390 extra insn. */
1391 case CONST_INT:
1392 if (INT16_P (INTVAL (x)))
1393 {
1394 *total = 0;
1395 return true;
1396 }
1397 /* FALLTHRU */
1398
1399 case CONST:
1400 case LABEL_REF:
1401 case SYMBOL_REF:
1402 *total = COSTS_N_INSNS (1);
1403 return true;
1404
1405 case CONST_DOUBLE:
1406 {
1407 rtx high, low;
1408
1409 split_double (x, &high, &low);
1410 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1411 + !INT16_P (INTVAL (low)));
1412 return true;
1413 }
1414
1415 case MULT:
1416 *total = COSTS_N_INSNS (3);
1417 return true;
1418
1419 case DIV:
1420 case UDIV:
1421 case MOD:
1422 case UMOD:
1423 *total = COSTS_N_INSNS (10);
1424 return true;
1425
1426 default:
1427 return false;
1428 }
1429 }
1430
1431 /* Type of function DECL.
1433
1434 The result is cached. To reset the cache at the end of a function,
1435 call with DECL = NULL_TREE. */
1436
1437 enum m32r_function_type
1438 m32r_compute_function_type (tree decl)
1439 {
1440 /* Cached value. */
1441 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1442 /* Last function we were called for. */
1443 static tree last_fn = NULL_TREE;
1444
1445 /* Resetting the cached value? */
1446 if (decl == NULL_TREE)
1447 {
1448 fn_type = M32R_FUNCTION_UNKNOWN;
1449 last_fn = NULL_TREE;
1450 return fn_type;
1451 }
1452
1453 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1454 return fn_type;
1455
1456 /* Compute function type. */
1457 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1458 ? M32R_FUNCTION_INTERRUPT
1459 : M32R_FUNCTION_NORMAL);
1460
1461 last_fn = decl;
1462 return fn_type;
1463 }
1464 /* Function prologue/epilogue handlers. */
1466
1467 /* M32R stack frames look like:
1468
1469 Before call After call
1470 +-----------------------+ +-----------------------+
1471 | | | |
1472 high | local variables, | | local variables, |
1473 mem | reg save area, etc. | | reg save area, etc. |
1474 | | | |
1475 +-----------------------+ +-----------------------+
1476 | | | |
1477 | arguments on stack. | | arguments on stack. |
1478 | | | |
1479 SP+0->+-----------------------+ +-----------------------+
1480 | reg parm save area, |
1481 | only created for |
1482 | variable argument |
1483 | functions |
1484 +-----------------------+
1485 | previous frame ptr |
1486 +-----------------------+
1487 | |
1488 | register save area |
1489 | |
1490 +-----------------------+
1491 | return address |
1492 +-----------------------+
1493 | |
1494 | local variables |
1495 | |
1496 +-----------------------+
1497 | |
1498 | alloca allocations |
1499 | |
1500 +-----------------------+
1501 | |
1502 low | arguments on stack |
1503 memory | |
1504 SP+0->+-----------------------+
1505
1506 Notes:
1507 1) The "reg parm save area" does not exist for non variable argument fns.
1508 2) The "reg parm save area" can be eliminated completely if we saved regs
1509 containing anonymous args separately but that complicates things too
1510 much (so it's not done).
1511 3) The return address is saved after the register save area so as to have as
1512 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1513
/* Structure to be filled in by m32r_compute_frame_size with register
   save masks, and offsets for the current function.  */
struct m32r_frame_info
{
  unsigned int total_size;	/* # bytes that the entire frame takes up.  */
  unsigned int extra_size;	/* # bytes of extra stuff.  */
  unsigned int pretend_size;	/* # bytes we push and pretend caller did.  */
  unsigned int args_size;	/* # bytes that outgoing arguments take up.  */
  unsigned int reg_size;	/* # bytes needed to store regs.  */
  unsigned int var_size;	/* # bytes that variables take up.  */
  unsigned int gmask;		/* Mask of saved gp registers.  */
  unsigned int save_fp;		/* Nonzero if fp must be saved.  */
  unsigned int save_lr;		/* Nonzero if lr (return addr) must be saved.  */
  int initialized;		/* Nonzero if frame size already calculated.  */
};

/* Current frame information calculated by m32r_compute_frame_size.  */
static struct m32r_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
static struct m32r_frame_info zero_frame_info;

/* Single-bit masks used to strip fp/lr from gmask; those two registers
   are saved and restored separately from the loop over gmask.  */
#define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
#define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))

/* Tell prologue and epilogue if register REGNO should be saved / restored.
   The return address and frame pointer are treated separately.
   Don't consider them here.  */
#define MUST_SAVE_REGISTER(regno, interrupt_p) \
  ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
   && (df_regs_ever_live_p (regno) && (!call_used_regs[regno] || interrupt_p)))

/* The frame pointer is saved whenever it is live.  */
#define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
/* The return address is also saved when profiling.  */
#define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)

#define SHORT_INSN_SIZE 2	/* Size of small instructions.  */
#define LONG_INSN_SIZE 4	/* Size of long instructions.  */
1551
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.

   SIZE is the size needed for local variables.
   Fills in current_frame_info as a side effect.  */

unsigned int
m32r_compute_frame_size (poly_int64 size)	/* # of var. bytes allocated.  */
{
  unsigned int regno;
  unsigned int total_size, var_size, args_size, pretend_size, extra_size;
  unsigned int reg_size;
  unsigned int gmask;
  enum m32r_function_type fn_type;
  int interrupt_p;
  int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
                                  | crtl->profile);

  var_size = M32R_STACK_ALIGN (size);
  args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
  pretend_size = crtl->args.pretend_args_size;
  extra_size = FIRST_PARM_OFFSET (0);
  total_size = extra_size + pretend_size + args_size + var_size;
  reg_size = 0;
  gmask = 0;

  /* See if this is an interrupt handler.  Call used registers must be saved
     for them too.  */
  fn_type = m32r_compute_function_type (current_function_decl);
  interrupt_p = M32R_INTERRUPT_P (fn_type);

  /* Calculate space needed for registers.  The PIC register is saved as
     well when it is used.  */
  for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
    {
      if (MUST_SAVE_REGISTER (regno, interrupt_p)
	  || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	{
	  reg_size += UNITS_PER_WORD;
	  gmask |= 1 << regno;
	}
    }

  current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
  current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;

  /* fp and lr are saved outside GMASK; account for their slots here.  */
  reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
	       * UNITS_PER_WORD);
  total_size += reg_size;

  /* ??? Not sure this is necessary, and I don't think the epilogue
     handler will do the right thing if this changes total_size.  */
  total_size = M32R_STACK_ALIGN (total_size);

  /* frame_size = total_size - (pretend_size + reg_size); */

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.extra_size = extra_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.var_size = var_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_size = reg_size;
  current_frame_info.gmask = gmask;
  current_frame_info.initialized = reload_completed;

  /* Ok, we're done.  */
  return total_size;
}
1619
1620 /* Worker function for TARGET_CAN_ELIMINATE. */
1621
1622 bool
1623 m32r_can_eliminate (const int from, const int to)
1624 {
1625 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1626 ? ! frame_pointer_needed
1627 : true);
1628 }
1629
1630
/* The table we use to reference PIC data.  Set by
   m32r_load_pic_register to the _GLOBAL_OFFSET_TABLE_ symbol.  */
static rtx global_offset_table;
1634
1635 static void
1636 m32r_reload_lr (rtx sp, int size)
1637 {
1638 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1639
1640 if (size == 0)
1641 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1642 else if (size < 32768)
1643 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1644 gen_rtx_PLUS (Pmode, sp,
1645 GEN_INT (size)))));
1646 else
1647 {
1648 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1649
1650 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1651 emit_insn (gen_addsi3 (tmp, tmp, sp));
1652 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1653 }
1654
1655 emit_use (lr);
1656 }
1657
1658 void
1659 m32r_load_pic_register (void)
1660 {
1661 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1662 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1663 GEN_INT (TARGET_MODEL_SMALL)));
1664
1665 /* Need to emit this whether or not we obey regdecls,
1666 since setjmp/longjmp can cause life info to screw up. */
1667 emit_use (pic_offset_table_rtx);
1668 }
1669
1670 /* Expand the m32r prologue as a series of insns. */
1671
1672 void
1673 m32r_expand_prologue (void)
1674 {
1675 int regno;
1676 int frame_size;
1677 unsigned int gmask;
1678 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1679 | crtl->profile);
1680
1681 if (! current_frame_info.initialized)
1682 m32r_compute_frame_size (get_frame_size ());
1683
1684 if (flag_stack_usage_info)
1685 current_function_static_stack_size = current_frame_info.total_size;
1686
1687 gmask = current_frame_info.gmask;
1688
1689 /* These cases shouldn't happen. Catch them now. */
1690 gcc_assert (current_frame_info.total_size || !gmask);
1691
1692 /* Allocate space for register arguments if this is a variadic function. */
1693 if (current_frame_info.pretend_size != 0)
1694 {
1695 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1696 the wrong result on a 64-bit host. */
1697 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1698 emit_insn (gen_addsi3 (stack_pointer_rtx,
1699 stack_pointer_rtx,
1700 GEN_INT (-pretend_size)));
1701 }
1702
1703 /* Save any registers we need to and set up fp. */
1704 if (current_frame_info.save_fp)
1705 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1706
1707 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1708
1709 /* Save any needed call-saved regs (and call-used if this is an
1710 interrupt handler). */
1711 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1712 {
1713 if ((gmask & (1 << regno)) != 0)
1714 emit_insn (gen_movsi_push (stack_pointer_rtx,
1715 gen_rtx_REG (Pmode, regno)));
1716 }
1717
1718 if (current_frame_info.save_lr)
1719 emit_insn (gen_movsi_push (stack_pointer_rtx,
1720 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1721
1722 /* Allocate the stack frame. */
1723 frame_size = (current_frame_info.total_size
1724 - (current_frame_info.pretend_size
1725 + current_frame_info.reg_size));
1726
1727 if (frame_size == 0)
1728 ; /* Nothing to do. */
1729 else if (frame_size <= 32768)
1730 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1731 GEN_INT (-frame_size)));
1732 else
1733 {
1734 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1735
1736 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1737 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1738 }
1739
1740 if (frame_pointer_needed)
1741 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1742
1743 if (crtl->profile)
1744 /* Push lr for mcount (form_pc, x). */
1745 emit_insn (gen_movsi_push (stack_pointer_rtx,
1746 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1747
1748 if (pic_reg_used)
1749 {
1750 m32r_load_pic_register ();
1751 m32r_reload_lr (stack_pointer_rtx,
1752 (crtl->profile ? 0 : frame_size));
1753 }
1754
1755 if (crtl->profile && !pic_reg_used)
1756 emit_insn (gen_blockage ());
1757 }
1758
1759
1760 /* Set up the stack and frame pointer (if desired) for the function.
1762 Note, if this is changed, you need to mirror the changes in
1763 m32r_compute_frame_size which calculates the prolog size. */
1764
1765 static void
1766 m32r_output_function_prologue (FILE * file)
1767 {
1768 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1769
1770 /* If this is an interrupt handler, mark it as such. */
1771 if (M32R_INTERRUPT_P (fn_type))
1772 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1773
1774 if (! current_frame_info.initialized)
1775 m32r_compute_frame_size (get_frame_size ());
1776
1777 /* This is only for the human reader. */
1778 fprintf (file,
1779 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1780 ASM_COMMENT_START,
1781 current_frame_info.var_size,
1782 current_frame_info.reg_size / 4,
1783 current_frame_info.args_size,
1784 current_frame_info.extra_size);
1785 }
1786
1787 /* Output RTL to pop register REGNO from the stack. */
1789
1790 static void
1791 pop (int regno)
1792 {
1793 rtx x;
1794
1795 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1796 stack_pointer_rtx));
1797 add_reg_note (x, REG_INC, stack_pointer_rtx);
1798 }
1799
/* Expand the m32r epilogue as a series of insns.

   Undoes m32r_expand_prologue: points sp at the bottom of the register
   save area (directly, or via fp when alloca may have moved sp), pops
   lr, the saved registers and fp, then removes any varargs pretend
   area.  Does nothing when the function already ends in a barrier.  */

void
m32r_expand_epilogue (void)
{
  int regno;
  int noepilogue = FALSE;
  int total_size;

  gcc_assert (current_frame_info.initialized);
  total_size = current_frame_info.total_size;

  if (total_size == 0)
    {
      rtx_insn *insn = get_last_insn ();

      /* If the last insn was a BARRIER, we don't have to write any code
	 because a jump (aka return) was put there.  */
      if (insn && NOTE_P (insn))
	insn = prev_nonnote_insn (insn);
      if (insn && BARRIER_P (insn))
	noepilogue = TRUE;
    }

  if (!noepilogue)
    {
      unsigned int var_size = current_frame_info.var_size;
      unsigned int args_size = current_frame_info.args_size;
      unsigned int gmask = current_frame_info.gmask;
      /* With alloca, sp may have moved since the prologue, so restore
	 through fp instead.  */
      int can_trust_sp_p = !cfun->calls_alloca;

      if (flag_exceptions)
	emit_insn (gen_blockage ());

      /* The first thing to do is point the sp at the bottom of the register
	 save area.  */
      if (can_trust_sp_p)
	{
	  unsigned int reg_offset = var_size + args_size;

	  if (reg_offset == 0)
	    ; /* Nothing to do.  */
	  else if (reg_offset < 32768)
	    emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				   GEN_INT (reg_offset)));
	  else
	    {
	      /* Offset too large for an immediate; use a scratch reg.  */
	      rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);

	      emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     tmp));
	    }
	}
      else if (frame_pointer_needed)
	{
	  unsigned int reg_offset = var_size + args_size;

	  if (reg_offset == 0)
	    emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
	  else if (reg_offset < 32768)
	    emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
				   GEN_INT (reg_offset)));
	  else
	    {
	      rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);

	      emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
	      emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     tmp));
	    }
	}
      else
	gcc_unreachable ();

      if (current_frame_info.save_lr)
	pop (RETURN_ADDR_REGNUM);

      /* Restore any saved registers, in reverse order of course.  */
      gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
      for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
	{
	  if ((gmask & (1L << regno)) != 0)
	    pop (regno);
	}

      if (current_frame_info.save_fp)
	pop (FRAME_POINTER_REGNUM);

      /* Remove varargs area if present.  */
      if (current_frame_info.pretend_size != 0)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (current_frame_info.pretend_size)));

      emit_insn (gen_blockage ());
    }
}
1898
1899 /* Do any necessary cleanup after a function to restore stack, frame,
1900 and regs. */
1901
1902 static void
1903 m32r_output_function_epilogue (FILE *)
1904 {
1905 /* Reset state info for each function. */
1906 current_frame_info = zero_frame_info;
1907 m32r_compute_function_type (NULL_TREE);
1908 }
1909
1910 /* Return nonzero if this function is known to have a null or 1 instruction
1912 epilogue. */
1913
1914 int
1915 direct_return (void)
1916 {
1917 if (!reload_completed)
1918 return FALSE;
1919
1920 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1921 return FALSE;
1922
1923 if (! current_frame_info.initialized)
1924 m32r_compute_frame_size (get_frame_size ());
1925
1926 return current_frame_info.total_size == 0;
1927 }
1928
1929
1930 /* PIC. */
1932
1933 int
1934 m32r_legitimate_pic_operand_p (rtx x)
1935 {
1936 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1937 return 0;
1938
1939 if (GET_CODE (x) == CONST
1940 && GET_CODE (XEXP (x, 0)) == PLUS
1941 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1942 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1943 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1944 return 0;
1945
1946 return 1;
1947 }
1948
/* Convert ORIG, a SYMBOL_REF/LABEL_REF or CONST expression, into a
   PIC-legitimate address, loading GOT references into REG (or into a
   fresh pseudo when REG is 0 — only valid before reload).  Anything
   else is returned unchanged.  */

rtx
m32r_legitimize_pic_address (rtx orig, rtx reg)
{
#ifdef DEBUG_PIC
  printf("m32r_legitimize_pic_address()\n");
#endif

  if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
    {
      rtx pic_ref, address;
      int subregs = 0;

      if (reg == 0)
	{
	  /* New pseudos may only be created before reload.  */
	  gcc_assert (!reload_in_progress && !reload_completed);
	  reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      crtl->uses_pic_offset_table = 1;

      /* Labels and local symbols can be addressed GOT-relative without
	 loading a GOT slot.  */
      if (GET_CODE (orig) == LABEL_REF
	  || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
	{
	  emit_insn (gen_gotoff_load_addr (reg, orig));
	  emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
	  return reg;
	}

      /* Global symbols: load the address from the GOT entry.  */
      emit_insn (gen_pic_load_addr (address, orig));

      emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
      pic_ref = gen_const_mem (Pmode, address);
      emit_move_insn (reg, pic_ref);
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      /* Already expressed relative to the PIC register; nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (!reload_in_progress && !reload_completed);
	  reg = gen_reg_rtx (Pmode);
	}

      /* Legitimize both halves of (CONST (PLUS base offset)).  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
	  if (base == reg)
	    offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
	  else
	    offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
	}
      else
	return orig;

      if (CONST_INT_P (offset))
	{
	  /* A 16-bit offset can be folded into the address directly;
	     anything larger must go through a register.  */
	  if (INT16_P (INTVAL (offset)))
	    return plus_constant (Pmode, base, INTVAL (offset));
	  else
	    {
	      gcc_assert (! reload_in_progress && ! reload_completed);
	      offset = force_reg (Pmode, offset);
	    }
	}

      return gen_rtx_PLUS (Pmode, base, offset);
    }

  return orig;
}
2032
2033 static rtx
2034 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2035 machine_mode mode ATTRIBUTE_UNUSED)
2036 {
2037 if (flag_pic)
2038 return m32r_legitimize_pic_address (x, NULL_RTX);
2039 else
2040 return x;
2041 }
2042
2043 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2044
2045 static bool
2046 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2047 {
2048 if (GET_CODE (addr) == LO_SUM)
2049 return true;
2050
2051 return false;
2052 }
2053
2054 /* Nested function support. */
2056
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.

   NOTE(review): the body is intentionally empty — nothing here emits
   any initialization; presumably trampoline setup is handled elsewhere
   in the port.  Confirm before relying on nested-function support.  */

void
m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
			    rtx fnaddr ATTRIBUTE_UNUSED,
			    rtx cxt ATTRIBUTE_UNUSED)
{
}
2067
2068 static void
2070 m32r_file_start (void)
2071 {
2072 default_file_start ();
2073
2074 if (flag_verbose_asm)
2075 fprintf (asm_out_file,
2076 "%s M32R/D special options: -G %d\n",
2077 ASM_COMMENT_START, g_switch_value);
2078
2079 if (TARGET_LITTLE_ENDIAN)
2080 fprintf (asm_out_file, "\t.little\n");
2081 }
2082
2083 /* Print operand X (an rtx) in assembler syntax to file FILE.
2085 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2086 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2087
2088 static void
2089 m32r_print_operand (FILE * file, rtx x, int code)
2090 {
2091 rtx addr;
2092
2093 switch (code)
2094 {
2095 /* The 's' and 'p' codes are used by output_block_move() to
2096 indicate post-increment 's'tores and 'p're-increment loads. */
2097 case 's':
2098 if (REG_P (x))
2099 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2100 else
2101 output_operand_lossage ("invalid operand to %%s code");
2102 return;
2103
2104 case 'p':
2105 if (REG_P (x))
2106 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2107 else
2108 output_operand_lossage ("invalid operand to %%p code");
2109 return;
2110
2111 case 'R' :
2112 /* Write second word of DImode or DFmode reference,
2113 register or memory. */
2114 if (REG_P (x))
2115 fputs (reg_names[REGNO (x)+1], file);
2116 else if (MEM_P (x))
2117 {
2118 machine_mode mode = GET_MODE (x);
2119
2120 fprintf (file, "@(");
2121 /* Handle possible auto-increment. Since it is pre-increment and
2122 we have already done it, we can just use an offset of four. */
2123 /* ??? This is taken from rs6000.cc I think. I don't think it is
2124 currently necessary, but keep it around. */
2125 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2126 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2127 output_address (mode, plus_constant (Pmode,
2128 XEXP (XEXP (x, 0), 0), 4));
2129 else
2130 output_address (mode, plus_constant (Pmode, XEXP (x, 0), 4));
2131 fputc (')', file);
2132 }
2133 else
2134 output_operand_lossage ("invalid operand to %%R code");
2135 return;
2136
2137 case 'H' : /* High word. */
2138 case 'L' : /* Low word. */
2139 if (REG_P (x))
2140 {
2141 /* L = least significant word, H = most significant word. */
2142 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2143 fputs (reg_names[REGNO (x)], file);
2144 else
2145 fputs (reg_names[REGNO (x)+1], file);
2146 }
2147 else if (CONST_INT_P (x)
2148 || GET_CODE (x) == CONST_DOUBLE)
2149 {
2150 rtx first, second;
2151
2152 split_double (x, &first, &second);
2153 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2154 code == 'L' ? INTVAL (first) : INTVAL (second));
2155 }
2156 else
2157 output_operand_lossage ("invalid operand to %%H/%%L code");
2158 return;
2159
2160 case 'A' :
2161 {
2162 char str[30];
2163
2164 if (GET_CODE (x) != CONST_DOUBLE
2165 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2166 fatal_insn ("bad insn for 'A'", x);
2167
2168 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2169 fprintf (file, "%s", str);
2170 return;
2171 }
2172
2173 case 'B' : /* Bottom half. */
2174 case 'T' : /* Top half. */
2175 /* Output the argument to a `seth' insn (sets the Top half-word).
2176 For constants output arguments to a seth/or3 pair to set Top and
2177 Bottom halves. For symbols output arguments to a seth/add3 pair to
2178 set Top and Bottom halves. The difference exists because for
2179 constants seth/or3 is more readable but for symbols we need to use
2180 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
2181 switch (GET_CODE (x))
2182 {
2183 case CONST_INT :
2184 case CONST_DOUBLE :
2185 {
2186 rtx first, second;
2187
2188 split_double (x, &first, &second);
2189 x = WORDS_BIG_ENDIAN ? second : first;
2190 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2191 (code == 'B'
2192 ? INTVAL (x) & 0xffff
2193 : (INTVAL (x) >> 16) & 0xffff));
2194 }
2195 return;
2196 case CONST :
2197 case SYMBOL_REF :
2198 if (code == 'B'
2199 && small_data_operand (x, VOIDmode))
2200 {
2201 fputs ("sda(", file);
2202 output_addr_const (file, x);
2203 fputc (')', file);
2204 return;
2205 }
2206 /* fall through */
2207 case LABEL_REF :
2208 fputs (code == 'T' ? "shigh(" : "low(", file);
2209 output_addr_const (file, x);
2210 fputc (')', file);
2211 return;
2212 default :
2213 output_operand_lossage ("invalid operand to %%T/%%B code");
2214 return;
2215 }
2216 break;
2217
2218 case 'U' :
2219 /* ??? wip */
2220 /* Output a load/store with update indicator if appropriate. */
2221 if (MEM_P (x))
2222 {
2223 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2224 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2225 fputs (".a", file);
2226 }
2227 else
2228 output_operand_lossage ("invalid operand to %%U code");
2229 return;
2230
2231 case 'N' :
2232 /* Print a constant value negated. */
2233 if (CONST_INT_P (x))
2234 output_addr_const (file, GEN_INT (- INTVAL (x)));
2235 else
2236 output_operand_lossage ("invalid operand to %%N code");
2237 return;
2238
2239 case 'X' :
2240 /* Print a const_int in hex. Used in comments. */
2241 if (CONST_INT_P (x))
2242 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2243 return;
2244
2245 case '#' :
2246 fputs (IMMEDIATE_PREFIX, file);
2247 return;
2248
2249 case 0 :
2250 /* Do nothing special. */
2251 break;
2252
2253 default :
2254 /* Unknown flag. */
2255 output_operand_lossage ("invalid operand output code");
2256 }
2257
2258 switch (GET_CODE (x))
2259 {
2260 case REG :
2261 fputs (reg_names[REGNO (x)], file);
2262 break;
2263
2264 case MEM :
2265 addr = XEXP (x, 0);
2266 if (GET_CODE (addr) == PRE_INC)
2267 {
2268 if (!REG_P (XEXP (addr, 0)))
2269 fatal_insn ("pre-increment address is not a register", x);
2270
2271 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2272 }
2273 else if (GET_CODE (addr) == PRE_DEC)
2274 {
2275 if (!REG_P (XEXP (addr, 0)))
2276 fatal_insn ("pre-decrement address is not a register", x);
2277
2278 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2279 }
2280 else if (GET_CODE (addr) == POST_INC)
2281 {
2282 if (!REG_P (XEXP (addr, 0)))
2283 fatal_insn ("post-increment address is not a register", x);
2284
2285 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2286 }
2287 else
2288 {
2289 fputs ("@(", file);
2290 output_address (GET_MODE (x), addr);
2291 fputc (')', file);
2292 }
2293 break;
2294
2295 case CONST_DOUBLE :
2296 /* We handle SFmode constants here as output_addr_const doesn't. */
2297 if (GET_MODE (x) == SFmode)
2298 {
2299 long l;
2300
2301 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2302 fprintf (file, "0x%08lx", l);
2303 break;
2304 }
2305
2306 /* FALLTHRU */
2307 /* Let output_addr_const deal with it. */
2308
2309 default :
2310 output_addr_const (file, x);
2311 break;
2312 }
2313 }
2314
/* Print a memory address as an operand to reference that memory location.

   FILE is the output stream, the mode parameter is unused (present only
   to satisfy the TARGET_PRINT_OPERAND_ADDRESS hook signature), and ADDR
   is the address RTX.  Handles plain registers, PLUS sums (reg+offset,
   reg+reg, reg+symbol, and lo_sum+offset), bare LO_SUMs, and the
   pre/post increment/decrement addressing forms; anything else falls
   through to output_addr_const.  */

static void
m32r_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
{
  rtx base;
  rtx index = 0;
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case REG :
      fputs (reg_names[REGNO (addr)], file);
      break;

    case PLUS :
      /* Split the PLUS into a base plus either a constant offset or an
	 index expression; the constant may appear on either side.  */
      if (CONST_INT_P (XEXP (addr, 0)))
	offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
      else if (CONST_INT_P (XEXP (addr, 1)))
	offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
      else
	base = XEXP (addr, 0), index = XEXP (addr, 1);
      if (REG_P (base))
	{
	  /* Print the offset first (if present) to conform to the manual.  */
	  if (index == 0)
	    {
	      if (offset != 0)
		fprintf (file, "%d,", offset);
	      fputs (reg_names[REGNO (base)], file);
	    }
	  /* The chip doesn't support this, but left in for generality.  */
	  else if (REG_P (index))
	    fprintf (file, "%s,%s",
		     reg_names[REGNO (base)], reg_names[REGNO (index)]);
	  /* Not sure this can happen, but leave in for now.  */
	  else if (GET_CODE (index) == SYMBOL_REF)
	    {
	      output_addr_const (file, index);
	      fputc (',', file);
	      fputs (reg_names[REGNO (base)], file);
	    }
	  else
	    fatal_insn ("bad address", addr);
	}
      else if (GET_CODE (base) == LO_SUM)
	{
	  /* (plus (lo_sum reg sym) offset): fold the offset into the
	     relocated symbol and emit "sda(sym+off),reg" for small-data
	     symbols or "low(sym+off),reg" otherwise.  */
	  gcc_assert (!index && REG_P (XEXP (base, 0)));
	  if (small_data_operand (XEXP (base, 1), VOIDmode))
	    fputs ("sda(", file);
	  else
	    fputs ("low(", file);
	  output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
						  offset));
	  fputs ("),", file);
	  fputs (reg_names[REGNO (XEXP (base, 0))], file);
	}
      else
	fatal_insn ("bad address", addr);
      break;

    case LO_SUM :
      /* Bare lo_sum: same sda()/low() relocation syntax, no offset.  */
      if (!REG_P (XEXP (addr, 0)))
	fatal_insn ("lo_sum not of register", addr);
      if (small_data_operand (XEXP (addr, 1), VOIDmode))
	fputs ("sda(", file);
      else
	fputs ("low(", file);
      output_addr_const (file, XEXP (addr, 1));
      fputs ("),", file);
      fputs (reg_names[REGNO (XEXP (addr, 0))], file);
      break;

    case PRE_INC : /* Assume SImode.  */
      fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PRE_DEC : /* Assume SImode.  */
      fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC : /* Assume SImode.  */
      fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    default :
      output_addr_const (file, addr);
      break;
    }
}
2405
2406 static bool
2407 m32r_print_operand_punct_valid_p (unsigned char code)
2408 {
2409 return m32r_punct_chars[code];
2410 }
2411
2412 /* Return true if the operands are the constants 0 and 1. */
2413
2414 int
2415 zero_and_one (rtx operand1, rtx operand2)
2416 {
2417 return
2418 CONST_INT_P (operand1)
2419 && CONST_INT_P (operand2)
2420 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2421 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2422 }
2423
2424 /* Generate the correct assembler code to handle the conditional loading of a
2425 value into a register. It is known that the operands satisfy the
2426 conditional_move_operand() function above. The destination is operand[0].
2427 The condition is operand [1]. The 'true' value is operand [2] and the
2428 'false' value is operand [3]. */
2429
2430 char *
2431 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2432 {
2433 static char buffer [100];
2434 const char * dest = reg_names [REGNO (operands [0])];
2435
2436 buffer [0] = 0;
2437
2438 /* Destination must be a register. */
2439 gcc_assert (REG_P (operands [0]));
2440 gcc_assert (conditional_move_operand (operands [2], SImode));
2441 gcc_assert (conditional_move_operand (operands [3], SImode));
2442
2443 /* Check to see if the test is reversed. */
2444 if (GET_CODE (operands [1]) == NE)
2445 {
2446 rtx tmp = operands [2];
2447 operands [2] = operands [3];
2448 operands [3] = tmp;
2449 }
2450
2451 sprintf (buffer, "mvfc %s, cbr", dest);
2452
2453 /* If the true value was '0' then we need to invert the results of the move. */
2454 if (INTVAL (operands [2]) == 0)
2455 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2456 dest, dest);
2457
2458 return buffer;
2459 }
2460
2461 /* Returns true if the registers contained in the two
2462 rtl expressions are different. */
2463
2464 int
2465 m32r_not_same_reg (rtx a, rtx b)
2466 {
2467 int reg_a = -1;
2468 int reg_b = -2;
2469
2470 while (GET_CODE (a) == SUBREG)
2471 a = SUBREG_REG (a);
2472
2473 if (REG_P (a))
2474 reg_a = REGNO (a);
2475
2476 while (GET_CODE (b) == SUBREG)
2477 b = SUBREG_REG (b);
2478
2479 if (REG_P (b))
2480 reg_b = REGNO (b);
2481
2482 return reg_a != reg_b;
2483 }
2484
2485
2486 rtx
2488 m32r_function_symbol (const char *name)
2489 {
2490 int extra_flags = 0;
2491 enum m32r_model model;
2492 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2493
2494 if (TARGET_MODEL_SMALL)
2495 model = M32R_MODEL_SMALL;
2496 else if (TARGET_MODEL_MEDIUM)
2497 model = M32R_MODEL_MEDIUM;
2498 else if (TARGET_MODEL_LARGE)
2499 model = M32R_MODEL_LARGE;
2500 else
2501 gcc_unreachable (); /* Shouldn't happen. */
2502 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2503
2504 if (extra_flags)
2505 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2506
2507 return sym;
2508 }
2509
2510 /* Use a library function to move some bytes. */
2511
2512 static void
2513 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2514 {
2515 /* We want to pass the size as Pmode, which will normally be SImode
2516 but will be DImode if we are using 64-bit longs and pointers. */
2517 if (GET_MODE (bytes_rtx) != VOIDmode
2518 && GET_MODE (bytes_rtx) != Pmode)
2519 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2520
2521 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2522 VOIDmode, dest_reg, Pmode, src_reg, Pmode,
2523 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2524 TYPE_UNSIGNED (sizetype)),
2525 TYPE_MODE (sizetype));
2526 }
2527
/* Expand string/block move operations.

   operands[0] is the pointer to the destination.
   operands[1] is the pointer to the source.
   operands[2] is the number of bytes to move.
   operands[3] is the alignment.

   Returns 1 upon success, 0 otherwise.  (A return of 0 means the copy
   was emitted as a library call instead of inline RTL.)  */

int
m32r_expand_block_move (rtx operands[])
{
  rtx orig_dst = operands[0];
  rtx orig_src = operands[1];
  rtx bytes_rtx = operands[2];
  rtx align_rtx = operands[3];
  int constp = CONST_INT_P (bytes_rtx);
  HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
  int align = INTVAL (align_rtx);
  int leftover;
  rtx src_reg;
  rtx dst_reg;

  /* A known-zero (or negative) length needs no code at all.  */
  if (constp && bytes <= 0)
    return 1;

  /* Move the address into scratch registers.  */
  dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
  src_reg = copy_addr_to_reg (XEXP (orig_src, 0));

  if (align > UNITS_PER_WORD)
    align = UNITS_PER_WORD;

  /* If we prefer size over speed, always use a function call.
     If we do not know the size, use a function call.
     If the blocks are not word aligned, use a function call.  */
  if (optimize_size || ! constp || align != UNITS_PER_WORD)
    {
      block_move_call (dst_reg, src_reg, bytes_rtx);
      return 0;
    }

  /* Split the copy into whole MAX_MOVE_BYTES chunks plus a remainder.  */
  leftover = bytes % MAX_MOVE_BYTES;
  bytes -= leftover;

  /* If necessary, generate a loop to handle the bulk of the copy.  */
  if (bytes)
    {
      rtx_code_label *label = NULL;
      rtx final_src = NULL_RTX;
      rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
      rtx rounded_total = GEN_INT (bytes);
      rtx new_dst_reg = gen_reg_rtx (SImode);
      rtx new_src_reg = gen_reg_rtx (SImode);

      /* If we are going to have to perform this loop more than
	 once, then generate a label and compute the address the
	 source register will contain upon completion of the final
	 iteration.  */
      if (bytes > MAX_MOVE_BYTES)
	{
	  final_src = gen_reg_rtx (Pmode);

	  /* Use a single add when the total fits in a signed 16-bit
	     immediate; otherwise materialize the constant first.  */
	  if (INT16_P(bytes))
	    emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
	  else
	    {
	      emit_insn (gen_movsi (final_src, rounded_total));
	      emit_insn (gen_addsi3 (final_src, final_src, src_reg));
	    }

	  label = gen_label_rtx ();
	  emit_label (label);
	}

      /* It is known that output_block_move() will update src_reg to point
	 to the word after the end of the source block, and dst_reg to point
	 to the last word of the destination block, provided that the block
	 is MAX_MOVE_BYTES long.  */
      emit_insn (gen_cpymemsi_internal (dst_reg, src_reg, at_a_time,
					new_dst_reg, new_src_reg));
      emit_move_insn (dst_reg, new_dst_reg);
      emit_move_insn (src_reg, new_src_reg);
      /* Advance dst_reg past the last stored word so the next iteration
	 (or the leftover copy) starts at the right place.  */
      emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));

      /* Loop back until src_reg reaches the precomputed end address.  */
      if (bytes > MAX_MOVE_BYTES)
	{
	  rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
	  emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
	}
    }

  /* Copy the sub-MAX_MOVE_BYTES tail, if any, with fresh temporaries.  */
  if (leftover)
    emit_insn (gen_cpymemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
				      gen_reg_rtx (SImode),
				      gen_reg_rtx (SImode)));
  return 1;
}
2626
2627
2628 /* Emit load/stores for a small constant word aligned block_move.
2630
2631 operands[0] is the memory address of the destination.
2632 operands[1] is the memory address of the source.
2633 operands[2] is the number of bytes to move.
2634 operands[3] is a temp register.
2635 operands[4] is a temp register. */
2636
2637 void
2638 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2639 {
2640 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2641 int first_time;
2642 int got_extra = 0;
2643
2644 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2645
2646 /* We do not have a post-increment store available, so the first set of
2647 stores are done without any increment, then the remaining ones can use
2648 the pre-increment addressing mode.
2649
2650 Note: expand_block_move() also relies upon this behavior when building
2651 loops to copy large blocks. */
2652 first_time = 1;
2653
2654 while (bytes > 0)
2655 {
2656 if (bytes >= 8)
2657 {
2658 if (first_time)
2659 {
2660 output_asm_insn ("ld\t%5, %p1", operands);
2661 output_asm_insn ("ld\t%6, %p1", operands);
2662 output_asm_insn ("st\t%5, @%0", operands);
2663 output_asm_insn ("st\t%6, %s0", operands);
2664 }
2665 else
2666 {
2667 output_asm_insn ("ld\t%5, %p1", operands);
2668 output_asm_insn ("ld\t%6, %p1", operands);
2669 output_asm_insn ("st\t%5, %s0", operands);
2670 output_asm_insn ("st\t%6, %s0", operands);
2671 }
2672
2673 bytes -= 8;
2674 }
2675 else if (bytes >= 4)
2676 {
2677 if (bytes > 4)
2678 got_extra = 1;
2679
2680 output_asm_insn ("ld\t%5, %p1", operands);
2681
2682 if (got_extra)
2683 output_asm_insn ("ld\t%6, %p1", operands);
2684
2685 if (first_time)
2686 output_asm_insn ("st\t%5, @%0", operands);
2687 else
2688 output_asm_insn ("st\t%5, %s0", operands);
2689
2690 bytes -= 4;
2691 }
2692 else
2693 {
2694 /* Get the entire next word, even though we do not want all of it.
2695 The saves us from doing several smaller loads, and we assume that
2696 we cannot cause a page fault when at least part of the word is in
2697 valid memory [since we don't get called if things aren't properly
2698 aligned]. */
2699 int dst_offset = first_time ? 0 : 4;
2700 /* The amount of increment we have to make to the
2701 destination pointer. */
2702 int dst_inc_amount = dst_offset + bytes - 4;
2703 /* The same for the source pointer. */
2704 int src_inc_amount = bytes - (got_extra ? 4 : 0);
2705 int last_shift;
2706 rtx my_operands[3];
2707
2708 /* If got_extra is true then we have already loaded
2709 the next word as part of loading and storing the previous word. */
2710 if (! got_extra)
2711 output_asm_insn ("ld\t%6, @%1", operands);
2712
2713 if (bytes >= 2)
2714 {
2715 bytes -= 2;
2716
2717 output_asm_insn ("sra3\t%5, %6, #16", operands);
2718 my_operands[0] = operands[5];
2719 my_operands[1] = GEN_INT (dst_offset);
2720 my_operands[2] = operands[0];
2721 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2722
2723 /* If there is a byte left to store then increment the
2724 destination address and shift the contents of the source
2725 register down by 8 bits. We could not do the address
2726 increment in the store half word instruction, because it does
2727 not have an auto increment mode. */
2728 if (bytes > 0) /* assert (bytes == 1) */
2729 {
2730 dst_offset += 2;
2731 last_shift = 8;
2732 }
2733 }
2734 else
2735 last_shift = 24;
2736
2737 if (bytes > 0)
2738 {
2739 my_operands[0] = operands[6];
2740 my_operands[1] = GEN_INT (last_shift);
2741 output_asm_insn ("srai\t%0, #%1", my_operands);
2742 my_operands[0] = operands[6];
2743 my_operands[1] = GEN_INT (dst_offset);
2744 my_operands[2] = operands[0];
2745 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2746 }
2747
2748 /* Update the destination pointer if needed. We have to do
2749 this so that the patterns matches what we output in this
2750 function. */
2751 if (dst_inc_amount
2752 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2753 {
2754 my_operands[0] = operands[0];
2755 my_operands[1] = GEN_INT (dst_inc_amount);
2756 output_asm_insn ("addi\t%0, #%1", my_operands);
2757 }
2758
2759 /* Update the source pointer if needed. We have to do this
2760 so that the patterns matches what we output in this
2761 function. */
2762 if (src_inc_amount
2763 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2764 {
2765 my_operands[0] = operands[1];
2766 my_operands[1] = GEN_INT (src_inc_amount);
2767 output_asm_insn ("addi\t%0, #%1", my_operands);
2768 }
2769
2770 bytes = 0;
2771 }
2772
2773 first_time = 0;
2774 }
2775 }
2776
2777 /* Implement TARGET_HARD_REGNO_MODE_OK. */
2778
2779 static bool
2780 m32r_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
2781 {
2782 return (m32r_hard_regno_modes[regno] & m32r_mode_class[mode]) != 0;
2783 }
2784
2785 /* Implement TARGET_MODES_TIEABLE_P. Tie QI/HI/SI modes together. */
2786
2787 static bool
2788 m32r_modes_tieable_p (machine_mode mode1, machine_mode mode2)
2789 {
2790 return (GET_MODE_CLASS (mode1) == MODE_INT
2791 && GET_MODE_CLASS (mode2) == MODE_INT
2792 && GET_MODE_SIZE (mode1) <= UNITS_PER_WORD
2793 && GET_MODE_SIZE (mode2) <= UNITS_PER_WORD);
2794 }
2795
2796 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2797
2798 int
2799 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2800 unsigned int new_reg)
2801 {
2802 /* Interrupt routines can't clobber any register that isn't already used. */
2803 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2804 && !df_regs_ever_live_p (new_reg))
2805 return 0;
2806
2807 return 1;
2808 }
2809
2810 rtx
2811 m32r_return_addr (int count)
2812 {
2813 if (count != 0)
2814 return const0_rtx;
2815
2816 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2817 }
2818
/* Implement TARGET_TRAMPOLINE_INIT.

   Fill in the trampoline at M_TRAMP: four fixed SImode instruction
   words (each stored in the byte order matching the target
   endianness), followed by the static chain value CHAIN_VALUE at
   offset 16 and the address of the nested function FNDECL at offset
   20.  Then make the new code visible to the instruction stream:
   either via a cache-flush trap (when m32r_cache_flush_trap is
   configured) or by calling the configured cache-flush library
   function.

   NOTE(review): the semantics of the four opcode words below, and of
   the final GEN_INT (3) argument to the cache-flush call, are not
   derivable from this file -- confirm against the M32R ISA manual and
   the cache-flush function's ABI.  */

static void
m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  /* Four fixed instruction words, byte-swapped for little-endian.  */
  emit_move_insn (adjust_address (m_tramp, SImode, 0),
		  gen_int_mode (TARGET_LITTLE_ENDIAN ?
				0x017e8e17 : 0x178e7e01, SImode));
  emit_move_insn (adjust_address (m_tramp, SImode, 4),
		  gen_int_mode (TARGET_LITTLE_ENDIAN ?
				0x0c00ae86 : 0x86ae000c, SImode));
  emit_move_insn (adjust_address (m_tramp, SImode, 8),
		  gen_int_mode (TARGET_LITTLE_ENDIAN ?
				0xe627871e : 0x1e8727e6, SImode));
  emit_move_insn (adjust_address (m_tramp, SImode, 12),
		  gen_int_mode (TARGET_LITTLE_ENDIAN ?
				0xc616c626 : 0x26c61fc6, SImode));
  /* Static chain and target function address read by the code above.  */
  emit_move_insn (adjust_address (m_tramp, SImode, 16),
		  chain_value);
  emit_move_insn (adjust_address (m_tramp, SImode, 20),
		  XEXP (DECL_RTL (fndecl), 0));

  /* Flush the icache over the freshly written trampoline.  */
  if (m32r_cache_flush_trap >= 0)
    emit_insn (gen_flush_icache
	       (validize_mem (adjust_address (m_tramp, SImode, 0)),
		gen_int_mode (m32r_cache_flush_trap, SImode)));
  else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
    emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
		       LCT_NORMAL, VOIDmode, XEXP (m_tramp, 0), Pmode,
		       gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
		       GEN_INT (3), SImode);
}
2849
2850 /* True if X is a reg that can be used as a base reg. */
2851
2852 static bool
2853 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2854 {
2855 if (! REG_P (x))
2856 return false;
2857
2858 if (strict)
2859 {
2860 if (GPR_P (REGNO (x)))
2861 return true;
2862 }
2863 else
2864 {
2865 if (GPR_P (REGNO (x))
2866 || REGNO (x) == ARG_POINTER_REGNUM
2867 || ! HARD_REGISTER_P (x))
2868 return true;
2869 }
2870
2871 return false;
2872 }
2873
2874 static inline bool
2875 m32r_rtx_ok_for_offset_p (const_rtx x)
2876 {
2877 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2878 }
2879
2880 static inline bool
2881 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED,
2882 const_rtx x, bool strict)
2883 {
2884 if (GET_CODE (x) == PLUS
2885 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2886 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2887 return true;
2888
2889 return false;
2890 }
2891
2892 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2893 since more than one instruction will be required. */
2894
2895 static inline bool
2896 m32r_legitimate_lo_sum_addres_p (machine_mode mode, const_rtx x,
2897 bool strict)
2898 {
2899 if (GET_CODE (x) == LO_SUM
2900 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2901 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2902 && CONSTANT_P (XEXP (x, 1)))
2903 return true;
2904
2905 return false;
2906 }
2907
2908 /* Is this a load and increment operation. */
2909
2910 static inline bool
2911 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2912 {
2913 if ((mode == SImode || mode == SFmode)
2914 && GET_CODE (x) == POST_INC
2915 && REG_P (XEXP (x, 0))
2916 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2917 return true;
2918
2919 return false;
2920 }
2921
2922 /* Is this an increment/decrement and store operation. */
2923
2924 static inline bool
2925 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2926 {
2927 if ((mode == SImode || mode == SFmode)
2928 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2929 && REG_P (XEXP (x, 0)) \
2930 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2931 return true;
2932
2933 return false;
2934 }
2935
2936 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2937
2938 static bool
2939 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2940 {
2941 if (m32r_rtx_ok_for_base_p (x, strict)
2942 || m32r_legitimate_offset_addres_p (mode, x, strict)
2943 || m32r_legitimate_lo_sum_addres_p (mode, x, strict)
2944 || m32r_load_postinc_p (mode, x, strict)
2945 || m32r_store_preinc_predec_p (mode, x, strict))
2946 return true;
2947
2948 return false;
2949 }
2950
2951 static void
2952 m32r_conditional_register_usage (void)
2953 {
2954 if (flag_pic)
2955 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2956 }
2957
2958 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2959
2960 We don't allow (plus symbol large-constant) as the relocations can't
2961 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2962 We allow all CONST_DOUBLE's as the md file patterns will force the
2963 constant to memory if they can't handle them. */
2964
2965 static bool
2966 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2967 {
2968 return !(GET_CODE (x) == CONST
2969 && GET_CODE (XEXP (x, 0)) == PLUS
2970 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2971 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2972 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2973 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2974 }
2975
2976 /* Implement TARGET_STARTING_FRAME_OFFSET. The frame pointer points at
2977 the same place as the stack pointer, except if alloca has been called. */
2978
2979 static HOST_WIDE_INT
2980 m32r_starting_frame_offset (void)
2981 {
2982 return M32R_STACK_ALIGN (crtl->outgoing_args_size);
2983 }
2984